diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 5e5207b279e..d3321f7f8cc 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -350,26 +350,26 @@ and running elasticsearch distributions works correctly on supported operating s These tests should really only be run in vagrant vms because they're destructive. . Install Virtual Box and Vagrant. - ++ . (Optional) Install https://github.com/fgrehm/vagrant-cachier[vagrant-cachier] to squeeze a bit more performance out of the process: - ++ -------------------------------------- vagrant plugin install vagrant-cachier -------------------------------------- - ++ . Validate your installed dependencies: - ++ ------------------------------------- ./gradlew :qa:vagrant:vagrantCheckVersion ------------------------------------- - ++ . Download and smoke test the VMs with `./gradlew vagrantSmokeTest` or `./gradlew -Pvagrant.boxes=all vagrantSmokeTest`. The first time you run this it will download the base images and provision the boxes and immediately quit. Downloading all the images may take a long time. After the images are already on your machine, they won't be downloaded again unless they have been updated to a new version. - ++ . Run the tests with `./gradlew packagingTest`. This will cause Gradle to build the tar, zip, and deb packages and all the plugins. It will then run the tests on ubuntu-1404 and centos-7. We chose those two distributions as the default @@ -402,6 +402,7 @@ These are the linux flavors supported, all of which we provide images for * ubuntu-1404 aka trusty * ubuntu-1604 aka xenial +* ubuntu-1804 aka bionic beaver * debian-8 aka jessie * debian-9 aka stretch, the current debian stable distribution * centos-6 diff --git a/Vagrantfile b/Vagrantfile index 7322399fed5..e47dc811649 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -61,6 +61,15 @@ Vagrant.configure(2) do |config| SHELL end end + 'ubuntu-1804'.tap do |box| + config.vm.define box, define_opts do |config| + config.vm.box = 'elastic/ubuntu-18.04-x86_64' + deb_common config, box, extra: <<-SHELL + # Install Jayatana so we can work around it being present. + [ -f /usr/share/java/jayatanaag.jar ] || install jayatana + SHELL + end + end # Wheezy's backports don't contain Openjdk 8 and the backflips # required to get the sun jdk on there just aren't worth it. We have # jessie and stretch for testing debian and it works fine. 
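Once the new box is wired into the Vagrantfile and VagrantTestPlugin as above, it can be exercised like any other box. A hedged example, assuming `-Pvagrant.boxes` accepts a single box name the same way it accepts `all`:

--------------------------------------
# Smoke test just the new image, then run the packaging tests against it.
./gradlew -Pvagrant.boxes=ubuntu-1804 vagrantSmokeTest
./gradlew -Pvagrant.boxes=ubuntu-1804 packagingTest
--------------------------------------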
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy index de3c0dfc328..0aa57502c39 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy @@ -31,7 +31,8 @@ class VagrantTestPlugin implements Plugin<Project> { 'opensuse-42', 'sles-12', 'ubuntu-1404', - 'ubuntu-1604' + 'ubuntu-1604', + 'ubuntu-1804' ]) /** All Windows boxes that we test, which may or may not be supplied **/ diff --git a/buildSrc/src/main/resources/checkstyle.xml b/buildSrc/src/main/resources/checkstyle.xml index 939d48e72ce..c6873b2c127 100644 --- a/buildSrc/src/main/resources/checkstyle.xml +++ b/buildSrc/src/main/resources/checkstyle.xml @@ -12,11 +12,25 @@ @@ -138,7 +161,6 @@ diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java index e9cde26e6c8..80d862acd07 100644 --- a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java +++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java @@ -19,13 +19,13 @@ package org.elasticsearch.client.benchmark.ops.bulk; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.client.benchmark.BenchmarkTask; import org.elasticsearch.client.benchmark.metrics.Sample; import org.elasticsearch.client.benchmark.metrics.SampleRecorder; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.logging.ESLoggerFactory; import java.io.BufferedReader; import java.io.IOException; @@ -135,7 +135,7 @@ public class BulkBenchmarkTask implements BenchmarkTask { private static final class BulkIndexer implements Runnable { - private static final Logger logger = ESLoggerFactory.getLogger(BulkIndexer.class.getName()); + private static final Logger logger = LogManager.getLogger(BulkIndexer.class); private final BlockingQueue<List<String>> bulkData; private final int warmupIterations; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java index ed83e1b4aba..1030464be4f 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java @@ -48,6 +48,8 @@ import org.elasticsearch.client.ml.PostDataRequest; import org.elasticsearch.client.ml.PutCalendarRequest; import org.elasticsearch.client.ml.PutDatafeedRequest; import org.elasticsearch.client.ml.PutJobRequest; +import org.elasticsearch.client.ml.StartDatafeedRequest; +import org.elasticsearch.client.ml.StopDatafeedRequest; import org.elasticsearch.client.ml.UpdateJobRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -231,6 +233,32 @@ final class MLRequestConverters { return request; } + static Request startDatafeed(StartDatafeedRequest startDatafeedRequest) throws IOException { + String endpoint = new EndpointBuilder() +
.addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("datafeeds") + .addPathPart(startDatafeedRequest.getDatafeedId()) + .addPathPartAsIs("_start") + .build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + request.setEntity(createEntity(startDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request stopDatafeed(StopDatafeedRequest stopDatafeedRequest) throws IOException { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("datafeeds") + .addPathPart(Strings.collectionToCommaDelimitedString(stopDatafeedRequest.getDatafeedIds())) + .addPathPartAsIs("_stop") + .build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + request.setEntity(createEntity(stopDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request deleteForecast(DeleteForecastRequest deleteForecastRequest) { String endpoint = new EndpointBuilder() .addPathPartAsIs("_xpack") diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java index 06df9b31488..43bc18fad0d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java @@ -58,6 +58,10 @@ import org.elasticsearch.client.ml.PutDatafeedRequest; import org.elasticsearch.client.ml.PutDatafeedResponse; import org.elasticsearch.client.ml.PutJobRequest; import org.elasticsearch.client.ml.PutJobResponse; +import org.elasticsearch.client.ml.StartDatafeedRequest; +import org.elasticsearch.client.ml.StartDatafeedResponse; +import org.elasticsearch.client.ml.StopDatafeedRequest; +import org.elasticsearch.client.ml.StopDatafeedResponse; import org.elasticsearch.client.ml.UpdateJobRequest; import org.elasticsearch.client.ml.job.stats.JobStats; @@ -565,6 +569,86 @@ public final class MachineLearningClient { Collections.emptySet()); } + /** + * Starts the given Machine Learning Datafeed + *

+ * For additional info + * see + * ML Start Datafeed documentation + * + * @param request The request to start the datafeed + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return action acknowledgement + * @throws IOException when there is a serialization issue sending the request or receiving the response + */ + public StartDatafeedResponse startDatafeed(StartDatafeedRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::startDatafeed, + options, + StartDatafeedResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Starts the given Machine Learning Datafeed asynchronously and notifies the listener on completion + *

+ * For additional info + * see + * ML Start Datafeed documentation + * + * @param request The request to start the datafeed + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified upon request completion + */ + public void startDatafeedAsync(StartDatafeedRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::startDatafeed, + options, + StartDatafeedResponse::fromXContent, + listener, + Collections.emptySet()); + } + + /** + * Stops the given Machine Learning Datafeed + *

+ * For additional info + * see + * ML Stop Datafeed documentation + * + * @param request The request to stop the datafeed + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return action acknowledgement + * @throws IOException when there is a serialization issue sending the request or receiving the response + */ + public StopDatafeedResponse stopDatafeed(StopDatafeedRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::stopDatafeed, + options, + StopDatafeedResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Stops the given Machine Learning Datafeed asynchronously and notifies the listener on completion + *

+ * For additional info + * see + * ML Stop Datafeed documentation + * + * @param request The request to stop the datafeed + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified upon request completion + */ + public void stopDatafeedAsync(StopDatafeedRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::stopDatafeed, + options, + StopDatafeedResponse::fromXContent, + listener, + Collections.emptySet()); + } + /** * Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job} *

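Taken together, the converter and client changes above give the datafeed APIs the same shape as the other MachineLearningClient calls. A minimal usage sketch under stated assumptions: `client` is an already-built RestHighLevelClient, `"my-datafeed"` is a hypothetical datafeed ID, and the fragment lives inside a method that may throw IOException:

--------------------------------------
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.ml.StartDatafeedRequest;
import org.elasticsearch.client.ml.StartDatafeedResponse;
import org.elasticsearch.client.ml.StopDatafeedRequest;
import org.elasticsearch.client.ml.StopDatafeedResponse;
import org.elasticsearch.common.unit.TimeValue;

// POSTs to _xpack/ml/datafeeds/my-datafeed/_start
StartDatafeedRequest startRequest = new StartDatafeedRequest("my-datafeed"); // hypothetical ID
startRequest.setStart("2018-08-01T00:00:00Z"); // inclusive; epoch seconds, epoch millis or ISO 8601
startRequest.setEnd("2018-09-01T00:00:00Z");   // exclusive; omit to run continuously
StartDatafeedResponse started = client.machineLearning().startDatafeed(startRequest, RequestOptions.DEFAULT);

// POSTs to _xpack/ml/datafeeds/my-datafeed/_stop
StopDatafeedRequest stopRequest = new StopDatafeedRequest("my-datafeed");
stopRequest.setTimeout(TimeValue.timeValueMinutes(5)); // how long to wait for the stop to complete
StopDatafeedResponse stopped = client.machineLearning().stopDatafeed(stopRequest, RequestOptions.DEFAULT);
assert started.isStarted() && stopped.isStopped();
--------------------------------------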
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 8372f4b0fec..9c461a404cf 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -465,7 +465,8 @@ final class RequestConverters { Params params = new Params(request) .withRefresh(reindexRequest.isRefresh()) .withTimeout(reindexRequest.getTimeout()) - .withWaitForActiveShards(reindexRequest.getWaitForActiveShards()); + .withWaitForActiveShards(reindexRequest.getWaitForActiveShards()) + .withRequestsPerSecond(reindexRequest.getRequestsPerSecond()); if (reindexRequest.getScrollTime() != null) { params.putParam("scroll", reindexRequest.getScrollTime()); @@ -484,6 +485,7 @@ final class RequestConverters { .withRefresh(updateByQueryRequest.isRefresh()) .withTimeout(updateByQueryRequest.getTimeout()) .withWaitForActiveShards(updateByQueryRequest.getWaitForActiveShards()) + .withRequestsPerSecond(updateByQueryRequest.getRequestsPerSecond()) .withIndicesOptions(updateByQueryRequest.indicesOptions()); if (updateByQueryRequest.isAbortOnVersionConflict() == false) { params.putParam("conflicts", "proceed"); @@ -510,6 +512,7 @@ final class RequestConverters { .withRefresh(deleteByQueryRequest.isRefresh()) .withTimeout(deleteByQueryRequest.getTimeout()) .withWaitForActiveShards(deleteByQueryRequest.getWaitForActiveShards()) + .withRequestsPerSecond(deleteByQueryRequest.getRequestsPerSecond()) .withIndicesOptions(deleteByQueryRequest.indicesOptions()); if (deleteByQueryRequest.isAbortOnVersionConflict() == false) { params.putParam("conflicts", "proceed"); @@ -527,6 +530,29 @@ final class RequestConverters { return request; } + static Request rethrottleReindex(RethrottleRequest rethrottleRequest) { + return rethrottle(rethrottleRequest, "_reindex"); + } + + static Request rethrottleUpdateByQuery(RethrottleRequest rethrottleRequest) { + return rethrottle(rethrottleRequest, "_update_by_query"); + } + + static Request rethrottleDeleteByQuery(RethrottleRequest rethrottleRequest) { + return rethrottle(rethrottleRequest, "_delete_by_query"); + } + + private static Request rethrottle(RethrottleRequest rethrottleRequest, String firstPathPart) { + String endpoint = new EndpointBuilder().addPathPart(firstPathPart).addPathPart(rethrottleRequest.getTaskId().toString()) + .addPathPart("_rethrottle").build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + Params params = new Params(request) + .withRequestsPerSecond(rethrottleRequest.getRequestsPerSecond()); + // we set "group_by" to "none" because this is the response format we can parse back + params.putParam("group_by", "none"); + return request; + } + static Request putScript(PutStoredScriptRequest putStoredScriptRequest) throws IOException { String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(putStoredScriptRequest.id()).build(); Request request = new Request(HttpPost.METHOD_NAME, endpoint); @@ -714,6 +740,16 @@ final class RequestConverters { return this; } + Params withRequestsPerSecond(float requestsPerSecond) { + // the default in AbstractBulkByScrollRequest is Float.POSITIVE_INFINITY, + // but we don't want to add that to the URL parameters, instead we use -1 + if (Float.isFinite(requestsPerSecond)) { + return putParam(RethrottleRequest.REQUEST_PER_SECOND_PARAMETER, 
Float.toString(requestsPerSecond)); + } else { + return putParam(RethrottleRequest.REQUEST_PER_SECOND_PARAMETER, "-1"); + } + } + Params withRetryOnConflict(int retryOnConflict) { if (retryOnConflict > 0) { return putParam("retry_on_conflict", String.valueOf(retryOnConflict)); @@ -958,7 +994,7 @@ final class RequestConverters { private static String encodePart(String pathPart) { try { //encode each part (e.g. index, type and id) separately before merging them into the path - //we prepend "/" to the path part to make this pate absolute, otherwise there can be issues with + //we prepend "/" to the path part to make this path absolute, otherwise there can be issues with //paths that start with `-` or contain `:` URI uri = new URI(null, null, null, -1, "/" + pathPart, null, null); //manually encode any slash that each part may contain diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index ae1766fab02..86782b364a0 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -25,6 +25,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; @@ -474,13 +475,14 @@ public class RestHighLevelClient implements Closeable { * Asynchronously executes an update by query request. * See * Update By Query API on elastic.co + * @param updateByQueryRequest the request * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ - public final void updateByQueryAsync(UpdateByQueryRequest reindexRequest, RequestOptions options, - ActionListener listener) { + public final void updateByQueryAsync(UpdateByQueryRequest updateByQueryRequest, RequestOptions options, + ActionListener listener) { performRequestAsyncAndParseEntity( - reindexRequest, RequestConverters::updateByQuery, options, BulkByScrollResponse::fromXContent, listener, emptySet() + updateByQueryRequest, RequestConverters::updateByQuery, options, BulkByScrollResponse::fromXContent, listener, emptySet() ); } @@ -503,16 +505,103 @@ public class RestHighLevelClient implements Closeable { * Asynchronously executes a delete by query request. * See * Delete By Query API on elastic.co + * @param deleteByQueryRequest the request * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ - public final void deleteByQueryAsync(DeleteByQueryRequest reindexRequest, RequestOptions options, + public final void deleteByQueryAsync(DeleteByQueryRequest deleteByQueryRequest, RequestOptions options, ActionListener<BulkByScrollResponse> listener) { performRequestAsyncAndParseEntity( - reindexRequest, RequestConverters::deleteByQuery, options, BulkByScrollResponse::fromXContent, listener, emptySet() + deleteByQueryRequest, RequestConverters::deleteByQuery, options, BulkByScrollResponse::fromXContent, listener, emptySet() ); } + /** + * Executes a delete by query rethrottle request. + * See + * Delete By Query API on elastic.co + * @param rethrottleRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public final ListTasksResponse deleteByQueryRethrottle(RethrottleRequest rethrottleRequest, RequestOptions options) throws IOException { + return performRequestAndParseEntity(rethrottleRequest, RequestConverters::rethrottleDeleteByQuery, options, + ListTasksResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously executes a delete by query rethrottle request. + * See + * Delete By Query API on elastic.co + * @param rethrottleRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public final void deleteByQueryRethrottleAsync(RethrottleRequest rethrottleRequest, RequestOptions options, + ActionListener<ListTasksResponse> listener) { + performRequestAsyncAndParseEntity(rethrottleRequest, RequestConverters::rethrottleDeleteByQuery, options, + ListTasksResponse::fromXContent, listener, emptySet()); + } + + /** + * Executes an update by query rethrottle request. + * See + * Update By Query API on elastic.co + * @param rethrottleRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public final ListTasksResponse updateByQueryRethrottle(RethrottleRequest rethrottleRequest, RequestOptions options) throws IOException { + return performRequestAndParseEntity(rethrottleRequest, RequestConverters::rethrottleUpdateByQuery, options, + ListTasksResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously executes an update by query rethrottle request. + * See + * Update By Query API on elastic.co + * @param rethrottleRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public final void updateByQueryRethrottleAsync(RethrottleRequest rethrottleRequest, RequestOptions options, + ActionListener<ListTasksResponse> listener) { + performRequestAsyncAndParseEntity(rethrottleRequest, RequestConverters::rethrottleUpdateByQuery, options, + ListTasksResponse::fromXContent, listener, emptySet()); + } + + /** + * Executes a reindex rethrottling request.
+ * See the + * Reindex rethrottling API on elastic.co + * + * @param rethrottleRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public final ListTasksResponse reindexRethrottle(RethrottleRequest rethrottleRequest, RequestOptions options) throws IOException { + return performRequestAndParseEntity(rethrottleRequest, RequestConverters::rethrottleReindex, options, + ListTasksResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously executes a reindex rethrottling request. + * See the + * Reindex rethrottling API on elastic.co + * + * @param rethrottleRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public final void reindexRethrottleAsync(RethrottleRequest rethrottleRequest, RequestOptions options, + ActionListener<ListTasksResponse> listener) { + performRequestAsyncAndParseEntity(rethrottleRequest, RequestConverters::rethrottleReindex, options, ListTasksResponse::fromXContent, + listener, emptySet()); + } + /** * Pings the remote Elasticsearch cluster and returns true if the ping succeeded, false otherwise * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RethrottleRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RethrottleRequest.java new file mode 100644 index 00000000000..eb1c666a0cf --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RethrottleRequest.java @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.elasticsearch.tasks.TaskId; + +import java.util.Objects; + +/** + * A request changing throttling of a task. + */ +public class RethrottleRequest implements Validatable { + + static final String REQUEST_PER_SECOND_PARAMETER = "requests_per_second"; + + private final TaskId taskId; + private final float requestsPerSecond; + + /** + * Create a new {@link RethrottleRequest} which disables any throttling for the given taskId. + * @param taskId the task for which throttling will be disabled + */ + public RethrottleRequest(TaskId taskId) { + this.taskId = taskId; + this.requestsPerSecond = Float.POSITIVE_INFINITY; + } + + /** + * Create a new {@link RethrottleRequest} which changes the throttling for the given taskId.
+ * @param taskId the task that throttling changes will be applied to + * @param requestsPerSecond the number of requests per second that the task should perform. This needs to be a positive value. + */ + public RethrottleRequest(TaskId taskId, float requestsPerSecond) { + Objects.requireNonNull(taskId, "taskId cannot be null"); + if (requestsPerSecond <= 0) { + throw new IllegalArgumentException("requestsPerSecond needs to be a positive value but was [" + requestsPerSecond + "]"); + } + this.taskId = taskId; + this.requestsPerSecond = requestsPerSecond; + } + + /** + * @return the task Id + */ + public TaskId getTaskId() { + return taskId; + } + + /** + * @return the requests per second value + */ + public float getRequestsPerSecond() { + return requestsPerSecond; + } + + @Override + public String toString() { + return "RethrottleRequest: taskId = " + taskId + "; requestsPerSecond = " + requestsPerSecond; + } +} \ No newline at end of file diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RollupClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RollupClient.java index 1a766cb4923..3059eb46065 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RollupClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RollupClient.java @@ -20,6 +20,8 @@ package org.elasticsearch.client; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.rollup.GetRollupJobRequest; +import org.elasticsearch.client.rollup.GetRollupJobResponse; import org.elasticsearch.client.rollup.PutRollupJobRequest; import org.elasticsearch.client.rollup.PutRollupJobResponse; @@ -73,4 +75,37 @@ public class RollupClient { PutRollupJobResponse::fromXContent, listener, Collections.emptySet()); } + + /** + * Get a rollup job from the cluster. + * See + * the docs for more. + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public GetRollupJobResponse getRollupJob(GetRollupJobRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + RollupRequestConverters::getJob, + options, + GetRollupJobResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Asynchronously get a rollup job from the cluster. + * See + * the docs for more. + * @param request the request + * @param options the request options (e.g.
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void getRollupJobAsync(GetRollupJobRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + RollupRequestConverters::getJob, + options, + GetRollupJobResponse::fromXContent, + listener, Collections.emptySet()); + } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RollupRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RollupRequestConverters.java index f1c4f77ae4c..261467fa268 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RollupRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RollupRequestConverters.java @@ -18,7 +18,9 @@ */ package org.elasticsearch.client; +import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.rollup.GetRollupJobRequest; import org.elasticsearch.client.rollup.PutRollupJobRequest; import java.io.IOException; @@ -42,4 +44,14 @@ final class RollupRequestConverters { request.setEntity(createEntity(putRollupJobRequest, REQUEST_BODY_CONTENT_TYPE)); return request; } + + static Request getJob(final GetRollupJobRequest getRollupJobRequest) { + String endpoint = new RequestConverters.EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("rollup") + .addPathPartAsIs("job") + .addPathPart(getRollupJobRequest.getJobId()) + .build(); + return new Request(HttpGet.METHOD_NAME, endpoint); + } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java index a4bc34004c2..7192d82f474 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java @@ -25,6 +25,7 @@ import org.elasticsearch.client.security.EnableUserRequest; import org.elasticsearch.client.security.PutUserRequest; import org.elasticsearch.client.security.PutUserResponse; import org.elasticsearch.client.security.EmptyResponse; +import org.elasticsearch.client.security.ChangePasswordRequest; import java.io.IOException; @@ -47,6 +48,7 @@ public final class SecurityClient { * Create/update a user in the native realm synchronously. * See * the docs for more. + * * @param request the request with the user's information * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return the response from the put user call @@ -61,8 +63,9 @@ public final class SecurityClient { * Asynchronously create/update a user in the native realm. * See * the docs for more. - * @param request the request with the user's information - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * + * @param request the request with the user's information + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ public void putUserAsync(PutUserRequest request, RequestOptions options, ActionListener<PutUserResponse> listener) { @@ -74,6 +77,7 @@ * Enable a native realm or built-in user synchronously. * See * the docs for more. + * * @param request the request with the user to enable * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return the response from the enable user call @@ -88,12 +92,13 @@ * Enable a native realm or built-in user asynchronously. * See * the docs for more. - * @param request the request with the user to enable - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * + * @param request the request with the user to enable + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ public void enableUserAsync(EnableUserRequest request, RequestOptions options, - ActionListener<EmptyResponse> listener) { + ActionListener<EmptyResponse> listener) { restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::enableUser, options, EmptyResponse::fromXContent, listener, emptySet()); } @@ -102,6 +107,7 @@ * Disable a native realm or built-in user synchronously. * See * the docs for more. + * * @param request the request with the user to disable * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return the response from the disable user call @@ -116,13 +122,44 @@ * Disable a native realm or built-in user asynchronously. * See * the docs for more. - * @param request the request with the user to disable - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * + * @param request the request with the user to disable + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ public void disableUserAsync(DisableUserRequest request, RequestOptions options, - ActionListener<EmptyResponse> listener) { + ActionListener<EmptyResponse> listener) { restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::disableUser, options, EmptyResponse::fromXContent, listener, emptySet()); } + + /** + * Change the password of a user of a native realm or built-in user synchronously. + * See + * the docs for more. + * + * @param request the request with the user's new password + * @param options the request options (e.g.
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response from the change user password call + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public EmptyResponse changePassword(ChangePasswordRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, SecurityRequestConverters::changePassword, options, + EmptyResponse::fromXContent, emptySet()); + } + + /** + * Change the password of a user of a native realm or built-in user asynchronously. + * See + * the docs for more. + * + * @param request the request with the user's new password + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void changePasswordAsync(ChangePasswordRequest request, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::changePassword, options, + EmptyResponse::fromXContent, listener, emptySet()); + } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java index 8533e0f1b4c..3157abe6337 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java @@ -19,9 +19,11 @@ package org.elasticsearch.client; +import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.elasticsearch.client.security.DisableUserRequest; import org.elasticsearch.client.security.EnableUserRequest; +import org.elasticsearch.client.security.ChangePasswordRequest; import org.elasticsearch.client.security.PutUserRequest; import org.elasticsearch.client.security.SetUserEnabledRequest; @@ -34,6 +36,19 @@ final class SecurityRequestConverters { private SecurityRequestConverters() {} + static Request changePassword(ChangePasswordRequest changePasswordRequest) throws IOException { + String endpoint = new RequestConverters.EndpointBuilder() + .addPathPartAsIs("_xpack/security/user") + .addPathPart(changePasswordRequest.getUsername()) + .addPathPartAsIs("_password") + .build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + request.setEntity(createEntity(changePasswordRequest, REQUEST_BODY_CONTENT_TYPE)); + RequestConverters.Params params = new RequestConverters.Params(request); + params.withRefreshPolicy(changePasswordRequest.getRefreshPolicy()); + return request; + } + static Request putUser(PutUserRequest putUserRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_xpack/security/user") diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java new file mode 100644 index 00000000000..4ee6d747e57 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java @@ -0,0 +1,160 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.ml.datafeed.DatafeedConfig; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +/** + * Request to start a Datafeed + */ +public class StartDatafeedRequest extends ActionRequest implements ToXContentObject { + + public static final ParseField START = new ParseField("start"); + public static final ParseField END = new ParseField("end"); + public static final ParseField TIMEOUT = new ParseField("timeout"); + + public static ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("start_datafeed_request", a -> new StartDatafeedRequest((String)a[0])); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID); + PARSER.declareString(StartDatafeedRequest::setStart, START); + PARSER.declareString(StartDatafeedRequest::setEnd, END); + PARSER.declareString((params, val) -> + params.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); + } + + private final String datafeedId; + private String start; + private String end; + private TimeValue timeout; + + /** + * Create a new StartDatafeedRequest for the given DatafeedId + * + * @param datafeedId non-null existing Datafeed ID + */ + public StartDatafeedRequest(String datafeedId) { + this.datafeedId = Objects.requireNonNull(datafeedId, "[datafeed_id] must not be null"); + } + + public String getDatafeedId() { + return datafeedId; + } + + public String getStart() { + return start; + } + + /** + * The time that the datafeed should begin. This value is inclusive. + * + * If you specify a start value that is earlier than the timestamp of the latest processed record, + * the datafeed continues from 1 millisecond after the timestamp of the latest processed record. + * + * If you do not specify a start time and the datafeed is associated with a new job, + * the analysis starts from the earliest time for which data is available. + * + * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO 8601 string + */ + public void setStart(String start) { + this.start = start; + } + + public String getEnd() { + return end; + } + + /** + * The time that the datafeed should end. This value is exclusive. + * If you do not specify an end time, the datafeed runs continuously. 
+ * + * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO 8601 string + */ + public void setEnd(String end) { + this.end = end; + } + + public TimeValue getTimeout() { + return timeout; + } + + /** + * Indicates how long to wait for the cluster to respond to the request. + * + * @param timeout TimeValue for how long to wait for a response from the cluster + */ + public void setTimeout(TimeValue timeout) { + this.timeout = timeout; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public int hashCode() { + return Objects.hash(datafeedId, start, end, timeout); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || obj.getClass() != getClass()) { + return false; + } + + StartDatafeedRequest other = (StartDatafeedRequest) obj; + return Objects.equals(datafeedId, other.datafeedId) && + Objects.equals(start, other.start) && + Objects.equals(end, other.end) && + Objects.equals(timeout, other.timeout); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); + if (start != null) { + builder.field(START.getPreferredName(), start); + } + if (end != null) { + builder.field(END.getPreferredName(), end); + } + if (timeout != null) { + builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); + } + builder.endObject(); + return builder; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedResponse.java new file mode 100644 index 00000000000..d4ed46c5316 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedResponse.java @@ -0,0 +1,93 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +/** + * Response indicating if the Machine Learning Datafeed is now started or not + */ +public class StartDatafeedResponse extends ActionResponse implements ToXContentObject { + + private static final ParseField STARTED = new ParseField("started"); + + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "start_datafeed_response", + true, + (a) -> new StartDatafeedResponse((Boolean)a[0])); + + static { + PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), STARTED); + } + + private final boolean started; + + public StartDatafeedResponse(boolean started) { + this.started = started; + } + + public static StartDatafeedResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + /** + * Has the Datafeed started or not + * + * @return boolean value indicating the Datafeed started status + */ + public boolean isStarted() { + return started; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + StartDatafeedResponse that = (StartDatafeedResponse) other; + return isStarted() == that.isStarted(); + } + + @Override + public int hashCode() { + return Objects.hash(isStarted()); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(STARTED.getPreferredName(), started); + builder.endObject(); + return builder; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java new file mode 100644 index 00000000000..f4aa4ff35d8 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java @@ -0,0 +1,195 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.ml.datafeed.DatafeedConfig; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.security.InvalidParameterException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +/** + * Request to stop Machine Learning Datafeeds + */ +public class StopDatafeedRequest extends ActionRequest implements ToXContentObject { + + public static final ParseField TIMEOUT = new ParseField("timeout"); + public static final ParseField FORCE = new ParseField("force"); + public static final ParseField ALLOW_NO_DATAFEEDS = new ParseField("allow_no_datafeeds"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser<StopDatafeedRequest, Void> PARSER = new ConstructingObjectParser<>( + "stop_datafeed_request", + a -> new StopDatafeedRequest((List<String>) a[0])); + + static { + PARSER.declareField(ConstructingObjectParser.constructorArg(), + p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())), + DatafeedConfig.ID, ObjectParser.ValueType.STRING_ARRAY); + PARSER.declareString((obj, val) -> obj.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); + PARSER.declareBoolean(StopDatafeedRequest::setForce, FORCE); + PARSER.declareBoolean(StopDatafeedRequest::setAllowNoDatafeeds, ALLOW_NO_DATAFEEDS); + } + + private static final String ALL_DATAFEEDS = "_all"; + + private final List<String> datafeedIds; + private TimeValue timeout; + private Boolean force; + private Boolean allowNoDatafeeds; + + /** + * Explicitly stop all datafeeds + * + * @return a {@link StopDatafeedRequest} for all existing datafeeds + */ + public static StopDatafeedRequest stopAllDatafeedsRequest() { + return new StopDatafeedRequest(ALL_DATAFEEDS); + } + + StopDatafeedRequest(List<String> datafeedIds) { + if (datafeedIds.isEmpty()) { + throw new InvalidParameterException("datafeedIds must not be empty"); + } + if (datafeedIds.stream().anyMatch(Objects::isNull)) { + throw new NullPointerException("datafeedIds must not contain null values"); + } + this.datafeedIds = new ArrayList<>(datafeedIds); + } + + /** + * Stop the specified Datafeeds via their unique datafeedIds + * + * @param datafeedIds must be non-null and non-empty and each datafeedId must be non-null + */ + public StopDatafeedRequest(String... datafeedIds) { + this(Arrays.asList(datafeedIds)); + } + + /** + * All the datafeedIds to be stopped + */ + public List<String> getDatafeedIds() { + return datafeedIds; + } + + public TimeValue getTimeout() { + return timeout; + } + + /** + * How long to wait for the stop request to complete before timing out. + * + * @param timeout Default value: 30 minutes + */ + public void setTimeout(TimeValue timeout) { + this.timeout = timeout; + } + + public Boolean isForce() { + return force; + } + + /** + * Whether the stop should be forced. + * + * Use to forcefully stop a datafeed + * + * @param force When {@code true} forcefully stop the datafeed.
Defaults to {@code false} + */ + public void setForce(boolean force) { + this.force = force; + } + + public Boolean isAllowNoDatafeeds() { + return this.allowNoDatafeeds; + } + + /** + * Whether to ignore if a wildcard expression matches no datafeeds. + * + * This includes {@code _all} string. + * + * @param allowNoDatafeeds When {@code true} ignore if wildcard or {@code _all} matches no datafeeds. Defaults to {@code true} + */ + public void setAllowNoDatafeeds(boolean allowNoDatafeeds) { + this.allowNoDatafeeds = allowNoDatafeeds; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public int hashCode() { + return Objects.hash(datafeedIds, timeout, force, allowNoDatafeeds); + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + StopDatafeedRequest that = (StopDatafeedRequest) other; + return Objects.equals(datafeedIds, that.datafeedIds) && + Objects.equals(timeout, that.timeout) && + Objects.equals(force, that.force) && + Objects.equals(allowNoDatafeeds, that.allowNoDatafeeds); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(DatafeedConfig.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(datafeedIds)); + if (timeout != null) { + builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); + } + if (force != null) { + builder.field(FORCE.getPreferredName(), force); + } + if (allowNoDatafeeds != null) { + builder.field(ALLOW_NO_DATAFEEDS.getPreferredName(), allowNoDatafeeds); + } + builder.endObject(); + return builder; + } + + @Override + public String toString() { + return Strings.toString(this); + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedResponse.java new file mode 100644 index 00000000000..c370d7d9d0b --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedResponse.java @@ -0,0 +1,93 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +/** + * Response indicating if the Machine Learning Datafeed is now stopped or not + */ +public class StopDatafeedResponse extends ActionResponse implements ToXContentObject { + + private static final ParseField STOPPED = new ParseField("stopped"); + + public static final ConstructingObjectParser<StopDatafeedResponse, Void> PARSER = + new ConstructingObjectParser<>( + "stop_datafeed_response", + true, + (a) -> new StopDatafeedResponse((Boolean)a[0])); + + static { + PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), STOPPED); + } + + private final boolean stopped; + + public StopDatafeedResponse(boolean stopped) { + this.stopped = stopped; + } + + public static StopDatafeedResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + /** + * Has the Datafeed stopped or not + * + * @return boolean value indicating the Datafeed stopped status + */ + public boolean isStopped() { + return stopped; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + StopDatafeedResponse that = (StopDatafeedResponse) other; + return isStopped() == that.isStopped(); + } + + @Override + public int hashCode() { + return Objects.hash(isStopped()); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(STOPPED.getPreferredName(), stopped); + builder.endObject(); + return builder; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobRequest.java new file mode 100644 index 00000000000..410bc7caa09 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobRequest.java @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.rollup; + +import org.elasticsearch.client.Validatable; +import org.elasticsearch.client.ValidationException; + +import java.util.Objects; +import java.util.Optional; + +/** + * Request to fetch rollup jobs. + */ +public class GetRollupJobRequest implements Validatable { + private final String jobId; + + /** + * Create a request.
+ * @param jobId id of the job to return or {@code _all} to return all jobs + */ + public GetRollupJobRequest(final String jobId) { + Objects.requireNonNull(jobId, "jobId is required"); + if ("_all".equals(jobId)) { + throw new IllegalArgumentException("use the default ctor to ask for all jobs"); + } + this.jobId = jobId; + } + + /** + * Create a request to load all rollup jobs. + */ + public GetRollupJobRequest() { + this.jobId = "_all"; + } + + /** + * ID of the job to return. + */ + public String getJobId() { + return jobId; + } + + @Override + public Optional validate() { + return Optional.empty(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + final GetRollupJobRequest that = (GetRollupJobRequest) o; + return jobId.equals(that.jobId); + } + + @Override + public int hashCode() { + return Objects.hash(jobId); + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java new file mode 100644 index 00000000000..131e6ec0eda --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java @@ -0,0 +1,374 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.rollup; + +import org.elasticsearch.client.rollup.job.config.RollupJobConfig; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static java.util.Collections.unmodifiableList; +import static java.util.stream.Collectors.joining; + +/** + * Response from rollup's get jobs api. 
+ */ +public class GetRollupJobResponse { + static final ParseField JOBS = new ParseField("jobs"); + static final ParseField CONFIG = new ParseField("config"); + static final ParseField STATS = new ParseField("stats"); + static final ParseField STATUS = new ParseField("status"); + static final ParseField NUM_PAGES = new ParseField("pages_processed"); + static final ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed"); + static final ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("rollups_indexed"); + static final ParseField NUM_INVOCATIONS = new ParseField("trigger_count"); + static final ParseField STATE = new ParseField("job_state"); + static final ParseField CURRENT_POSITION = new ParseField("current_position"); + static final ParseField UPGRADED_DOC_ID = new ParseField("upgraded_doc_id"); + + private List jobs; + + GetRollupJobResponse(final List jobs) { + this.jobs = Objects.requireNonNull(jobs, "jobs is required"); + } + + /** + * Jobs returned by the request. + */ + public List getJobs() { + return jobs; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + final GetRollupJobResponse that = (GetRollupJobResponse) o; + return jobs.equals(that.jobs); + } + + @Override + public int hashCode() { + return Objects.hash(jobs); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "get_rollup_job_response", + true, + args -> { + @SuppressWarnings("unchecked") // We're careful about the type in the list + List jobs = (List) args[0]; + return new GetRollupJobResponse(unmodifiableList(jobs)); + }); + static { + PARSER.declareObjectArray(constructorArg(), JobWrapper.PARSER::apply, JOBS); + } + + public static GetRollupJobResponse fromXContent(final XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public final String toString() { + return "{jobs=" + jobs.stream().map(Object::toString).collect(joining("\n")) + "\n}"; + } + + public static class JobWrapper { + private final RollupJobConfig job; + private final RollupIndexerJobStats stats; + private final RollupJobStatus status; + + JobWrapper(RollupJobConfig job, RollupIndexerJobStats stats, RollupJobStatus status) { + this.job = job; + this.stats = stats; + this.status = status; + } + + /** + * Configuration of the job. + */ + public RollupJobConfig getJob() { + return job; + } + + /** + * Statistics about the execution of the job. + */ + public RollupIndexerJobStats getStats() { + return stats; + } + + /** + * Current state of the job. 
+ */ + public RollupJobStatus getStatus() { + return status; + } + + private static final ConstructingObjectParser<JobWrapper, Void> PARSER = new ConstructingObjectParser<>( + "job", + true, + a -> new JobWrapper((RollupJobConfig) a[0], (RollupIndexerJobStats) a[1], (RollupJobStatus) a[2])); + static { + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> RollupJobConfig.fromXContent(p, null), CONFIG); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), RollupIndexerJobStats.PARSER::apply, STATS); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), RollupJobStatus.PARSER::apply, STATUS); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + JobWrapper other = (JobWrapper) obj; + return Objects.equals(job, other.job) + && Objects.equals(stats, other.stats) + && Objects.equals(status, other.status); + } + + @Override + public int hashCode() { + return Objects.hash(job, stats, status); + } + + @Override + public final String toString() { + return "{job=" + job + + ", stats=" + stats + + ", status=" + status + "}"; + } + } + + /** + * The Rollup specialization of stats for the AsyncTwoPhaseIndexer. + * Note: instead of `documents_indexed`, this XContent shows `rollups_indexed` + */ + public static class RollupIndexerJobStats { + private final long numPages; + private final long numInputDocuments; + private final long numOutputDocuments; + private final long numInvocations; + + RollupIndexerJobStats(long numPages, long numInputDocuments, long numOutputDocuments, long numInvocations) { + this.numPages = numPages; + this.numInputDocuments = numInputDocuments; + this.numOutputDocuments = numOutputDocuments; + this.numInvocations = numInvocations; + } + + /** + * The number of pages read from the input indices. + */ + public long getNumPages() { + return numPages; + } + + /** + * The number of documents read from the input indices. + */ + public long getNumDocuments() { + return numInputDocuments; + } + + /** + * Number of times that the job woke up to write documents. + */ + public long getNumInvocations() { + return numInvocations; + } + + /** + * Number of documents written to the result indices. + */ + public long getOutputDocuments() { + return numOutputDocuments; + } + + private static final ConstructingObjectParser<RollupIndexerJobStats, Void> PARSER = new ConstructingObjectParser<>( + STATS.getPreferredName(), + true, + args -> new RollupIndexerJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3])); + static { + PARSER.declareLong(constructorArg(), NUM_PAGES); + PARSER.declareLong(constructorArg(), NUM_INPUT_DOCUMENTS); + PARSER.declareLong(constructorArg(), NUM_OUTPUT_DOCUMENTS); + PARSER.declareLong(constructorArg(), NUM_INVOCATIONS); + } + + @Override + public boolean equals(Object other) { + if (this == other) return true; + if (other == null || getClass() != other.getClass()) return false; + RollupIndexerJobStats that = (RollupIndexerJobStats) other; + return Objects.equals(this.numPages, that.numPages) + && Objects.equals(this.numInputDocuments, that.numInputDocuments) + && Objects.equals(this.numOutputDocuments, that.numOutputDocuments) + && Objects.equals(this.numInvocations, that.numInvocations); + } + + @Override + public int hashCode() { + return Objects.hash(numPages, numInputDocuments, numOutputDocuments, numInvocations); + } + + @Override + public final String toString() { + return "{pages=" + numPages + + ", input_docs=" + numInputDocuments + + ", output_docs=" + numOutputDocuments + + ", invocations=" + numInvocations + "}"; + } + } + + /** + * Status of the rollup job. + */ + public static class RollupJobStatus { + private final IndexerState state; + private final Map<String, Object> currentPosition; + private final boolean upgradedDocumentId; + + RollupJobStatus(IndexerState state, Map<String, Object> position, boolean upgradedDocumentId) { + this.state = state; + this.currentPosition = position; + this.upgradedDocumentId = upgradedDocumentId; + } + + /** + * The state of the writer. + */ + public IndexerState getState() { + return state; + } + /** + * The current position of the writer. + */ + public Map<String, Object> getCurrentPosition() { + return currentPosition; + } + /** + * Flag that holds the state of the ID scheme, e.g. if it has been upgraded + * to the concatenation scheme. + */ + public boolean getUpgradedDocumentId() { + return upgradedDocumentId; + } + + private static final ConstructingObjectParser<RollupJobStatus, Void> PARSER = new ConstructingObjectParser<>( + STATUS.getPreferredName(), + true, + args -> { + IndexerState state = (IndexerState) args[0]; + @SuppressWarnings("unchecked") // We're careful of the contents + Map<String, Object> currentPosition = (Map<String, Object>) args[1]; + Boolean upgradedDocumentId = (Boolean) args[2]; + return new RollupJobStatus(state, currentPosition, upgradedDocumentId == null ? false : upgradedDocumentId); + }); + static { + PARSER.declareField(constructorArg(), p -> IndexerState.fromString(p.text()), STATE, ObjectParser.ValueType.STRING); + PARSER.declareField(optionalConstructorArg(), p -> { + if (p.currentToken() == XContentParser.Token.START_OBJECT) { + return p.map(); + } + if (p.currentToken() == XContentParser.Token.VALUE_NULL) { + return null; + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, CURRENT_POSITION, ObjectParser.ValueType.VALUE_OBJECT_ARRAY); + + // Optional to accommodate old versions of state + PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), UPGRADED_DOC_ID); + } + + @Override + public boolean equals(Object other) { + if (this == other) return true; + if (other == null || getClass() != other.getClass()) return false; + RollupJobStatus that = (RollupJobStatus) other; + return Objects.equals(state, that.state) + && Objects.equals(currentPosition, that.currentPosition) + && upgradedDocumentId == that.upgradedDocumentId; + } + + @Override + public int hashCode() { + return Objects.hash(state, currentPosition, upgradedDocumentId); + } + + @Override + public final String toString() { + return "{state=" + state + + ", currentPosition=" + currentPosition + + ", upgradedDocumentId=" + upgradedDocumentId + "}"; + } + } + + /** + * IndexerState represents the internal state of the indexer. It + * is also persisted when changing from started/stopped in case the allocated + * task is restarted elsewhere. + */ + public enum IndexerState { + /** Indexer is running, but not actively indexing data (e.g. it's idle). */ + STARTED, + + /** Indexer is actively indexing data. */ + INDEXING, + + /** + * Transition state to where an indexer has acknowledged the stop + * but is still in the process of halting. + */ + STOPPING, + + /** Indexer is "paused" and ignoring scheduled triggers. */ + STOPPED, + + /** + * Something (internal or external) has requested the indexer abort + * and shutdown. + */ + ABORTING; + + static IndexerState fromString(String name) { + return valueOf(name.trim().toUpperCase(Locale.ROOT)); + } + + String value() { + return name().toLowerCase(Locale.ROOT); + } + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/ChangePasswordRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/ChangePasswordRequest.java new file mode 100644 index 00000000000..ffae034d246 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/ChangePasswordRequest.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.security; + +import org.elasticsearch.client.Validatable; +import org.elasticsearch.common.CharArrays; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; + +/** + * Request object to change the password of a user of a native realm or a built-in user. + */ +public final class ChangePasswordRequest implements Validatable, ToXContentObject { + + private final String username; + private final char[] password; + private final RefreshPolicy refreshPolicy; + + /** + * @param username The username of the user whose password should be changed or null for the current user. + * @param password The new password. The password array is not cleared by the {@link ChangePasswordRequest} object so the + * calling code must clear it after receiving the response. + * @param refreshPolicy The refresh policy for the request. + */ + public ChangePasswordRequest(@Nullable String username, char[] password, RefreshPolicy refreshPolicy) { + this.username = username; + this.password = Objects.requireNonNull(password, "password is required"); + this.refreshPolicy = refreshPolicy == null ? RefreshPolicy.getDefault() : refreshPolicy; + } + + public String getUsername() { + return username; + } + + public char[] getPassword() { + return password; + } + + public RefreshPolicy getRefreshPolicy() { + return refreshPolicy; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + byte[] charBytes = CharArrays.toUtf8Bytes(password); + try { + return builder.startObject() + .field("password").utf8Value(charBytes, 0, charBytes.length) + .endObject(); + } finally { + Arrays.fill(charBytes, (byte) 0); + } + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserRequest.java index f8c30a25aed..11e13f621e6 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserRequest.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.CharArrays; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import java.io.Closeable; import java.io.IOException; import java.util.Arrays; import java.util.Collections; @@ -37,7 +36,7 @@ import java.util.Optional; /** * Request object to create or update a user in the native realm. */ -public final class PutUserRequest implements Validatable, Closeable, ToXContentObject { +public final class PutUserRequest implements Validatable, ToXContentObject { private final String username; private final List roles; @@ -48,6 +47,20 @@ public final class PutUserRequest implements Validatable, Closeable, ToXContentO private final boolean enabled; private final RefreshPolicy refreshPolicy; + /** + * Creates a new request that is used to create or update a user in the native realm. + * + * @param username the username of the user to be created or updated + * @param password the password of the user. The password array is not modified by this class. + * It is the responsibility of the caller to clear the password after receiving + * a response. 
+ * @param roles the roles that this user is assigned + * @param fullName the full name of the user that may be used for display purposes + * @param email the email address of the user + * @param enabled true if the user is enabled and allowed to access elasticsearch + * @param metadata a map of additional user attributes that may be used in templating roles + * @param refreshPolicy the refresh policy for the request. + */ public PutUserRequest(String username, char[] password, List roles, String fullName, String email, boolean enabled, Map metadata, RefreshPolicy refreshPolicy) { this.username = Objects.requireNonNull(username, "username is required"); @@ -114,13 +127,6 @@ public final class PutUserRequest implements Validatable, Closeable, ToXContentO return result; } - @Override - public void close() { - if (password != null) { - Arrays.fill(password, (char) 0); - } - } - @Override public Optional validate() { if (metadata != null && metadata.keySet().stream().anyMatch(s -> s.startsWith("_"))) { @@ -137,7 +143,11 @@ public final class PutUserRequest implements Validatable, Closeable, ToXContentO builder.field("username", username); if (password != null) { byte[] charBytes = CharArrays.toUtf8Bytes(password); - builder.field("password").utf8Value(charBytes, 0, charBytes.length); + try { + builder.field("password").utf8Value(charBytes, 0, charBytes.length); + } finally { + Arrays.fill(charBytes, (byte) 0); + } } if (roles != null) { builder.field("roles", roles); diff --git a/server/src/main/java/org/elasticsearch/script/ExecutableScript.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpression.java similarity index 54% rename from server/src/main/java/org/elasticsearch/script/ExecutableScript.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpression.java index d0d8020371b..10c0d0911ba 100644 --- a/server/src/main/java/org/elasticsearch/script/ExecutableScript.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpression.java @@ -17,33 +17,14 @@ * under the License. */ -package org.elasticsearch.script; +package org.elasticsearch.client.security.support.expressiondsl; -import java.util.Map; +import org.elasticsearch.common.xcontent.ToXContentObject; /** - * An executable script, can't be used concurrently. + * Implementations of this interface represent an expression used for user role mapping + * that can later be resolved to a boolean value. */ -public interface ExecutableScript { +public interface RoleMapperExpression extends ToXContentObject { - /** - * Sets a runtime script parameter. - *

- * Note that this method may be slow, involving put() and get() calls - * to a hashmap or similar. - * @param name parameter name - * @param value parameter value - */ - void setNextVar(String name, Object value); - - /** - * Executes the script. - */ - Object run(); - - interface Factory { - ExecutableScript newInstance(Map params); - } - - ScriptContext CONTEXT = new ScriptContext<>("executable", Factory.class); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AllRoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AllRoleMapperExpression.java new file mode 100644 index 00000000000..b5cbe4d2e42 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AllRoleMapperExpression.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl.expressions; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; + +import java.util.ArrayList; +import java.util.List; + +/** + * An expression that evaluates to true if-and-only-if all its children + * evaluate to true. + * An all expression with no children is always true. + */ +public final class AllRoleMapperExpression extends CompositeRoleMapperExpression { + + private AllRoleMapperExpression(String name, RoleMapperExpression[] elements) { + super(name, elements); + } + + public static Builder builder() { + return new Builder(); + } + + public static final class Builder { + private List elements = new ArrayList<>(); + + public Builder addExpression(final RoleMapperExpression expression) { + assert expression != null : "expression cannot be null"; + elements.add(expression); + return this; + } + + public AllRoleMapperExpression build() { + return new AllRoleMapperExpression(CompositeType.ALL.getName(), elements.toArray(new RoleMapperExpression[0])); + } + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AnyRoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AnyRoleMapperExpression.java new file mode 100644 index 00000000000..7632a071bd1 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AnyRoleMapperExpression.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl.expressions; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; + +import java.util.ArrayList; +import java.util.List; + +/** + * An expression that evaluates to true if at least one of its children + * evaluate to true. + * An any expression with no children is never true. + */ +public final class AnyRoleMapperExpression extends CompositeRoleMapperExpression { + + private AnyRoleMapperExpression(String name, RoleMapperExpression[] elements) { + super(name, elements); + } + + public static Builder builder() { + return new Builder(); + } + + public static final class Builder { + private List elements = new ArrayList<>(); + + public Builder addExpression(final RoleMapperExpression expression) { + assert expression != null : "expression cannot be null"; + elements.add(expression); + return this; + } + + public AnyRoleMapperExpression build() { + return new AnyRoleMapperExpression(CompositeType.ANY.getName(), elements.toArray(new RoleMapperExpression[0])); + } + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeRoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeRoleMapperExpression.java new file mode 100644 index 00000000000..2519c59b688 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeRoleMapperExpression.java @@ -0,0 +1,100 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl.expressions; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * Expression of role mapper expressions which can be combined by operators like AND, OR + *
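+ * (The supported composite operators are {@code any}, {@code all} and {@code except};
+ * see {@link CompositeType}.)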

+ * Expression builder example: + *

+ * {@code
+ * final RoleMapperExpression allExpression = AllRoleMapperExpression.builder()
+                    .addExpression(AnyRoleMapperExpression.builder()
+                            .addExpression(FieldRoleMapperExpression.ofUsername("user1@example.org"))
+                            .addExpression(FieldRoleMapperExpression.ofUsername("user2@example.org"))
+                            .build())
+                    .addExpression(FieldRoleMapperExpression.ofMetadata("metadata.location", "AMER"))
+                    .addExpression(new ExceptRoleMapperExpression(FieldRoleMapperExpression.ofUsername("user3@example.org")))
+                    .build();
+ * }
+ * 
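+ * <p>
+ * For illustration, serializing the {@code allExpression} above via {@code toXContent} should
+ * produce JSON along these lines (shape inferred from the {@code toXContent} implementations
+ * in this class and its subclasses, with whitespace added):
+ * {@code
+ * { "all": [
+ *     { "any": [ { "field": { "username": ["user1@example.org"] } },
+ *                { "field": { "username": ["user2@example.org"] } } ] },
+ *     { "field": { "metadata.location": ["AMER"] } },
+ *     { "except": { "field": { "username": ["user3@example.org"] } } }
+ * ] }
+ * }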
+ */ +public abstract class CompositeRoleMapperExpression implements RoleMapperExpression { + private final String name; + private final List<RoleMapperExpression> elements; + + CompositeRoleMapperExpression(final String name, final RoleMapperExpression... elements) { + assert name != null : "field name cannot be null"; + assert elements != null : "at least one field expression is required"; + this.name = name; + this.elements = Collections.unmodifiableList(Arrays.asList(elements)); + } + + public String getName() { + return this.name; + } + + public List<RoleMapperExpression> getElements() { + return elements; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + final CompositeRoleMapperExpression that = (CompositeRoleMapperExpression) o; + if (Objects.equals(this.getName(), that.getName()) == false) { + return false; + } + return Objects.equals(this.getElements(), that.getElements()); + } + + @Override + public int hashCode() { + return Objects.hash(name, elements); + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + builder.startArray(name); + for (RoleMapperExpression e : elements) { + e.toXContent(builder, params); + } + builder.endArray(); + return builder.endObject(); + } + +} + diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeType.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeType.java new file mode 100644 index 00000000000..1d6c8aea122 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeType.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.security.support.expressiondsl.expressions; + +import org.elasticsearch.common.ParseField; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +public enum CompositeType { + + ANY("any"), ALL("all"), EXCEPT("except"); + + private static Map<String, CompositeType> nameToType = Collections.unmodifiableMap(initialize()); + private ParseField field; + + CompositeType(String name) { + this.field = new ParseField(name); + } + + public String getName() { + return field.getPreferredName(); + } + + public ParseField getParseField() { + return field; + } + + public static CompositeType fromName(String name) { + return nameToType.get(name); + } + + private static Map<String, CompositeType> initialize() { + Map<String, CompositeType> map = new HashMap<>(); + for (CompositeType field : values()) { + map.put(field.getName(), field); + } + return map; + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/ExceptRoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/ExceptRoleMapperExpression.java new file mode 100644 index 00000000000..c2cad0d18da --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/ExceptRoleMapperExpression.java @@ -0,0 +1,47 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl.expressions; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; + +/** + * A negating expression. That is, this expression evaluates to true if-and-only-if + * its delegate expression evaluates to false. + * Syntactically, except expressions are intended to be children of all + * expressions ({@link AllRoleMapperExpression}). 
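+ * <p>
+ * For example, {@code new ExceptRoleMapperExpression(FieldRoleMapperExpression.ofUsername("user1@example.org"))}
+ * should serialize, via the {@code toXContent} override below, to
+ * {@code { "except": { "field": { "username": ["user1@example.org"] } } }}.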
+ */ +public final class ExceptRoleMapperExpression extends CompositeRoleMapperExpression { + + public ExceptRoleMapperExpression(final RoleMapperExpression expression) { + super(CompositeType.EXCEPT.getName(), expression); + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + builder.field(CompositeType.EXCEPT.getName()); + builder.value(getElements().get(0)); + return builder.endObject(); + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/fields/FieldRoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/fields/FieldRoleMapperExpression.java new file mode 100644 index 00000000000..c96ac3cc5b5 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/fields/FieldRoleMapperExpression.java @@ -0,0 +1,122 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl.fields; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * An expression that evaluates to true if a field (map element) matches + * the provided values. A field expression may have more than one provided value, in which + * case the expression is true if any of the values are matched. + *
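+ * The field names commonly used in role mappings are {@code username}, {@code groups},
+ * {@code dn} and {@code metadata.*}; convenience factories for each of these are defined
+ * at the bottom of this class.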

+ * Expression builder example: + *

+ * {@code
+ * final RoleMapperExpression usernameExpression = FieldRoleMapperExpression.ofUsername("user1@example.org");
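+ * // additional examples using the other factories defined at the bottom of this class
+ * // (the values shown are illustrative only):
+ * final RoleMapperExpression groupsExpression = FieldRoleMapperExpression.ofGroups("admin", "operators");
+ * final RoleMapperExpression metadataExpression = FieldRoleMapperExpression.ofMetadata("metadata.location", "AMER");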
+ * }
+ * 
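+ * <p>
+ * Serialized via the {@code toXContent} implementation below, the expression above should
+ * yield {@code { "field": { "username": ["user1@example.org"] } }}.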
+ */ +public class FieldRoleMapperExpression implements RoleMapperExpression { + + private final String field; + private final List values; + + public FieldRoleMapperExpression(final String field, final Object... values) { + if (field == null || field.isEmpty()) { + throw new IllegalArgumentException("null or empty field name (" + field + ")"); + } + if (values == null || values.length == 0) { + throw new IllegalArgumentException("null or empty values (" + values + ")"); + } + this.field = field; + this.values = Collections.unmodifiableList(Arrays.asList(values)); + } + + public String getField() { + return field; + } + + public List getValues() { + return values; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + final FieldRoleMapperExpression that = (FieldRoleMapperExpression) o; + + return Objects.equals(this.getField(), that.getField()) && Objects.equals(this.getValues(), that.getValues()); + } + + @Override + public int hashCode() { + int result = field.hashCode(); + result = 31 * result + values.hashCode(); + return result; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startObject("field"); + builder.startArray(this.field); + for (Object value : values) { + builder.value(value); + } + builder.endArray(); + builder.endObject(); + return builder.endObject(); + } + + public static FieldRoleMapperExpression ofUsername(Object... values) { + return ofKeyValues("username", values); + } + + public static FieldRoleMapperExpression ofGroups(Object... values) { + return ofKeyValues("groups", values); + } + + public static FieldRoleMapperExpression ofDN(Object... values) { + return ofKeyValues("dn", values); + } + + public static FieldRoleMapperExpression ofMetadata(String key, Object... values) { + if (key.startsWith("metadata.") == false) { + throw new IllegalArgumentException("metadata key must have prefix 'metadata.'"); + } + return ofKeyValues(key, values); + } + + public static FieldRoleMapperExpression ofKeyValues(String key, Object... values) { + return new FieldRoleMapperExpression(key, values); + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParser.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParser.java new file mode 100644 index 00000000000..98de4f4c209 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParser.java @@ -0,0 +1,180 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.security.support.expressiondsl.parser; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.AllRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.AnyRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.CompositeType; +import org.elasticsearch.client.security.support.expressiondsl.expressions.ExceptRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression; +import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +/** + * Parses the JSON (XContent) based boolean expression DSL into a tree of + * {@link RoleMapperExpression} objects. + * Note: As this is a client-side parser, it mostly validates the structure of + * the DSL being parsed; it does not enforce rules + * such as forbidding "except" within "except" or "any" expressions. + */ +public final class RoleMapperExpressionParser { + public static final ParseField FIELD = new ParseField("field"); + + /** + * @param name The name of the expression tree within its containing object. + * Used to provide descriptive error messages. + * @param parser A parser over the XContent (typically JSON) DSL + * representation of the expression + */ + public RoleMapperExpression parse(final String name, final XContentParser parser) throws IOException { + return parseRulesObject(name, parser); + } + + private RoleMapperExpression parseRulesObject(final String objectName, final XContentParser parser) + throws IOException { + // find the start of the DSL object + final XContentParser.Token token; + if (parser.currentToken() == null) { + token = parser.nextToken(); + } else { + token = parser.currentToken(); + } + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchParseException("failed to parse rules expression. expected [{}] to be an object but found [{}] instead", + objectName, token); + } + + final String fieldName = fieldName(objectName, parser); + final RoleMapperExpression expr = parseExpression(parser, fieldName, objectName); + if (parser.nextToken() != XContentParser.Token.END_OBJECT) { + throw new ElasticsearchParseException("failed to parse rules expression. 
object [{}] contains multiple fields", objectName); + } + return expr; + } + + private RoleMapperExpression parseExpression(XContentParser parser, String field, String objectName) + throws IOException { + + if (CompositeType.ANY.getParseField().match(field, parser.getDeprecationHandler())) { + final AnyRoleMapperExpression.Builder builder = AnyRoleMapperExpression.builder(); + parseExpressionArray(CompositeType.ANY.getParseField(), parser).forEach(builder::addExpression); + return builder.build(); + } else if (CompositeType.ALL.getParseField().match(field, parser.getDeprecationHandler())) { + final AllRoleMapperExpression.Builder builder = AllRoleMapperExpression.builder(); + parseExpressionArray(CompositeType.ALL.getParseField(), parser).forEach(builder::addExpression); + return builder.build(); + } else if (FIELD.match(field, parser.getDeprecationHandler())) { + return parseFieldExpression(parser); + } else if (CompositeType.EXCEPT.getParseField().match(field, parser.getDeprecationHandler())) { + return parseExceptExpression(parser); + } else { + throw new ElasticsearchParseException("failed to parse rules expression. field [{}] is not recognised in object [{}]", field, + objectName); + } + } + + private RoleMapperExpression parseFieldExpression(XContentParser parser) throws IOException { + checkStartObject(parser); + final String fieldName = fieldName(FIELD.getPreferredName(), parser); + + final List values; + if (parser.nextToken() == XContentParser.Token.START_ARRAY) { + values = parseArray(FIELD, parser, this::parseFieldValue); + } else { + values = Collections.singletonList(parseFieldValue(parser)); + } + if (parser.nextToken() != XContentParser.Token.END_OBJECT) { + throw new ElasticsearchParseException("failed to parse rules expression. object [{}] contains multiple fields", + FIELD.getPreferredName()); + } + + return FieldRoleMapperExpression.ofKeyValues(fieldName, values.toArray()); + } + + private RoleMapperExpression parseExceptExpression(XContentParser parser) throws IOException { + checkStartObject(parser); + return new ExceptRoleMapperExpression(parseRulesObject(CompositeType.EXCEPT.getName(), parser)); + } + + private void checkStartObject(XContentParser parser) throws IOException { + final XContentParser.Token token = parser.nextToken(); + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchParseException("failed to parse rules expression. expected an object but found [{}] instead", token); + } + } + + private String fieldName(String objectName, XContentParser parser) throws IOException { + if (parser.nextToken() != XContentParser.Token.FIELD_NAME) { + throw new ElasticsearchParseException("failed to parse rules expression. 
object [{}] does not contain any fields", objectName); + } + String parsedFieldName = parser.currentName(); + return parsedFieldName; + } + + private List parseExpressionArray(ParseField field, XContentParser parser) + throws IOException { + parser.nextToken(); // parseArray requires that the parser is positioned + // at the START_ARRAY token + return parseArray(field, parser, p -> parseRulesObject(field.getPreferredName(), p)); + } + + private List parseArray(ParseField field, XContentParser parser, CheckedFunction elementParser) + throws IOException { + final XContentParser.Token token = parser.currentToken(); + if (token == XContentParser.Token.START_ARRAY) { + List list = new ArrayList<>(); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + list.add(elementParser.apply(parser)); + } + return list; + } else { + throw new ElasticsearchParseException("failed to parse rules expression. field [{}] requires an array", field); + } + } + + private Object parseFieldValue(XContentParser parser) throws IOException { + switch (parser.currentToken()) { + case VALUE_STRING: + return parser.text(); + + case VALUE_BOOLEAN: + return parser.booleanValue(); + + case VALUE_NUMBER: + return parser.longValue(); + + case VALUE_NULL: + return null; + + default: + throw new ElasticsearchParseException("failed to parse rules expression. expected a field value but found [{}] instead", parser + .currentToken()); + } + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java index feb57bed9c4..3f90552fe9b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java @@ -21,8 +21,12 @@ package org.elasticsearch.client; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkProcessor; @@ -51,13 +55,18 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.query.IdsQueryBuilder; import org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.index.reindex.ReindexAction; import org.elasticsearch.index.reindex.ReindexRequest; +import org.elasticsearch.index.reindex.UpdateByQueryAction; import org.elasticsearch.index.reindex.UpdateByQueryRequest; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.tasks.RawTaskStatus; +import org.elasticsearch.tasks.TaskId; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; @@ -65,9 +74,15 @@ import org.joda.time.format.DateTimeFormat; 
import java.io.IOException; import java.util.Collections; import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.lessThan; public class CrudIT extends ESRestHighLevelClientTestCase { @@ -631,7 +646,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase { validateBulkResponses(nbItems, errors, bulkResponse, bulkRequest); } - public void testReindex() throws IOException { + public void testReindex() throws Exception { final String sourceIndex = "source1"; final String destinationIndex = "dest"; { @@ -642,15 +657,14 @@ public class CrudIT extends ESRestHighLevelClientTestCase { .build(); createIndex(sourceIndex, settings); createIndex(destinationIndex, settings); + BulkRequest bulkRequest = new BulkRequest() + .add(new IndexRequest(sourceIndex, "type", "1").source(Collections.singletonMap("foo", "bar"), XContentType.JSON)) + .add(new IndexRequest(sourceIndex, "type", "2").source(Collections.singletonMap("foo2", "bar2"), XContentType.JSON)) + .setRefreshPolicy(RefreshPolicy.IMMEDIATE); assertEquals( RestStatus.OK, highLevelClient().bulk( - new BulkRequest() - .add(new IndexRequest(sourceIndex, "type", "1") - .source(Collections.singletonMap("foo", "bar"), XContentType.JSON)) - .add(new IndexRequest(sourceIndex, "type", "2") - .source(Collections.singletonMap("foo2", "bar2"), XContentType.JSON)) - .setRefreshPolicy(RefreshPolicy.IMMEDIATE), + bulkRequest, RequestOptions.DEFAULT ).status() ); @@ -692,9 +706,74 @@ public class CrudIT extends ESRestHighLevelClientTestCase { assertEquals(0, bulkResponse.getBulkFailures().size()); assertEquals(0, bulkResponse.getSearchFailures().size()); } + { + // test reindex rethrottling + ReindexRequest reindexRequest = new ReindexRequest(); + reindexRequest.setSourceIndices(sourceIndex); + reindexRequest.setDestIndex(destinationIndex); + + // this following settings are supposed to halt reindexing after first document + reindexRequest.setSourceBatchSize(1); + reindexRequest.setRequestsPerSecond(0.00001f); + final CountDownLatch reindexTaskFinished = new CountDownLatch(1); + highLevelClient().reindexAsync(reindexRequest, RequestOptions.DEFAULT, new ActionListener() { + + @Override + public void onResponse(BulkByScrollResponse response) { + reindexTaskFinished.countDown(); + } + + @Override + public void onFailure(Exception e) { + fail(e.toString()); + } + }); + + TaskId taskIdToRethrottle = findTaskToRethrottle(ReindexAction.NAME); + float requestsPerSecond = 1000f; + ListTasksResponse response = execute(new RethrottleRequest(taskIdToRethrottle, requestsPerSecond), + highLevelClient()::reindexRethrottle, highLevelClient()::reindexRethrottleAsync); + assertThat(response.getTasks(), hasSize(1)); + assertEquals(taskIdToRethrottle, response.getTasks().get(0).getTaskId()); + assertThat(response.getTasks().get(0).getStatus(), instanceOf(RawTaskStatus.class)); + assertEquals(Float.toString(requestsPerSecond), + ((RawTaskStatus) response.getTasks().get(0).getStatus()).toMap().get("requests_per_second").toString()); + reindexTaskFinished.await(2, TimeUnit.SECONDS); + + // any rethrottling after the reindex is done performed with the same taskId should result in a failure + response = execute(new RethrottleRequest(taskIdToRethrottle, 
requestsPerSecond), + highLevelClient()::reindexRethrottle, highLevelClient()::reindexRethrottleAsync); + assertTrue(response.getTasks().isEmpty()); + assertFalse(response.getNodeFailures().isEmpty()); + assertEquals(1, response.getNodeFailures().size()); + assertEquals("Elasticsearch exception [type=resource_not_found_exception, reason=task [" + taskIdToRethrottle + "] is missing]", + response.getNodeFailures().get(0).getCause().getMessage()); + } } - public void testUpdateByQuery() throws IOException { + private TaskId findTaskToRethrottle(String actionName) throws IOException { + long start = System.nanoTime(); + ListTasksRequest request = new ListTasksRequest(); + request.setActions(actionName); + request.setDetailed(true); + do { + ListTasksResponse list = highLevelClient().tasks().list(request, RequestOptions.DEFAULT); + list.rethrowFailures("Finding tasks to rethrottle"); + assertThat("tasks are left over from the last execution of this test", + list.getTaskGroups(), hasSize(lessThan(2))); + if (0 == list.getTaskGroups().size()) { + // The parent task hasn't started yet + continue; + } + TaskGroup taskGroup = list.getTaskGroups().get(0); + assertThat(taskGroup.getChildTasks(), empty()); + return taskGroup.getTaskInfo().getTaskId(); + } while (System.nanoTime() - start < TimeUnit.SECONDS.toNanos(10)); + throw new AssertionError("Couldn't find tasks to rethrottle. Here are the running tasks " + + highLevelClient().tasks().list(request, RequestOptions.DEFAULT)); + } + + public void testUpdateByQuery() throws Exception { final String sourceIndex = "source1"; { // Prepare @@ -758,9 +837,53 @@ public class CrudIT extends ESRestHighLevelClientTestCase { .getSourceAsMap().get("foo")) ); } + { + // test update-by-query rethrottling + UpdateByQueryRequest updateByQueryRequest = new UpdateByQueryRequest(); + updateByQueryRequest.indices(sourceIndex); + updateByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1").types("type")); + updateByQueryRequest.setRefresh(true); + + // this following settings are supposed to halt reindexing after first document + updateByQueryRequest.setBatchSize(1); + updateByQueryRequest.setRequestsPerSecond(0.00001f); + final CountDownLatch taskFinished = new CountDownLatch(1); + highLevelClient().updateByQueryAsync(updateByQueryRequest, RequestOptions.DEFAULT, new ActionListener() { + + @Override + public void onResponse(BulkByScrollResponse response) { + taskFinished.countDown(); + } + + @Override + public void onFailure(Exception e) { + fail(e.toString()); + } + }); + + TaskId taskIdToRethrottle = findTaskToRethrottle(UpdateByQueryAction.NAME); + float requestsPerSecond = 1000f; + ListTasksResponse response = execute(new RethrottleRequest(taskIdToRethrottle, requestsPerSecond), + highLevelClient()::updateByQueryRethrottle, highLevelClient()::updateByQueryRethrottleAsync); + assertThat(response.getTasks(), hasSize(1)); + assertEquals(taskIdToRethrottle, response.getTasks().get(0).getTaskId()); + assertThat(response.getTasks().get(0).getStatus(), instanceOf(RawTaskStatus.class)); + assertEquals(Float.toString(requestsPerSecond), + ((RawTaskStatus) response.getTasks().get(0).getStatus()).toMap().get("requests_per_second").toString()); + taskFinished.await(2, TimeUnit.SECONDS); + + // any rethrottling after the update-by-query is done performed with the same taskId should result in a failure + response = execute(new RethrottleRequest(taskIdToRethrottle, requestsPerSecond), + highLevelClient()::updateByQueryRethrottle, 
highLevelClient()::updateByQueryRethrottleAsync); + assertTrue(response.getTasks().isEmpty()); + assertFalse(response.getNodeFailures().isEmpty()); + assertEquals(1, response.getNodeFailures().size()); + assertEquals("Elasticsearch exception [type=resource_not_found_exception, reason=task [" + taskIdToRethrottle + "] is missing]", + response.getNodeFailures().get(0).getCause().getMessage()); + } } - public void testDeleteByQuery() throws IOException { + public void testDeleteByQuery() throws Exception { final String sourceIndex = "source1"; { // Prepare @@ -777,6 +900,8 @@ public class CrudIT extends ESRestHighLevelClientTestCase { .source(Collections.singletonMap("foo", 1), XContentType.JSON)) .add(new IndexRequest(sourceIndex, "type", "2") .source(Collections.singletonMap("foo", 2), XContentType.JSON)) + .add(new IndexRequest(sourceIndex, "type", "3") + .source(Collections.singletonMap("foo", 3), XContentType.JSON)) .setRefreshPolicy(RefreshPolicy.IMMEDIATE), RequestOptions.DEFAULT ).status() @@ -800,10 +925,54 @@ public class CrudIT extends ESRestHighLevelClientTestCase { assertEquals(0, bulkResponse.getBulkFailures().size()); assertEquals(0, bulkResponse.getSearchFailures().size()); assertEquals( - 1, + 2, highLevelClient().search(new SearchRequest(sourceIndex), RequestOptions.DEFAULT).getHits().totalHits ); } + { + // test delete-by-query rethrottling + DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(); + deleteByQueryRequest.indices(sourceIndex); + deleteByQueryRequest.setQuery(new IdsQueryBuilder().addIds("2", "3").types("type")); + deleteByQueryRequest.setRefresh(true); + + // this following settings are supposed to halt reindexing after first document + deleteByQueryRequest.setBatchSize(1); + deleteByQueryRequest.setRequestsPerSecond(0.00001f); + final CountDownLatch taskFinished = new CountDownLatch(1); + highLevelClient().deleteByQueryAsync(deleteByQueryRequest, RequestOptions.DEFAULT, new ActionListener() { + + @Override + public void onResponse(BulkByScrollResponse response) { + taskFinished.countDown(); + } + + @Override + public void onFailure(Exception e) { + fail(e.toString()); + } + }); + + TaskId taskIdToRethrottle = findTaskToRethrottle(DeleteByQueryAction.NAME); + float requestsPerSecond = 1000f; + ListTasksResponse response = execute(new RethrottleRequest(taskIdToRethrottle, requestsPerSecond), + highLevelClient()::deleteByQueryRethrottle, highLevelClient()::deleteByQueryRethrottleAsync); + assertThat(response.getTasks(), hasSize(1)); + assertEquals(taskIdToRethrottle, response.getTasks().get(0).getTaskId()); + assertThat(response.getTasks().get(0).getStatus(), instanceOf(RawTaskStatus.class)); + assertEquals(Float.toString(requestsPerSecond), + ((RawTaskStatus) response.getTasks().get(0).getStatus()).toMap().get("requests_per_second").toString()); + taskFinished.await(2, TimeUnit.SECONDS); + + // any rethrottling after the delete-by-query is done performed with the same taskId should result in a failure + response = execute(new RethrottleRequest(taskIdToRethrottle, requestsPerSecond), + highLevelClient()::deleteByQueryRethrottle, highLevelClient()::deleteByQueryRethrottleAsync); + assertTrue(response.getTasks().isEmpty()); + assertFalse(response.getNodeFailures().isEmpty()); + assertEquals(1, response.getNodeFailures().size()); + assertEquals("Elasticsearch exception [type=resource_not_found_exception, reason=task [" + taskIdToRethrottle + "] is missing]", + response.getNodeFailures().get(0).getCause().getMessage()); + } } public void 
testBulkProcessorIntegration() throws IOException { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java index 819e2f63449..ee53da18cd2 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java @@ -44,6 +44,9 @@ import org.elasticsearch.client.ml.PostDataRequest; import org.elasticsearch.client.ml.PutCalendarRequest; import org.elasticsearch.client.ml.PutDatafeedRequest; import org.elasticsearch.client.ml.PutJobRequest; +import org.elasticsearch.client.ml.StartDatafeedRequest; +import org.elasticsearch.client.ml.StartDatafeedRequestTests; +import org.elasticsearch.client.ml.StopDatafeedRequest; import org.elasticsearch.client.ml.UpdateJobRequest; import org.elasticsearch.client.ml.calendars.Calendar; import org.elasticsearch.client.ml.calendars.CalendarTests; @@ -261,6 +264,35 @@ public class MLRequestConvertersTests extends ESTestCase { assertEquals(Boolean.toString(true), request.getParameters().get("force")); } + public void testStartDatafeed() throws Exception { + String datafeedId = DatafeedConfigTests.randomValidDatafeedId(); + StartDatafeedRequest datafeedRequest = StartDatafeedRequestTests.createRandomInstance(datafeedId); + + Request request = MLRequestConverters.startDatafeed(datafeedRequest); + assertEquals(HttpPost.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/ml/datafeeds/" + datafeedId + "/_start", request.getEndpoint()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) { + StartDatafeedRequest parsedDatafeedRequest = StartDatafeedRequest.PARSER.apply(parser, null); + assertThat(parsedDatafeedRequest, equalTo(datafeedRequest)); + } + } + + public void testStopDatafeed() throws Exception { + StopDatafeedRequest datafeedRequest = new StopDatafeedRequest("datafeed_1", "datafeed_2"); + datafeedRequest.setForce(true); + datafeedRequest.setTimeout(TimeValue.timeValueMinutes(10)); + datafeedRequest.setAllowNoDatafeeds(true); + Request request = MLRequestConverters.stopDatafeed(datafeedRequest); + assertEquals(HttpPost.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/ml/datafeeds/" + + Strings.collectionToCommaDelimitedString(datafeedRequest.getDatafeedIds()) + + "/_stop", request.getEndpoint()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) { + StopDatafeedRequest parsedDatafeedRequest = StopDatafeedRequest.PARSER.apply(parser, null); + assertThat(parsedDatafeedRequest, equalTo(datafeedRequest)); + } + } + public void testDeleteForecast() { String jobId = randomAlphaOfLength(10); DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest(jobId); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java index ddaec641573..751f4cfdf0e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java @@ -147,7 +147,7 @@ public class MachineLearningGetResultsIT extends ESRestHighLevelClientTestCase { @After public void deleteJob() throws IOException { - new 
MlRestTestStateCleaner(logger, client()).clearMlMetadata(); + new MlTestStateCleaner(logger, highLevelClient().machineLearning()).clearMlMetadata(); } public void testGetCategories() throws IOException { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java index 19ca737d6e9..a8050397ad1 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -20,8 +20,12 @@ package org.elasticsearch.client; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ml.CloseJobRequest; import org.elasticsearch.client.ml.CloseJobResponse; @@ -51,6 +55,10 @@ import org.elasticsearch.client.ml.PutDatafeedRequest; import org.elasticsearch.client.ml.PutDatafeedResponse; import org.elasticsearch.client.ml.PutJobRequest; import org.elasticsearch.client.ml.PutJobResponse; +import org.elasticsearch.client.ml.StartDatafeedRequest; +import org.elasticsearch.client.ml.StartDatafeedResponse; +import org.elasticsearch.client.ml.StopDatafeedRequest; +import org.elasticsearch.client.ml.StopDatafeedResponse; import org.elasticsearch.client.ml.UpdateJobRequest; import org.elasticsearch.client.ml.calendars.Calendar; import org.elasticsearch.client.ml.calendars.CalendarTests; @@ -63,6 +71,7 @@ import org.elasticsearch.client.ml.job.config.JobState; import org.elasticsearch.client.ml.job.config.JobUpdate; import org.elasticsearch.client.ml.job.stats.JobStats; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.RestStatus; import org.junit.After; @@ -83,7 +92,7 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { @After public void cleanUp() throws IOException { - new MlRestTestStateCleaner(logger, client()).clearMlMetadata(); + new MlTestStateCleaner(logger, highLevelClient().machineLearning()).clearMlMetadata(); } public void testPutJob() throws Exception { @@ -416,6 +425,145 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { assertTrue(response.isAcknowledged()); } + public void testStartDatafeed() throws Exception { + String jobId = "test-start-datafeed"; + String indexName = "start_data_1"; + + // Set up the index and docs + CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName); + createIndexRequest.mapping("doc", "timestamp", "type=date", "total", "type=long"); + highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT); + BulkRequest bulk = new BulkRequest(); + bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + long now = (System.currentTimeMillis()/1000)*1000; + long thePast = now - 60000; + int i = 0; + long pastCopy = thePast; + while(pastCopy < now) { + IndexRequest doc = new IndexRequest(); + doc.index(indexName); + doc.type("doc"); + doc.id("id" + i); + doc.source("{\"total\":" 
+randomInt(1000) + ",\"timestamp\":"+ pastCopy +"}", XContentType.JSON); + bulk.add(doc); + pastCopy += 1000; + i++; + } + highLevelClient().bulk(bulk, RequestOptions.DEFAULT); + final long totalDocCount = i; + + // create the job and the datafeed + Job job = buildJob(jobId); + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + machineLearningClient.openJob(new OpenJobRequest(jobId), RequestOptions.DEFAULT); + + String datafeedId = jobId + "-feed"; + DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId) + .setIndices(indexName) + .setQueryDelay(TimeValue.timeValueSeconds(1)) + .setTypes(Arrays.asList("doc")) + .setFrequency(TimeValue.timeValueSeconds(1)).build(); + machineLearningClient.putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT); + + + StartDatafeedRequest startDatafeedRequest = new StartDatafeedRequest(datafeedId); + startDatafeedRequest.setStart(String.valueOf(thePast)); + // Should only process two documents + startDatafeedRequest.setEnd(String.valueOf(thePast + 2000)); + StartDatafeedResponse response = execute(startDatafeedRequest, + machineLearningClient::startDatafeed, + machineLearningClient::startDatafeedAsync); + + assertTrue(response.isStarted()); + + assertBusy(() -> { + JobStats stats = machineLearningClient.getJobStats(new GetJobStatsRequest(jobId), RequestOptions.DEFAULT).jobStats().get(0); + assertEquals(2L, stats.getDataCounts().getInputRecordCount()); + assertEquals(JobState.CLOSED, stats.getState()); + }, 30, TimeUnit.SECONDS); + + machineLearningClient.openJob(new OpenJobRequest(jobId), RequestOptions.DEFAULT); + StartDatafeedRequest wholeDataFeed = new StartDatafeedRequest(datafeedId); + // Process all documents and end the stream + wholeDataFeed.setEnd(String.valueOf(now)); + StartDatafeedResponse wholeResponse = execute(wholeDataFeed, + machineLearningClient::startDatafeed, + machineLearningClient::startDatafeedAsync); + assertTrue(wholeResponse.isStarted()); + + assertBusy(() -> { + JobStats stats = machineLearningClient.getJobStats(new GetJobStatsRequest(jobId), RequestOptions.DEFAULT).jobStats().get(0); + assertEquals(totalDocCount, stats.getDataCounts().getInputRecordCount()); + assertEquals(JobState.CLOSED, stats.getState()); + }, 30, TimeUnit.SECONDS); + } + + public void testStopDatafeed() throws Exception { + String jobId1 = "test-stop-datafeed1"; + String jobId2 = "test-stop-datafeed2"; + String jobId3 = "test-stop-datafeed3"; + String indexName = "stop_data_1"; + + // Set up the index + CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName); + createIndexRequest.mapping("doc", "timestamp", "type=date", "total", "type=long"); + highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT); + + // create the job and the datafeed + Job job1 = buildJob(jobId1); + putJob(job1); + openJob(job1); + + Job job2 = buildJob(jobId2); + putJob(job2); + openJob(job2); + + Job job3 = buildJob(jobId3); + putJob(job3); + openJob(job3); + + String datafeedId1 = createAndPutDatafeed(jobId1, indexName); + String datafeedId2 = createAndPutDatafeed(jobId2, indexName); + String datafeedId3 = createAndPutDatafeed(jobId3, indexName); + + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + + machineLearningClient.startDatafeed(new StartDatafeedRequest(datafeedId1), RequestOptions.DEFAULT); + machineLearningClient.startDatafeed(new 
StartDatafeedRequest(datafeedId2), RequestOptions.DEFAULT); + machineLearningClient.startDatafeed(new StartDatafeedRequest(datafeedId3), RequestOptions.DEFAULT); + + { + StopDatafeedRequest request = new StopDatafeedRequest(datafeedId1); + request.setAllowNoDatafeeds(false); + StopDatafeedResponse stopDatafeedResponse = execute(request, + machineLearningClient::stopDatafeed, + machineLearningClient::stopDatafeedAsync); + assertTrue(stopDatafeedResponse.isStopped()); + } + { + StopDatafeedRequest request = new StopDatafeedRequest(datafeedId2, datafeedId3); + request.setAllowNoDatafeeds(false); + StopDatafeedResponse stopDatafeedResponse = execute(request, + machineLearningClient::stopDatafeed, + machineLearningClient::stopDatafeedAsync); + assertTrue(stopDatafeedResponse.isStopped()); + } + { + StopDatafeedResponse stopDatafeedResponse = execute(new StopDatafeedRequest("datafeed_that_doesnot_exist*"), + machineLearningClient::stopDatafeed, + machineLearningClient::stopDatafeedAsync); + assertTrue(stopDatafeedResponse.isStopped()); + } + { + StopDatafeedRequest request = new StopDatafeedRequest("datafeed_that_doesnot_exist*"); + request.setAllowNoDatafeeds(false); + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, + () -> execute(request, machineLearningClient::stopDatafeed, machineLearningClient::stopDatafeedAsync)); + assertThat(exception.status().getStatus(), equalTo(404)); + } + } + public void testDeleteForecast() throws Exception { String jobId = "test-delete-forecast"; @@ -551,7 +699,8 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { .setDetectorDescription(randomAlphaOfLength(10)) .build(); AnalysisConfig.Builder configBuilder = new AnalysisConfig.Builder(Arrays.asList(detector)); - configBuilder.setBucketSpan(new TimeValue(randomIntBetween(1, 10), TimeUnit.SECONDS)); + // should not be random, see: https://github.com/elastic/ml-cpp/issues/208 + configBuilder.setBucketSpan(new TimeValue(5, TimeUnit.SECONDS)); builder.setAnalysisConfig(configBuilder); DataDescription.Builder dataDescription = new DataDescription.Builder(); @@ -561,4 +710,23 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { return builder.build(); } + + private void putJob(Job job) throws IOException { + highLevelClient().machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + } + + private void openJob(Job job) throws IOException { + highLevelClient().machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT); + } + + private String createAndPutDatafeed(String jobId, String indexName) throws IOException { + String datafeedId = jobId + "-feed"; + DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId) + .setIndices(indexName) + .setQueryDelay(TimeValue.timeValueSeconds(1)) + .setTypes(Arrays.asList("doc")) + .setFrequency(TimeValue.timeValueSeconds(1)).build(); + highLevelClient().machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT); + return datafeedId; + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java deleted file mode 100644 index 7ad86576245..00000000000 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements.
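Together with testStartDatafeed, the stop tests cover a single id, multiple ids, and wildcard expansion under allowNoDatafeeds. A condensed lifecycle sketch, assuming the job is open and the datafeed exists:

--------------------------------------
MachineLearningClient ml = highLevelClient().machineLearning();
ml.startDatafeed(new StartDatafeedRequest(datafeedId), RequestOptions.DEFAULT);

StopDatafeedRequest stop = new StopDatafeedRequest(datafeedId);
stop.setAllowNoDatafeeds(false); // an id that matches nothing then fails with a 404
stop.setTimeout(TimeValue.timeValueMinutes(10));
StopDatafeedResponse stopped = ml.stopDatafeed(stop, RequestOptions.DEFAULT);
assertTrue(stopped.isStopped());
--------------------------------------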
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.client; - -import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.test.rest.ESRestTestCase; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -/** - * This is temporarily duplicated from the server side. - * @TODO Replace with an implementation using the HLRC once - * the APIs for managing datafeeds are implemented. - */ -public class MlRestTestStateCleaner { - - private final Logger logger; - private final RestClient adminClient; - - public MlRestTestStateCleaner(Logger logger, RestClient adminClient) { - this.logger = logger; - this.adminClient = adminClient; - } - - public void clearMlMetadata() throws IOException { - deleteAllDatafeeds(); - deleteAllJobs(); - // indices will be deleted by the ESRestTestCase class - } - - @SuppressWarnings("unchecked") - private void deleteAllDatafeeds() throws IOException { - final Request datafeedsRequest = new Request("GET", "/_xpack/ml/datafeeds"); - datafeedsRequest.addParameter("filter_path", "datafeeds"); - final Response datafeedsResponse = adminClient.performRequest(datafeedsRequest); - final List> datafeeds = - (List>) XContentMapValues.extractValue("datafeeds", ESRestTestCase.entityAsMap(datafeedsResponse)); - if (datafeeds == null) { - return; - } - - try { - adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop")); - } catch (Exception e1) { - logger.warn("failed to stop all datafeeds. Forcing stop", e1); - try { - adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop?force=true")); - } catch (Exception e2) { - logger.warn("Force-closing all data feeds failed", e2); - } - throw new RuntimeException( - "Had to resort to force-stopping datafeeds, something went wrong?", e1); - } - - for (Map datafeed : datafeeds) { - String datafeedId = (String) datafeed.get("datafeed_id"); - adminClient.performRequest(new Request("DELETE", "/_xpack/ml/datafeeds/" + datafeedId)); - } - } - - private void deleteAllJobs() throws IOException { - final Request jobsRequest = new Request("GET", "/_xpack/ml/anomaly_detectors"); - jobsRequest.addParameter("filter_path", "jobs"); - final Response response = adminClient.performRequest(jobsRequest); - @SuppressWarnings("unchecked") - final List> jobConfigs = - (List>) XContentMapValues.extractValue("jobs", ESRestTestCase.entityAsMap(response)); - if (jobConfigs == null) { - return; - } - - try { - adminClient.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/_all/_close")); - } catch (Exception e1) { - logger.warn("failed to close all jobs. 
Forcing closed", e1); - try { - adminClient.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/_all/_close?force=true")); - } catch (Exception e2) { - logger.warn("Force-closing all jobs failed", e2); - } - throw new RuntimeException("Had to resort to force-closing jobs, something went wrong?", - e1); - } - - for (Map jobConfig : jobConfigs) { - String jobId = (String) jobConfig.get("job_id"); - adminClient.performRequest(new Request("DELETE", "/_xpack/ml/anomaly_detectors/" + jobId)); - } - } -} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MlTestStateCleaner.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MlTestStateCleaner.java new file mode 100644 index 00000000000..c565af7c372 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MlTestStateCleaner.java @@ -0,0 +1,102 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.client.ml.CloseJobRequest; +import org.elasticsearch.client.ml.DeleteDatafeedRequest; +import org.elasticsearch.client.ml.DeleteJobRequest; +import org.elasticsearch.client.ml.GetDatafeedRequest; +import org.elasticsearch.client.ml.GetDatafeedResponse; +import org.elasticsearch.client.ml.GetJobRequest; +import org.elasticsearch.client.ml.GetJobResponse; +import org.elasticsearch.client.ml.StopDatafeedRequest; +import org.elasticsearch.client.ml.datafeed.DatafeedConfig; +import org.elasticsearch.client.ml.job.config.Job; + +import java.io.IOException; + +/** + * Cleans up and ML resources created during tests + */ +public class MlTestStateCleaner { + + private final Logger logger; + private final MachineLearningClient mlClient; + + public MlTestStateCleaner(Logger logger, MachineLearningClient mlClient) { + this.logger = logger; + this.mlClient = mlClient; + } + + public void clearMlMetadata() throws IOException { + deleteAllDatafeeds(); + deleteAllJobs(); + } + + private void deleteAllDatafeeds() throws IOException { + stopAllDatafeeds(); + + GetDatafeedResponse getDatafeedResponse = mlClient.getDatafeed(GetDatafeedRequest.getAllDatafeedsRequest(), RequestOptions.DEFAULT); + for (DatafeedConfig datafeed : getDatafeedResponse.datafeeds()) { + mlClient.deleteDatafeed(new DeleteDatafeedRequest(datafeed.getId()), RequestOptions.DEFAULT); + } + } + + private void stopAllDatafeeds() { + StopDatafeedRequest stopAllDatafeedsRequest = StopDatafeedRequest.stopAllDatafeedsRequest(); + try { + mlClient.stopDatafeed(stopAllDatafeedsRequest, RequestOptions.DEFAULT); + } catch (Exception e1) { + logger.warn("failed to stop all datafeeds. 
Forcing stop", e1); + try { + stopAllDatafeedsRequest.setForce(true); + mlClient.stopDatafeed(stopAllDatafeedsRequest, RequestOptions.DEFAULT); + } catch (Exception e2) { + logger.warn("Force-closing all data feeds failed", e2); + } + throw new RuntimeException("Had to resort to force-stopping datafeeds, something went wrong?", e1); + } + } + + private void deleteAllJobs() throws IOException { + closeAllJobs(); + + GetJobResponse getJobResponse = mlClient.getJob(GetJobRequest.getAllJobsRequest(), RequestOptions.DEFAULT); + for (Job job : getJobResponse.jobs()) { + mlClient.deleteJob(new DeleteJobRequest(job.getId()), RequestOptions.DEFAULT); + } + } + + private void closeAllJobs() { + CloseJobRequest closeAllJobsRequest = CloseJobRequest.closeAllJobsRequest(); + try { + mlClient.closeJob(closeAllJobsRequest, RequestOptions.DEFAULT); + } catch (Exception e1) { + logger.warn("failed to close all jobs. Forcing closed", e1); + closeAllJobsRequest.setForce(true); + try { + mlClient.closeJob(closeAllJobsRequest, RequestOptions.DEFAULT); + } catch (Exception e2) { + logger.warn("Force-closing all jobs failed", e2); + } + throw new RuntimeException("Had to resort to force-closing jobs, something went wrong?", e1); + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java index 0af270cb051..15272ad80a6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java @@ -90,7 +90,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase { if (id.equals("berlin") || id.equals("amsterdam5")) { assertFalse(hit.getRating().isPresent()); } else { - assertEquals(1, hit.getRating().get().intValue()); + assertEquals(1, hit.getRating().getAsInt()); } } EvalQueryQuality berlinQueryQuality = partialResults.get("berlin_query"); @@ -100,7 +100,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase { for (RatedSearchHit hit : hitsAndRatings) { String id = hit.getSearchHit().getId(); if (id.equals("berlin")) { - assertEquals(1, hit.getRating().get().intValue()); + assertEquals(1, hit.getRating().getAsInt()); } else { assertFalse(hit.getRating().isPresent()); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 6d073a7a60a..3801dfe71de 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -59,6 +59,7 @@ import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.unit.TimeValue; @@ -95,6 +96,7 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.test.RandomObjects; @@ -317,6 +319,13 @@ public class RequestConvertersTests extends ESTestCase { if (randomBoolean()) { reindexRequest.setDestPipeline("my_pipeline"); } + if (randomBoolean()) { + float requestsPerSecond = (float) randomDoubleBetween(0.0, 10.0, false); + expectedParams.put(RethrottleRequest.REQUEST_PER_SECOND_PARAMETER, Float.toString(requestsPerSecond)); + reindexRequest.setRequestsPerSecond(requestsPerSecond); + } else { + expectedParams.put(RethrottleRequest.REQUEST_PER_SECOND_PARAMETER, "-1"); + } if (randomBoolean()) { reindexRequest.setDestRouting("=cat"); } @@ -359,6 +368,13 @@ public class RequestConvertersTests extends ESTestCase { updateByQueryRequest.setPipeline("my_pipeline"); expectedParams.put("pipeline", "my_pipeline"); } + if (randomBoolean()) { + float requestsPerSecond = (float) randomDoubleBetween(0.0, 10.0, false); + expectedParams.put("requests_per_second", Float.toString(requestsPerSecond)); + updateByQueryRequest.setRequestsPerSecond(requestsPerSecond); + } else { + expectedParams.put("requests_per_second", "-1"); + } if (randomBoolean()) { updateByQueryRequest.setRouting("=cat"); expectedParams.put("routing", "=cat"); @@ -430,6 +446,13 @@ public class RequestConvertersTests extends ESTestCase { if (randomBoolean()) { deleteByQueryRequest.setQuery(new TermQueryBuilder("foo", "fooval")); } + if (randomBoolean()) { + float requestsPerSecond = (float) randomDoubleBetween(0.0, 10.0, false); + expectedParams.put("requests_per_second", Float.toString(requestsPerSecond)); + deleteByQueryRequest.setRequestsPerSecond(requestsPerSecond); + } else { + expectedParams.put("requests_per_second", "-1"); + } setRandomIndicesOptions(deleteByQueryRequest::setIndicesOptions, deleteByQueryRequest::indicesOptions, expectedParams); setRandomTimeout(deleteByQueryRequest::setTimeout, ReplicationRequest.DEFAULT_TIMEOUT, expectedParams); Request request = RequestConverters.deleteByQuery(deleteByQueryRequest); @@ -444,6 +467,43 @@ public class RequestConvertersTests extends ESTestCase { assertToXContentBody(deleteByQueryRequest, request.getEntity()); } + public void testRethrottle() { + TaskId taskId = new TaskId(randomAlphaOfLength(10), randomIntBetween(1, 100)); + RethrottleRequest rethrottleRequest; + Float requestsPerSecond; + Map expectedParams = new HashMap<>(); + if (frequently()) { + requestsPerSecond = (float) randomDoubleBetween(0.0, 100.0, true); + rethrottleRequest = new RethrottleRequest(taskId, requestsPerSecond); + expectedParams.put(RethrottleRequest.REQUEST_PER_SECOND_PARAMETER, Float.toString(requestsPerSecond)); + } else { + rethrottleRequest = new RethrottleRequest(taskId); + expectedParams.put(RethrottleRequest.REQUEST_PER_SECOND_PARAMETER, "-1"); + } + expectedParams.put("group_by", "none"); + List>> variants = new ArrayList<>(); + variants.add(new Tuple>("_reindex", () -> RequestConverters.rethrottleReindex(rethrottleRequest))); + variants.add(new Tuple>("_update_by_query", + () -> RequestConverters.rethrottleUpdateByQuery(rethrottleRequest))); + variants.add(new Tuple>("_delete_by_query", + () -> RequestConverters.rethrottleDeleteByQuery(rethrottleRequest))); + + for (Tuple> variant : variants) { + Request request = variant.v2().get(); + assertEquals("/" + variant.v1() + "/" + taskId + "/_rethrottle", request.getEndpoint()); + assertEquals(HttpPost.METHOD_NAME, request.getMethod()); + assertEquals(expectedParams, request.getParameters()); + assertNull(request.getEntity()); + } + + // test illegal RethrottleRequest values + Exception e 
= expectThrows(NullPointerException.class, () -> new RethrottleRequest(null, 1.0f)); + assertEquals("taskId cannot be null", e.getMessage()); + + e = expectThrows(IllegalArgumentException.class, () -> new RethrottleRequest(new TaskId("taskId", 1), -5.0f)); + assertEquals("requestsPerSecond needs to be positive value but was [-5.0]", e.getMessage()); + } + public void testIndex() throws IOException { String index = randomAlphaOfLengthBetween(3, 10); String type = randomAlphaOfLengthBetween(3, 10); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index ca6043768df..acdfc50b5a1 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.client; import com.fasterxml.jackson.core.JsonParseException; + import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; @@ -658,7 +659,6 @@ public class RestHighLevelClientTests extends ESTestCase { "indices.get_upgrade", "indices.put_alias", "mtermvectors", - "reindex_rethrottle", "render_search_template", "scripts_painless_execute", "tasks.get", diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java index 5d88b3f2e29..9898dc971f9 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java @@ -27,6 +27,10 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.rollup.GetRollupJobRequest; +import org.elasticsearch.client.rollup.GetRollupJobResponse; +import org.elasticsearch.client.rollup.GetRollupJobResponse.IndexerState; +import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper; import org.elasticsearch.client.rollup.PutRollupJobRequest; import org.elasticsearch.client.rollup.PutRollupJobResponse; import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig; @@ -50,6 +54,13 @@ import java.util.Locale; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.lessThan; public class RollupIT extends ESRestHighLevelClientTestCase { @@ -57,7 +68,7 @@ public class RollupIT extends ESRestHighLevelClientTestCase { SumAggregationBuilder.NAME, AvgAggregationBuilder.NAME, ValueCountAggregationBuilder.NAME); @SuppressWarnings("unchecked") - public void testPutRollupJob() throws Exception { + public void testPutAndGetRollupJob() throws Exception { double sum = 0.0d; int max = Integer.MIN_VALUE; int min = Integer.MAX_VALUE; @@ -90,7 +101,7 @@ public class RollupIT extends ESRestHighLevelClientTestCase { BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT); 
assertEquals(RestStatus.OK, bulkResponse.status()); - if (bulkResponse.hasFailures()) { + if (bulkResponse.hasFailures()) { for (BulkItemResponse itemResponse : bulkResponse.getItems()) { if (itemResponse.isFailed()) { logger.fatal(itemResponse.getFailureMessage()); @@ -158,5 +169,26 @@ public class RollupIT extends ESRestHighLevelClientTestCase { } } }); + + // TODO when we move cleaning rollup into ESTestCase we can randomly choose the _all version of this request + GetRollupJobRequest getRollupJobRequest = new GetRollupJobRequest(id); + GetRollupJobResponse getResponse = execute(getRollupJobRequest, rollupClient::getRollupJob, rollupClient::getRollupJobAsync); + assertThat(getResponse.getJobs(), hasSize(1)); + JobWrapper job = getResponse.getJobs().get(0); + assertEquals(putRollupJobRequest.getConfig(), job.getJob()); + assertThat(job.getStats().getNumPages(), lessThan(10L)); + assertEquals(numDocs, job.getStats().getNumDocuments()); + assertThat(job.getStats().getNumInvocations(), greaterThan(0L)); + assertEquals(1, job.getStats().getOutputDocuments()); + assertThat(job.getStatus().getState(), either(equalTo(IndexerState.STARTED)).or(equalTo(IndexerState.INDEXING))); + assertThat(job.getStatus().getCurrentPosition(), hasKey("date.date_histogram")); + assertEquals(true, job.getStatus().getUpgradedDocumentId()); + } + + public void testGetMissingRollupJob() throws Exception { + GetRollupJobRequest getRollupJobRequest = new GetRollupJobRequest("missing"); + RollupClient rollupClient = highLevelClient().rollup(); + GetRollupJobResponse getResponse = execute(getRollupJobRequest, rollupClient::getRollupJob, rollupClient::getRollupJobAsync); + assertThat(getResponse.getJobs(), empty()); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupRequestConvertersTests.java new file mode 100644 index 00000000000..df7b2bbfca1 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupRequestConvertersTests.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.rollup.GetRollupJobRequest; +import org.elasticsearch.client.rollup.PutRollupJobRequest; +import org.elasticsearch.client.rollup.job.config.RollupJobConfig; +import org.elasticsearch.client.rollup.job.config.RollupJobConfigTests; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.nullValue; + +public class RollupRequestConvertersTests extends ESTestCase { + public void testPutJob() throws IOException { + String job = randomAlphaOfLength(5); + + RollupJobConfig config = RollupJobConfigTests.randomRollupJobConfig(job); + PutRollupJobRequest put = new PutRollupJobRequest(config); + + Request request = RollupRequestConverters.putJob(put); + assertThat(request.getEndpoint(), equalTo("/_xpack/rollup/job/" + job)); + assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod())); + assertThat(request.getParameters().keySet(), empty()); + RequestConvertersTests.assertToXContentBody(put, request.getEntity()); + } + + public void testGetJob() { + boolean getAll = randomBoolean(); + String job = getAll ? "_all" : RequestConvertersTests.randomIndicesNames(1, 1)[0]; + GetRollupJobRequest get = getAll ? new GetRollupJobRequest() : new GetRollupJobRequest(job); + + Request request = RollupRequestConverters.getJob(get); + assertThat(request.getEndpoint(), equalTo("/_xpack/rollup/job/" + job)); + assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); + assertThat(request.getParameters().keySet(), empty()); + assertThat(request.getEntity(), nullValue()); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityRequestConvertersTests.java index 3670379cd9f..0741c6f72d9 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityRequestConvertersTests.java @@ -19,9 +19,11 @@ package org.elasticsearch.client; +import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.elasticsearch.client.security.DisableUserRequest; import org.elasticsearch.client.security.EnableUserRequest; +import org.elasticsearch.client.security.ChangePasswordRequest; import org.elasticsearch.client.security.PutUserRequest; import org.elasticsearch.client.security.RefreshPolicy; import org.elasticsearch.test.ESTestCase; @@ -91,9 +93,34 @@ public class SecurityRequestConvertersTests extends ESTestCase { private static Map getExpectedParamsFromRefreshPolicy(RefreshPolicy refreshPolicy) { if (refreshPolicy != RefreshPolicy.NONE) { - return Collections.singletonMap("refresh", refreshPolicy.getValue()); + return Collections.singletonMap("refresh", refreshPolicy.getValue()); } else { return Collections.emptyMap(); } } + + public void testChangePassword() throws IOException { + final String username = randomAlphaOfLengthBetween(4, 12); + final char[] password = randomAlphaOfLengthBetween(8, 12).toCharArray(); + final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values()); + final Map expectedParams = getExpectedParamsFromRefreshPolicy(refreshPolicy); + ChangePasswordRequest changePasswordRequest = new 
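A sketch of the endpoint translation these rollup converter tests pin down, with job_1 standing in for a real job id:

--------------------------------------
// PUT /_xpack/rollup/job/job_1, the RollupJobConfig serialized as the body
Request put = RollupRequestConverters.putJob(new PutRollupJobRequest(config));

// GET /_xpack/rollup/job/job_1; the no-argument GetRollupJobRequest maps to .../job/_all
Request get = RollupRequestConverters.getJob(new GetRollupJobRequest("job_1"));
--------------------------------------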
ChangePasswordRequest(username, password, refreshPolicy); + Request request = SecurityRequestConverters.changePassword(changePasswordRequest); + assertEquals(HttpPost.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/security/user/" + changePasswordRequest.getUsername() + "/_password", request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + assertToXContentBody(changePasswordRequest, request.getEntity()); + } + + public void testSelfChangePassword() throws IOException { + final char[] password = randomAlphaOfLengthBetween(8, 12).toCharArray(); + final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values()); + final Map expectedParams = getExpectedParamsFromRefreshPolicy(refreshPolicy); + ChangePasswordRequest changePasswordRequest = new ChangePasswordRequest(null, password, refreshPolicy); + Request request = SecurityRequestConverters.changePassword(changePasswordRequest); + assertEquals(HttpPost.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/security/user/_password", request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + assertToXContentBody(changePasswordRequest, request.getEntity()); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 142eacd820f..6584381223c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkProcessor; @@ -50,6 +51,7 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.client.RethrottleRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; @@ -75,6 +77,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.tasks.TaskId; import java.util.Collections; import java.util.Date; @@ -92,25 +95,12 @@ import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.not; /** - * This class is used to generate the Java CRUD API documentation. - * You need to wrap your code between two tags like: - * // tag::example - * // end::example - * - * Where example is your tag name. - * - * Then in the documentation, you can extract what is between tag and end tags with - * ["source","java",subs="attributes,callouts,macros"] - * -------------------------------------------------- - * include-tagged::{doc-tests}/CRUDDocumentationIT.java[example] - * -------------------------------------------------- - * - * The column width of the code block is 84. 
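The change-password converter distinguishes a named user from the self-service form by whether a username is present. The two endpoint shapes verified above, with jacknich as a hypothetical username:

--------------------------------------
// POST /_xpack/security/user/jacknich/_password
new ChangePasswordRequest("jacknich", password, RefreshPolicy.NONE);

// POST /_xpack/security/user/_password, changing the authenticated user's own password
new ChangePasswordRequest(null, password, RefreshPolicy.NONE);
--------------------------------------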
If the code contains a line longer - * than 84, the line will be cut and a horizontal scroll bar will be displayed. - * (the code indentation of the tag is not included in the width) + * Documentation for CRUD APIs in the high level java client. + * Code wrapped in {@code tag} and {@code end} tags is included in the docs. */ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { + @SuppressWarnings("unused") public void testIndex() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -275,6 +265,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testUpdate() throws Exception { RestHighLevelClient client = highLevelClient(); { @@ -543,6 +534,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testDelete() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -662,6 +654,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testBulk() throws Exception { RestHighLevelClient client = highLevelClient(); { @@ -764,6 +757,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testReindex() throws Exception { RestHighLevelClient client = highLevelClient(); { @@ -902,6 +896,59 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") + public void testReindexRethrottle() throws Exception { + RestHighLevelClient client = highLevelClient(); + TaskId taskId = new TaskId("oTUltX4IQMOUUVeiohTt8A:124"); + { + // tag::rethrottle-disable-request + RethrottleRequest request = new RethrottleRequest(taskId); // <1> + // end::rethrottle-disable-request + } + + { + // tag::rethrottle-request + RethrottleRequest request = new RethrottleRequest(taskId, 100.0f); // <1> + // end::rethrottle-request + } + + { + RethrottleRequest request = new RethrottleRequest(taskId); + // tag::rethrottle-request-execution + client.reindexRethrottle(request, RequestOptions.DEFAULT); // <1> + client.updateByQueryRethrottle(request, RequestOptions.DEFAULT); // <2> + client.deleteByQueryRethrottle(request, RequestOptions.DEFAULT); // <3> + // end::rethrottle-request-execution + } + + // tag::rethrottle-request-async-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(ListTasksResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::rethrottle-request-async-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(3); + listener = new LatchedActionListener<>(listener, latch); + + RethrottleRequest request = new RethrottleRequest(taskId); + // tag::rethrottle-execute-async + client.reindexRethrottleAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.updateByQueryRethrottleAsync(request, RequestOptions.DEFAULT, listener); // <2> + client.deleteByQueryRethrottleAsync(request, RequestOptions.DEFAULT, listener); // <3> + // end::rethrottle-execute-async + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + + @SuppressWarnings("unused") public void testUpdateByQuery() throws Exception { RestHighLevelClient client = highLevelClient(); { @@ -1021,6 +1068,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void 
testDeleteByQuery() throws Exception { RestHighLevelClient client = highLevelClient(); { @@ -1128,6 +1176,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testGet() throws Exception { RestHighLevelClient client = highLevelClient(); { @@ -1442,6 +1491,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testMultiGet() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java index dedd50096f8..a9576440438 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java @@ -55,22 +55,8 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.notNullValue; /** - * This class is used to generate the Java Cluster API documentation. - * You need to wrap your code between two tags like: - * // tag::example - * // end::example - * - * Where example is your tag name. - * - * Then in the documentation, you can extract what is between tag and end tags with - * ["source","java",subs="attributes,callouts,macros"] - * -------------------------------------------------- - * include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[example] - * -------------------------------------------------- - * - * The column width of the code block is 84. If the code contains a line longer - * than 84, the line will be cut and a horizontal scroll bar will be displayed. - * (the code indentation of the tag is not included in the width) + * Documentation for Cluster APIs in the high level java client. + * Code wrapped in {@code tag} and {@code end} tags is included in the docs. 
*/ public class ClusterClientDocumentationIT extends ESRestHighLevelClientTestCase { @@ -192,6 +178,7 @@ public class ClusterClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testClusterGetSettings() throws IOException { RestHighLevelClient client = highLevelClient(); @@ -257,6 +244,7 @@ public class ClusterClientDocumentationIT extends ESRestHighLevelClientTestCase assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + @SuppressWarnings("unused") public void testClusterHealth() throws IOException { RestHighLevelClient client = highLevelClient(); client.indices().create(new CreateIndexRequest("index"), RequestOptions.DEFAULT); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 2da4d306c28..c66baf69d96 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -706,6 +706,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testGetFieldMapping() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); @@ -891,6 +892,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testRefreshIndex() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -959,6 +961,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testFlushIndex() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1035,6 +1038,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testSyncedFlushIndex() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1308,6 +1312,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + @SuppressWarnings("unused") public void testForceMergeIndex() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1381,6 +1386,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testClearCache() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1527,6 +1533,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testExistsAlias() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1590,6 +1597,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testUpdateAliases() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1915,6 +1923,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + @SuppressWarnings("unused") public void testGetAlias() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1985,6 +1994,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public 
void testIndexPutSettings() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -2315,6 +2325,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + @SuppressWarnings("unused") public void testValidateQuery() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java index 4702c34c6de..00bee27807f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java @@ -143,6 +143,7 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testGetPipeline() throws IOException { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index 36d5a08d6d3..a9fbb56f68f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.client.documentation; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; @@ -29,7 +30,7 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.MachineLearningGetResultsIT; import org.elasticsearch.client.MachineLearningIT; -import org.elasticsearch.client.MlRestTestStateCleaner; +import org.elasticsearch.client.MlTestStateCleaner; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.ml.CloseJobRequest; @@ -70,6 +71,10 @@ import org.elasticsearch.client.ml.PutDatafeedRequest; import org.elasticsearch.client.ml.PutDatafeedResponse; import org.elasticsearch.client.ml.PutJobRequest; import org.elasticsearch.client.ml.PutJobResponse; +import org.elasticsearch.client.ml.StartDatafeedRequest; +import org.elasticsearch.client.ml.StartDatafeedResponse; +import org.elasticsearch.client.ml.StopDatafeedRequest; +import org.elasticsearch.client.ml.StopDatafeedResponse; import org.elasticsearch.client.ml.UpdateJobRequest; import org.elasticsearch.client.ml.calendars.Calendar; import org.elasticsearch.client.ml.datafeed.ChunkingConfig; @@ -121,7 +126,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { @After public void cleanUp() throws IOException { - new MlRestTestStateCleaner(logger, client()).clearMlMetadata(); + new MlTestStateCleaner(logger, highLevelClient().machineLearning()).clearMlMetadata(); } public void testCreateJob() throws Exception { @@ -703,6 +708,120 @@ public class 
MlClientDocumentationIT extends ESRestHighLevelClientTestCase { } } + public void testStartDatafeed() throws Exception { + RestHighLevelClient client = highLevelClient(); + + Job job = MachineLearningIT.buildJob("start-datafeed-job"); + client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + String datafeedId = job.getId() + "-feed"; + String indexName = "start_data_2"; + CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName); + createIndexRequest.mapping("doc", "timestamp", "type=date", "total", "type=long"); + highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT); + DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId()) + .setTypes(Arrays.asList("doc")) + .setIndices(indexName) + .build(); + client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT); + client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT); + { + //tag::x-pack-ml-start-datafeed-request + StartDatafeedRequest request = new StartDatafeedRequest(datafeedId); // <1> + //end::x-pack-ml-start-datafeed-request + + //tag::x-pack-ml-start-datafeed-request-options + request.setEnd("2018-08-21T00:00:00Z"); // <1> + request.setStart("2018-08-20T00:00:00Z"); // <2> + request.setTimeout(TimeValue.timeValueMinutes(10)); // <3> + //end::x-pack-ml-start-datafeed-request-options + + //tag::x-pack-ml-start-datafeed-execute + StartDatafeedResponse response = client.machineLearning().startDatafeed(request, RequestOptions.DEFAULT); + boolean started = response.isStarted(); // <1> + //end::x-pack-ml-start-datafeed-execute + + assertTrue(started); + } + { + StartDatafeedRequest request = new StartDatafeedRequest(datafeedId); + + // tag::x-pack-ml-start-datafeed-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(StartDatafeedResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::x-pack-ml-start-datafeed-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-ml-start-datafeed-execute-async + client.machineLearning().startDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::x-pack-ml-start-datafeed-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + + public void testStopDatafeed() throws Exception { + RestHighLevelClient client = highLevelClient(); + + { + //tag::x-pack-ml-stop-datafeed-request + StopDatafeedRequest request = new StopDatafeedRequest("datafeed_id1", "datafeed_id*"); // <1> + //end::x-pack-ml-stop-datafeed-request + request = StopDatafeedRequest.stopAllDatafeedsRequest(); + + //tag::x-pack-ml-stop-datafeed-request-options + request.setAllowNoDatafeeds(true); // <1> + request.setForce(true); // <2> + request.setTimeout(TimeValue.timeValueMinutes(10)); // <3> + //end::x-pack-ml-stop-datafeed-request-options + + //tag::x-pack-ml-stop-datafeed-execute + StopDatafeedResponse response = client.machineLearning().stopDatafeed(request, RequestOptions.DEFAULT); + boolean stopped = response.isStopped(); // <1> + //end::x-pack-ml-stop-datafeed-execute + + assertTrue(stopped); + } + { + StopDatafeedRequest request = StopDatafeedRequest.stopAllDatafeedsRequest(); + + // tag::x-pack-ml-stop-datafeed-listener + ActionListener listener = new ActionListener() { + @Override + public void 
onResponse(StopDatafeedResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::x-pack-ml-stop-datafeed-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-ml-stop-datafeed-execute-async + client.machineLearning().stopDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::x-pack-ml-stop-datafeed-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testGetBuckets() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/RollupDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/RollupDocumentationIT.java index aadb0f0f200..46b89d86824 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/RollupDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/RollupDocumentationIT.java @@ -27,8 +27,15 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.client.rollup.GetRollupJobRequest; +import org.elasticsearch.client.rollup.GetRollupJobResponse; +import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper; +import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupIndexerJobStats; +import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupJobStatus; import org.elasticsearch.client.rollup.PutRollupJobRequest; import org.elasticsearch.client.rollup.PutRollupJobResponse; import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig; @@ -38,19 +45,26 @@ import org.elasticsearch.client.rollup.job.config.MetricConfig; import org.elasticsearch.client.rollup.job.config.RollupJobConfig; import org.elasticsearch.client.rollup.job.config.TermsGroupConfig; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.junit.After; import org.junit.Before; +import java.io.BufferedReader; import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.hasSize; public class RollupDocumentationIT extends ESRestHighLevelClientTestCase { @@ -160,4 +174,110 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } + + public void testGetRollupJob() throws Exception { + testCreateRollupJob(); + RestHighLevelClient client = highLevelClient(); + + + // 
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/RollupDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/RollupDocumentationIT.java index aadb0f0f200..46b89d86824 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/RollupDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/RollupDocumentationIT.java @@ -27,8 +27,15 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.client.rollup.GetRollupJobRequest; +import org.elasticsearch.client.rollup.GetRollupJobResponse; +import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper; +import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupIndexerJobStats; +import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupJobStatus; import org.elasticsearch.client.rollup.PutRollupJobRequest; import org.elasticsearch.client.rollup.PutRollupJobResponse; import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig; @@ -38,19 +45,26 @@ import org.elasticsearch.client.rollup.job.config.MetricConfig; import org.elasticsearch.client.rollup.job.config.RollupJobConfig; import org.elasticsearch.client.rollup.job.config.TermsGroupConfig; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.junit.After; import org.junit.Before; +import java.io.BufferedReader; import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.hasSize; public class RollupDocumentationIT extends ESRestHighLevelClientTestCase { @@ -160,4 +174,110 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } + + public void testGetRollupJob() throws Exception { + testCreateRollupJob(); + RestHighLevelClient client = highLevelClient(); + + // tag::x-pack-rollup-get-rollup-job-request + GetRollupJobRequest getAll = new GetRollupJobRequest(); // <1> + GetRollupJobRequest getJob = new GetRollupJobRequest("job_1"); // <2> + // end::x-pack-rollup-get-rollup-job-request + + // tag::x-pack-rollup-get-rollup-job-execute + GetRollupJobResponse response = client.rollup().getRollupJob(getJob, RequestOptions.DEFAULT); + // end::x-pack-rollup-get-rollup-job-execute + + // tag::x-pack-rollup-get-rollup-job-response + assertThat(response.getJobs(), hasSize(1)); + JobWrapper job = response.getJobs().get(0); // <1> + RollupJobConfig config = job.getJob(); + RollupJobStatus status = job.getStatus(); + RollupIndexerJobStats stats = job.getStats(); + // end::x-pack-rollup-get-rollup-job-response + assertNotNull(config); + assertNotNull(status); + assertNotNull(stats); + + // tag::x-pack-rollup-get-rollup-job-execute-listener + ActionListener<GetRollupJobResponse> listener = new ActionListener<GetRollupJobResponse>() { + @Override + public void onResponse(GetRollupJobResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::x-pack-rollup-get-rollup-job-execute-listener + + // Replace the empty listener with a blocking listener in the test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-rollup-get-rollup-job-execute-async + client.rollup().getRollupJobAsync(getJob, RequestOptions.DEFAULT, listener); // <1> + // end::x-pack-rollup-get-rollup-job-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + + @After + public void wipeRollup() throws Exception { + // TODO move this to ESRestTestCase + deleteRollupJobs(); + waitForPendingRollupTasks(); + } + + private void deleteRollupJobs() throws Exception { + Response response = adminClient().performRequest(new Request("GET", "/_xpack/rollup/job/_all")); + Map<String, Object> jobs = entityAsMap(response); + @SuppressWarnings("unchecked") + List<Map<String, Object>> jobConfigs = + (List<Map<String, Object>>) XContentMapValues.extractValue("jobs", jobs); + + if (jobConfigs == null) { + return; + } + + for (Map<String, Object> jobConfig : jobConfigs) { + @SuppressWarnings("unchecked") + String jobId = (String) ((Map<String, Object>) jobConfig.get("config")).get("id"); + Request request = new Request("DELETE", "/_xpack/rollup/job/" + jobId); + request.addParameter("ignore", "404"); // Ignore 404s because they imply someone was racing us to delete this + adminClient().performRequest(request); + } + } + + private void waitForPendingRollupTasks() throws Exception { + assertBusy(() -> { + try { + Request request = new Request("GET", "/_cat/tasks"); + request.addParameter("detailed", "true"); + Response response = adminClient().performRequest(request); + + try (BufferedReader responseReader = new BufferedReader( + new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) { + int activeTasks = 0; + String line; + StringBuilder tasksListString = new StringBuilder(); + while ((line = responseReader.readLine()) != null) { + + // We only care about Rollup jobs, otherwise this fails too easily due to unrelated tasks + if (line.startsWith("xpack/rollup/job")) { + activeTasks++; + tasksListString.append(line).append('\n'); + } + } + assertEquals(activeTasks + " active tasks found:\n" + tasksListString, 0, activeTasks); + } + } catch (IOException e) { + // Throw an assertion error so we retry + throw new AssertionError("Error getting active tasks list", e); + } + }); + } }
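Two properties of the request object are pinned down by GetRollupJobRequestTests further down: a null job id throws, and "_all" is deliberately rejected in favor of the no-arg constructor. Putting the response accessors from the snippet above together, a caller-side sketch might look like the following. It is illustrative only, reuses the imports added to RollupDocumentationIT above, and assumes a getId() accessor on RollupJobConfig (not shown in this change).

--------------------------------------
// Walks every job returned by the no-arg request and prints config identity
// next to live state and a couple of indexer counters.
static void describeRollupJobs(RestHighLevelClient client) throws IOException {
    GetRollupJobRequest getAll = new GetRollupJobRequest(); // default ctor = all jobs; "_all" is rejected
    GetRollupJobResponse response = client.rollup().getRollupJob(getAll, RequestOptions.DEFAULT);
    for (JobWrapper wrapper : response.getJobs()) {
        RollupJobConfig config = wrapper.getJob();
        RollupJobStatus status = wrapper.getStatus();
        RollupIndexerJobStats stats = wrapper.getStats();
        System.out.printf(Locale.ROOT, "%s: state=%s, pages=%d, input_docs=%d%n",
                config.getId(), status.getState(), stats.getNumPages(), stats.getNumDocuments());
    }
}
--------------------------------------

diff --git 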
a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index d9d4f665f9d..4382924bb97 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -413,6 +413,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testSearchRequestHighlighting() throws IOException { RestHighLevelClient client = highLevelClient(); { @@ -831,6 +832,8 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + + @SuppressWarnings("unused") public void testMultiSearchTemplateWithInlineScript() throws Exception { indexSearchTestData(); RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java index 103b031fc0e..778ec7b5707 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.client.security.ChangePasswordRequest; import org.elasticsearch.client.security.DisableUserRequest; import org.elasticsearch.client.security.EnableUserRequest; import org.elasticsearch.client.security.PutUserRequest; @@ -42,7 +43,7 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase { { //tag::put-user-execute - char[] password = new char[] { 'p', 'a', 's', 's', 'w', 'o', 'r', 'd' }; + char[] password = new char[]{'p', 'a', 's', 's', 'w', 'o', 'r', 'd'}; PutUserRequest request = new PutUserRequest("example", password, Collections.singletonList("superuser"), null, null, true, null, RefreshPolicy.NONE); PutUserResponse response = client.security().putUser(request, RequestOptions.DEFAULT); @@ -56,7 +57,7 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase { } { - char[] password = new char[] { 'p', 'a', 's', 's', 'w', 'o', 'r', 'd' }; + char[] password = new char[]{'p', 'a', 's', 's', 'w', 'o', 'r', 'd'}; PutUserRequest request = new PutUserRequest("example2", password, Collections.singletonList("superuser"), null, null, true, null, RefreshPolicy.NONE); // tag::put-user-execute-listener @@ -173,4 +174,48 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } + + public void testChangePassword() throws Exception { + RestHighLevelClient client = highLevelClient(); + char[] password = new char[]{'p', 'a', 's', 's', 'w', 'o', 'r', 'd'}; + char[] newPassword = new char[]{'n', 'e', 'w', 'p', 'a', 's', 's', 'w', 'o', 'r', 'd'}; + PutUserRequest putUserRequest = new PutUserRequest("change_password_user", password, Collections.singletonList("superuser"), + null, null, true, null, RefreshPolicy.NONE); + PutUserResponse 
putUserResponse = client.security().putUser(putUserRequest, RequestOptions.DEFAULT); + assertTrue(putUserResponse.isCreated()); + { + //tag::change-password-execute + ChangePasswordRequest request = new ChangePasswordRequest("change_password_user", newPassword, RefreshPolicy.NONE); + EmptyResponse response = client.security().changePassword(request, RequestOptions.DEFAULT); + //end::change-password-execute + + assertNotNull(response); + } + { + //tag::change-password-execute-listener + ChangePasswordRequest request = new ChangePasswordRequest("change_password_user", password, RefreshPolicy.NONE); + ActionListener<EmptyResponse> listener = new ActionListener<EmptyResponse>() { + @Override + public void onResponse(EmptyResponse emptyResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + //end::change-password-execute-listener + + // Replace the empty listener with a blocking listener in the test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + //tag::change-password-execute-async + client.security().changePasswordAsync(request, RequestOptions.DEFAULT, listener); // <1> + //end::change-password-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java index d1aed55f44e..22ef30c92b7 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java @@ -577,6 +577,7 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testSnapshotGetSnapshots() throws IOException { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java index c5d53abd978..9165c5cf10d 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java @@ -66,6 +66,7 @@ import static org.hamcrest.Matchers.equalTo; */ public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase { + @SuppressWarnings("unused") public void testGetStoredScript() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -128,6 +129,7 @@ public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase } + @SuppressWarnings("unused") public void testDeleteStoredScript() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TasksClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TasksClientDocumentationIT.java index 8a45195757c..38c8986e1d9 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TasksClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TasksClientDocumentationIT.java @@ -66,6 +66,7 @@ import 
static org.hamcrest.Matchers.notNullValue; */ public class TasksClientDocumentationIT extends ESRestHighLevelClientTestCase { + @SuppressWarnings("unused") public void testListTasks() throws IOException { RestHighLevelClient client = highLevelClient(); { @@ -149,6 +150,7 @@ public class TasksClientDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testCancelTasks() throws IOException { RestHighLevelClient client = highLevelClient(); { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/StartDatafeedRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/StartDatafeedRequestTests.java new file mode 100644 index 00000000000..fb83f5659cd --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/StartDatafeedRequestTests.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.client.ml.datafeed.DatafeedConfigTests; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class StartDatafeedRequestTests extends AbstractXContentTestCase { + + public static StartDatafeedRequest createRandomInstance(String datafeedId) { + StartDatafeedRequest request = new StartDatafeedRequest(datafeedId); + + if (randomBoolean()) { + request.setStart(String.valueOf(randomLongBetween(1, 1000))); + } + if (randomBoolean()) { + request.setEnd(String.valueOf(randomLongBetween(1, 1000))); + } + if (randomBoolean()) { + request.setTimeout(TimeValue.timeValueMinutes(randomLongBetween(1, 1000))); + } + + return request; + } + + @Override + protected StartDatafeedRequest createTestInstance() { + String datafeedId = DatafeedConfigTests.randomValidDatafeedId(); + return createRandomInstance(datafeedId); + } + + @Override + protected StartDatafeedRequest doParseInstance(XContentParser parser) throws IOException { + return StartDatafeedRequest.PARSER.parse(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/SimpleExecutableScript.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/StartDatafeedResponseTests.java similarity index 52% rename from modules/reindex/src/test/java/org/elasticsearch/index/reindex/SimpleExecutableScript.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/ml/StartDatafeedResponseTests.java index be661282df7..57bc75121d4 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/SimpleExecutableScript.java +++ 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/StartDatafeedResponseTests.java @@ -16,36 +16,27 @@ * specific language governing permissions and limitations * under the License. */ +package org.elasticsearch.client.ml; -package org.elasticsearch.index.reindex; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; -import org.elasticsearch.script.ExecutableScript; +import java.io.IOException; -import java.util.Map; -import java.util.function.Consumer; +public class StartDatafeedResponseTests extends AbstractXContentTestCase { - -public class SimpleExecutableScript implements ExecutableScript { - private final Consumer> script; - private Map ctx; - - public SimpleExecutableScript(Consumer> script) { - this.script = script; + @Override + protected StartDatafeedResponse createTestInstance() { + return new StartDatafeedResponse(randomBoolean()); } @Override - public Object run() { - script.accept(ctx); - return null; + protected StartDatafeedResponse doParseInstance(XContentParser parser) throws IOException { + return StartDatafeedResponse.fromXContent(parser); } @Override - @SuppressWarnings("unchecked") - public void setNextVar(String name, Object value) { - if ("ctx".equals(name)) { - ctx = (Map) value; - } else { - throw new IllegalArgumentException("Unsupported var [" + name + "]"); - } + protected boolean supportsUnknownFields() { + return true; } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/StopDatafeedRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/StopDatafeedRequestTests.java new file mode 100644 index 00000000000..5da920b2aef --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/StopDatafeedRequestTests.java @@ -0,0 +1,81 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class StopDatafeedRequestTests extends AbstractXContentTestCase<StopDatafeedRequest> { + + public void testStopAllDatafeedsRequest() { + StopDatafeedRequest request = StopDatafeedRequest.stopAllDatafeedsRequest(); + assertEquals(1, request.getDatafeedIds().size()); + assertEquals("_all", request.getDatafeedIds().get(0)); + } + + public void testWithNullDatafeedIds() { + Exception exception = expectThrows(IllegalArgumentException.class, StopDatafeedRequest::new); + assertEquals("datafeedIds must not be empty", exception.getMessage()); + + exception = expectThrows(NullPointerException.class, () -> new StopDatafeedRequest("datafeed1", null)); + assertEquals("datafeedIds must not contain null values", exception.getMessage()); + } + + @Override + protected StopDatafeedRequest createTestInstance() { + int datafeedCount = randomIntBetween(1, 10); + List<String> datafeedIds = new ArrayList<>(datafeedCount); + + for (int i = 0; i < datafeedCount; i++) { + datafeedIds.add(randomAlphaOfLength(10)); + } + + StopDatafeedRequest request = new StopDatafeedRequest(datafeedIds.toArray(new String[0])); + + if (randomBoolean()) { + request.setAllowNoDatafeeds(randomBoolean()); + } + + if (randomBoolean()) { + request.setTimeout(TimeValue.timeValueMinutes(randomIntBetween(1, 10))); + } + + if (randomBoolean()) { + request.setForce(randomBoolean()); + } + + return request; + } + + @Override + protected StopDatafeedRequest doParseInstance(XContentParser parser) throws IOException { + return StopDatafeedRequest.PARSER.parse(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +}
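Since the exception messages above are asserted verbatim, the client-side contract is worth spelling out once outside the test harness. The fragment below is illustrative (a java.util.Collections import is assumed), not part of the change:

--------------------------------------
// What StopDatafeedRequestTests pins down, from the caller's perspective.
StopDatafeedRequest all = StopDatafeedRequest.stopAllDatafeedsRequest();
assert all.getDatafeedIds().equals(Collections.singletonList("_all")); // "_all" sentinel

// Both of these fail fast on the client, before any HTTP request is sent:
//   new StopDatafeedRequest()               -> IllegalArgumentException("datafeedIds must not be empty")
//   new StopDatafeedRequest("feed-1", null) -> NullPointerException("datafeedIds must not contain null values")
--------------------------------------

diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogToolCli.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/StopDatafeedResponseTests.java similarity index 50% rename from server/src/main/java/org/elasticsearch/index/translog/TranslogToolCli.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/ml/StopDatafeedResponseTests.java index a8a8d735f9a..583b8e989b0 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogToolCli.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/StopDatafeedResponseTests.java @@ -16,27 +16,27 @@ * specific language governing permissions and limitations * under the License. 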
*/ +package org.elasticsearch.client.ml; -package org.elasticsearch.index.translog; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; -import org.elasticsearch.cli.LoggingAwareMultiCommand; -import org.elasticsearch.cli.Terminal; -import org.elasticsearch.index.shard.RemoveCorruptedShardDataCommand; +import java.io.IOException; -/** - * Class encapsulating and dispatching commands from the {@code elasticsearch-translog} command line tool - */ -@Deprecated -public class TranslogToolCli extends LoggingAwareMultiCommand { +public class StopDatafeedResponseTests extends AbstractXContentTestCase { - private TranslogToolCli() { - // that's only for 6.x branch for bwc with elasticsearch-translog - super("A CLI tool for various Elasticsearch translog actions"); - subcommands.put("truncate", new RemoveCorruptedShardDataCommand(true)); + @Override + protected StopDatafeedResponse createTestInstance() { + return new StopDatafeedResponse(randomBoolean()); } - public static void main(String[] args) throws Exception { - exit(new TranslogToolCli().main(args, Terminal.DEFAULT)); + @Override + protected StopDatafeedResponse doParseInstance(XContentParser parser) throws IOException { + return StopDatafeedResponse.fromXContent(parser); } + @Override + protected boolean supportsUnknownFields() { + return true; + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobRequestTests.java new file mode 100644 index 00000000000..4f42a907f5d --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobRequestTests.java @@ -0,0 +1,33 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.rollup; + +import org.elasticsearch.test.ESTestCase; + +public class GetRollupJobRequestTests extends ESTestCase { + public void testRequiresJob() { + final NullPointerException e = expectThrows(NullPointerException.class, () -> new GetRollupJobRequest(null)); + assertEquals("jobId is required", e.getMessage()); + } + + public void testDoNotUseAll() { + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new GetRollupJobRequest("_all")); + assertEquals("use the default ctor to ask for all jobs", e.getMessage()); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java new file mode 100644 index 00000000000..41979a4b92d --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java @@ -0,0 +1,120 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.rollup; + +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.client.rollup.GetRollupJobResponse.IndexerState; +import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper; +import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupIndexerJobStats; +import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupJobStatus; +import org.elasticsearch.client.rollup.job.config.RollupJobConfigTests; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; + +public class GetRollupJobResponseTests extends ESTestCase { + public void testFromXContent() throws IOException { + xContentTester( + this::createParser, + this::createTestInstance, + this::toXContent, + GetRollupJobResponse::fromXContent) + .supportsUnknownFields(true) + .randomFieldsExcludeFilter(field -> + field.endsWith("status.current_position")) + .test(); + } + + private GetRollupJobResponse createTestInstance() { + int jobCount = between(1, 5); + List jobs = new ArrayList<>(); + for (int j = 0; j < jobCount; j++) { + jobs.add(new JobWrapper( + RollupJobConfigTests.randomRollupJobConfig(randomAlphaOfLength(5)), + randomStats(), + randomStatus())); + } + return new GetRollupJobResponse(jobs); + } + + private RollupIndexerJobStats randomStats() { + return new RollupIndexerJobStats(randomLong(), randomLong(), randomLong(), randomLong()); + } + + private RollupJobStatus randomStatus() { + Map currentPosition = new HashMap<>(); + int positions = between(0, 10); + while (currentPosition.size() < positions) { + currentPosition.put(randomAlphaOfLength(2), randomAlphaOfLength(2)); + } + return new RollupJobStatus( + randomFrom(IndexerState.values()), + currentPosition, + randomBoolean()); + } + + private void toXContent(GetRollupJobResponse response, XContentBuilder builder) throws IOException { + ToXContent.Params params = ToXContent.EMPTY_PARAMS; + builder.startObject(); + builder.startArray(GetRollupJobResponse.JOBS.getPreferredName()); + for (JobWrapper job : response.getJobs()) { + toXContent(job, builder, params); + } + builder.endArray(); + builder.endObject(); + } + + private void toXContent(JobWrapper jobWrapper, XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(GetRollupJobResponse.CONFIG.getPreferredName()); + jobWrapper.getJob().toXContent(builder, params); + builder.field(GetRollupJobResponse.STATUS.getPreferredName()); + toXContent(jobWrapper.getStatus(), builder, params); + builder.field(GetRollupJobResponse.STATS.getPreferredName()); + toXContent(jobWrapper.getStats(), builder, params); + builder.endObject(); + } + + public void toXContent(RollupJobStatus status, XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(GetRollupJobResponse.STATE.getPreferredName(), status.getState().value()); + if (status.getCurrentPosition() != null) { + builder.field(GetRollupJobResponse.CURRENT_POSITION.getPreferredName(), status.getCurrentPosition()); + } + builder.field(GetRollupJobResponse.UPGRADED_DOC_ID.getPreferredName(), status.getUpgradedDocumentId()); + builder.endObject(); + } + + public void toXContent(RollupIndexerJobStats stats, XContentBuilder builder, 
ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(GetRollupJobResponse.NUM_PAGES.getPreferredName(), stats.getNumPages()); + builder.field(GetRollupJobResponse.NUM_INPUT_DOCUMENTS.getPreferredName(), stats.getNumDocuments()); + builder.field(GetRollupJobResponse.NUM_OUTPUT_DOCUMENTS.getPreferredName(), stats.getOutputDocuments()); + builder.field(GetRollupJobResponse.NUM_INVOCATIONS.getPreferredName(), stats.getNumInvocations()); + builder.endObject(); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpressionDslTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpressionDslTests.java new file mode 100644 index 00000000000..df94640f172 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpressionDslTests.java @@ -0,0 +1,97 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.security.support.expressiondsl; + +import org.elasticsearch.client.security.support.expressiondsl.expressions.AllRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.AnyRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.ExceptRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Date; + +import static org.hamcrest.Matchers.equalTo; + +public class RoleMapperExpressionDslTests extends ESTestCase { + + public void testRoleMapperExpressionToXContentType() throws IOException { + + final RoleMapperExpression allExpression = AllRoleMapperExpression.builder() + .addExpression(AnyRoleMapperExpression.builder() + .addExpression(FieldRoleMapperExpression.ofDN("*,ou=admin,dc=example,dc=com")) + .addExpression(FieldRoleMapperExpression.ofUsername("es-admin", "es-system")) + .build()) + .addExpression(FieldRoleMapperExpression.ofGroups("cn=people,dc=example,dc=com")) + .addExpression(new ExceptRoleMapperExpression(FieldRoleMapperExpression.ofMetadata("metadata.terminated_date", new Date( + 1537145401027L)))) + .build(); + + final XContentBuilder builder = XContentFactory.jsonBuilder(); + allExpression.toXContent(builder, ToXContent.EMPTY_PARAMS); + final String output = Strings.toString(builder); + final String expected = + "{"+ + "\"all\":["+ + "{"+ + "\"any\":["+ + "{"+ + "\"field\":{"+ + "\"dn\":[\"*,ou=admin,dc=example,dc=com\"]"+ + "}"+ + "},"+ + "{"+ + "\"field\":{"+ + "\"username\":["+ + "\"es-admin\","+ + "\"es-system\""+ + "]"+ + "}"+ + "}"+ + "]"+ + "},"+ + "{"+ + "\"field\":{"+ + "\"groups\":[\"cn=people,dc=example,dc=com\"]"+ + "}"+ + "},"+ + "{"+ + "\"except\":{"+ + "\"field\":{"+ + "\"metadata.terminated_date\":[\"2018-09-17T00:50:01.027Z\"]"+ + "}"+ + "}"+ + "}"+ + "]"+ + "}"; + + assertThat(expected, equalTo(output)); + } + + public void testFieldRoleMapperExpressionThrowsExceptionForMissingMetadataPrefix() { + final IllegalArgumentException ile = expectThrows(IllegalArgumentException.class, () -> FieldRoleMapperExpression.ofMetadata( + "terminated_date", new Date(1537145401027L))); + assertThat(ile.getMessage(), equalTo("metadata key must have prefix 'metadata.'")); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParserTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParserTests.java new file mode 100644 index 00000000000..24ed5684fa8 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParserTests.java @@ -0,0 +1,129 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl.parser; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.CompositeRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.iterableWithSize; + +public class RoleMapperExpressionParserTests extends ESTestCase { + + public void testParseSimpleFieldExpression() throws Exception { + String json = "{ \"field\": { \"username\" : [\"*@shield.gov\"] } }"; + FieldRoleMapperExpression field = checkExpressionType(parse(json), FieldRoleMapperExpression.class); + assertThat(field.getField(), equalTo("username")); + assertThat(field.getValues(), iterableWithSize(1)); + assertThat(field.getValues().get(0), equalTo("*@shield.gov")); + + assertThat(toJson(field), equalTo(json.replaceAll("\\s", ""))); + } + + public void testParseComplexExpression() throws Exception { + String json = "{ \"any\": [" + + " { \"field\": { \"username\" : \"*@shield.gov\" } }, " + + " { \"all\": [" + + " { \"field\": { \"username\" : \"/.*\\\\@avengers\\\\.(net|org)/\" } }, " + + " { \"field\": { \"groups\" : [ \"admin\", \"operators\" ] } }, " + + " { \"except\":" + + " { \"field\": { \"groups\" : \"disavowed\" } }" + + " }" + + " ] }" + + "] }"; + final RoleMapperExpression expr = parse(json); + + assertThat(expr, instanceOf(CompositeRoleMapperExpression.class)); + CompositeRoleMapperExpression any = (CompositeRoleMapperExpression) expr; + + assertThat(any.getElements(), iterableWithSize(2)); + + final FieldRoleMapperExpression fieldShield = checkExpressionType(any.getElements().get(0), + FieldRoleMapperExpression.class); + assertThat(fieldShield.getField(), equalTo("username")); + assertThat(fieldShield.getValues(), iterableWithSize(1)); + assertThat(fieldShield.getValues().get(0), equalTo("*@shield.gov")); + + final CompositeRoleMapperExpression all = checkExpressionType(any.getElements().get(1), + CompositeRoleMapperExpression.class); + assertThat(all.getElements(), iterableWithSize(3)); + + final FieldRoleMapperExpression fieldAvengers = checkExpressionType(all.getElements().get(0), + FieldRoleMapperExpression.class); + assertThat(fieldAvengers.getField(), equalTo("username")); + assertThat(fieldAvengers.getValues(), iterableWithSize(1)); + assertThat(fieldAvengers.getValues().get(0), equalTo("/.*\\@avengers\\.(net|org)/")); + + 
final FieldRoleMapperExpression fieldGroupsAdmin = checkExpressionType(all.getElements().get(1), + FieldRoleMapperExpression.class); + assertThat(fieldGroupsAdmin.getField(), equalTo("groups")); + assertThat(fieldGroupsAdmin.getValues(), iterableWithSize(2)); + assertThat(fieldGroupsAdmin.getValues().get(0), equalTo("admin")); + assertThat(fieldGroupsAdmin.getValues().get(1), equalTo("operators")); + + final CompositeRoleMapperExpression except = checkExpressionType(all.getElements().get(2), + CompositeRoleMapperExpression.class); + final FieldRoleMapperExpression fieldDisavowed = checkExpressionType(except.getElements().get(0), + FieldRoleMapperExpression.class); + assertThat(fieldDisavowed.getField(), equalTo("groups")); + assertThat(fieldDisavowed.getValues(), iterableWithSize(1)); + assertThat(fieldDisavowed.getValues().get(0), equalTo("disavowed")); + + } + + private String toJson(final RoleMapperExpression expr) throws IOException { + final XContentBuilder builder = XContentFactory.jsonBuilder(); + expr.toXContent(builder, ToXContent.EMPTY_PARAMS); + final String output = Strings.toString(builder); + return output; + } + + private T checkExpressionType(RoleMapperExpression expr, Class type) { + assertThat(expr, instanceOf(type)); + return type.cast(expr); + } + + private RoleMapperExpression parse(String json) throws IOException { + return new RoleMapperExpressionParser().parse("rules", XContentType.JSON.xContent().createParser(new NamedXContentRegistry( + Collections.emptyList()), new DeprecationHandler() { + @Override + public void usedDeprecatedName(String usedName, String modernName) { + } + + @Override + public void usedDeprecatedField(String usedName, String replacedWith) { + } + }, json)); + } + +} diff --git a/client/rest/src/main/java/org/elasticsearch/client/Response.java b/client/rest/src/main/java/org/elasticsearch/client/Response.java index 39bbf769713..ab61f01f661 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/Response.java +++ b/client/rest/src/main/java/org/elasticsearch/client/Response.java @@ -26,7 +26,11 @@ import org.apache.http.HttpResponse; import org.apache.http.RequestLine; import org.apache.http.StatusLine; +import java.util.ArrayList; +import java.util.List; import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; /** * Holds an elasticsearch response. It wraps the {@link HttpResponse} returned and associates it with @@ -96,6 +100,46 @@ public class Response { return response.getEntity(); } + private static final Pattern WARNING_HEADER_PATTERN = Pattern.compile( + "299 " + // warn code + "Elasticsearch-\\d+\\.\\d+\\.\\d+(?:-(?:alpha|beta|rc)\\d+)?(?:-SNAPSHOT)?-(?:[a-f0-9]{7}|Unknown) " + // warn agent + "\"((?:\t| |!|[\\x23-\\x5B]|[\\x5D-\\x7E]|[\\x80-\\xFF]|\\\\|\\\\\")*)\" " + // quoted warning value, captured + // quoted RFC 1123 date format + "\"" + // opening quote + "(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), " + // weekday + "\\d{2} " + // 2-digit day + "(?:Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) " + // month + "\\d{4} " + // 4-digit year + "\\d{2}:\\d{2}:\\d{2} " + // (two-digit hour):(two-digit minute):(two-digit second) + "GMT" + // GMT + "\""); // closing quote + + /** + * Returns a list of all warning headers returned in the response. 
+ */ + public List<String> getWarnings() { + List<String> warnings = new ArrayList<>(); + for (Header header : response.getHeaders("Warning")) { + String warning = header.getValue(); + final Matcher matcher = WARNING_HEADER_PATTERN.matcher(warning); + if (matcher.matches()) { + warnings.add(matcher.group(1)); + continue; + } + warnings.add(warning); + } + return warnings; + } + + /** + * Returns {@code true} if there is at least one warning header returned in the + * response. + */ + public boolean hasWarnings() { + Header[] warnings = response.getHeaders("Warning"); + return warnings != null && warnings.length > 0; + } + HttpResponse getHttpResponse() { return response; } diff --git a/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java b/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java index 5e646d975c8..0957e25fb70 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java +++ b/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java @@ -58,6 +58,10 @@ response.getStatusLine().toString() ); + if (response.hasWarnings()) { + message += "\n" + "Warnings: " + response.getWarnings(); + } + HttpEntity entity = response.getEntity(); if (entity != null) { if (entity.isRepeatable() == false) { diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index a7afbc8ffbd..d68e371f318 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -110,15 +110,17 @@ public class RestClient implements Closeable { private final FailureListener failureListener; private final NodeSelector nodeSelector; private volatile NodeTuple<List<Node>> nodeTuple; + private final boolean strictDeprecationMode; - RestClient(CloseableHttpAsyncClient client, long maxRetryTimeoutMillis, Header[] defaultHeaders, - List<Node> nodes, String pathPrefix, FailureListener failureListener, NodeSelector nodeSelector) { + RestClient(CloseableHttpAsyncClient client, long maxRetryTimeoutMillis, Header[] defaultHeaders, List<Node> nodes, String pathPrefix, + FailureListener failureListener, NodeSelector nodeSelector, boolean strictDeprecationMode) { this.client = client; this.maxRetryTimeoutMillis = maxRetryTimeoutMillis; this.defaultHeaders = Collections.unmodifiableList(Arrays.asList(defaultHeaders)); this.failureListener = failureListener; this.pathPrefix = pathPrefix; this.nodeSelector = nodeSelector; + this.strictDeprecationMode = strictDeprecationMode; setNodes(nodes); } @@ -296,7 +298,11 @@ public class RestClient implements Closeable { Response response = new Response(request.getRequestLine(), node.getHost(), httpResponse); if (isSuccessfulResponse(statusCode) || ignoreErrorCodes.contains(response.getStatusLine().getStatusCode())) { onResponse(node); - listener.onSuccess(response); + if (strictDeprecationMode && response.hasWarnings()) { + listener.onDefinitiveFailure(new ResponseException(response)); + } else { + listener.onSuccess(response); + } } else { ResponseException responseException = new ResponseException(response); if (isRetryStatus(statusCode)) { 
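The pattern and the two accessors above give callers a choice: inspect deprecation warnings per response, or let the strictDeprecationMode flag threaded through RestClient here (and exposed on the builder just below) turn any warning-bearing response into a ResponseException. A rough usage sketch follows; the endpoint path is made up and a local node is assumed.

--------------------------------------
// Lenient client: warnings are surfaced on the Response for inspection.
RestClient lenient = RestClient.builder(new HttpHost("localhost", 9200, "http")).build();
Response response = lenient.performRequest(new Request("GET", "/_deprecated_endpoint")); // hypothetical path
if (response.hasWarnings()) {
    for (String warning : response.getWarnings()) {
        // If the header matches the pattern above, only the quoted warning
        // message is returned; otherwise the raw header value comes back.
        System.err.println("deprecated usage: " + warning);
    }
}

// Strict client: the same warning-bearing response instead completes the
// request with a ResponseException whose message includes the warnings.
RestClient strict = RestClient.builder(new HttpHost("localhost", 9200, "http"))
        .setStrictDeprecationMode(true)
        .build();
--------------------------------------

diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java index dd3f5ad5a72..84cc3ee1667 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java +++ 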
b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java @@ -56,6 +56,7 @@ public final class RestClientBuilder { private RequestConfigCallback requestConfigCallback; private String pathPrefix; private NodeSelector nodeSelector = NodeSelector.ANY; + private boolean strictDeprecationMode = false; /** * Creates a new builder instance and sets the hosts that the client will send requests to. @@ -185,6 +186,15 @@ public final class RestClientBuilder { return this; } + /** + * Whether the REST client should return any response containing at least + * one warning header as a failure. + */ + public RestClientBuilder setStrictDeprecationMode(boolean strictDeprecationMode) { + this.strictDeprecationMode = strictDeprecationMode; + return this; + } + /** * Creates a new {@link RestClient} based on the provided configuration. */ @@ -199,7 +209,7 @@ public final class RestClientBuilder { } }); RestClient restClient = new RestClient(httpClient, maxRetryTimeout, defaultHeaders, nodes, - pathPrefix, failureListener, nodeSelector); + pathPrefix, failureListener, nodeSelector, strictDeprecationMode); httpClient.start(); return restClient; } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java index e1062076a0d..7dd1c4d842b 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java @@ -115,7 +115,7 @@ public class RestClientMultipleHostsTests extends RestClientTestCase { } nodes = Collections.unmodifiableList(nodes); failureListener = new HostsTrackingFailureListener(); - return new RestClient(httpClient, 10000, new Header[0], nodes, null, failureListener, nodeSelector); + return new RestClient(httpClient, 10000, new Header[0], nodes, null, failureListener, nodeSelector, false); } /** diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java index 0c589e6a40c..3aa10762676 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java @@ -148,7 +148,7 @@ public class RestClientSingleHostTests extends RestClientTestCase { node = new Node(new HttpHost("localhost", 9200)); failureListener = new HostsTrackingFailureListener(); restClient = new RestClient(httpClient, 10000, defaultHeaders, - singletonList(node), null, failureListener, NodeSelector.ANY); + singletonList(node), null, failureListener, NodeSelector.ANY, false); } /** diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java index 4a037b18404..69cdfeae85d 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java @@ -57,7 +57,7 @@ public class RestClientTests extends RestClientTestCase { public void testCloseIsIdempotent() throws IOException { List nodes = singletonList(new Node(new HttpHost("localhost", 9200))); CloseableHttpAsyncClient closeableHttpAsyncClient = mock(CloseableHttpAsyncClient.class); - RestClient restClient = new RestClient(closeableHttpAsyncClient, 1_000, new Header[0], nodes, null, null, null); + RestClient restClient = 
new RestClient(closeableHttpAsyncClient, 1_000, new Header[0], nodes, null, null, null, false); restClient.close(); verify(closeableHttpAsyncClient, times(1)).close(); restClient.close(); @@ -345,7 +345,7 @@ public class RestClientTests extends RestClientTestCase { private static RestClient createRestClient() { List nodes = Collections.singletonList(new Node(new HttpHost("localhost", 9200))); return new RestClient(mock(CloseableHttpAsyncClient.class), randomLongBetween(1_000, 30_000), - new Header[] {}, nodes, null, null, null); + new Header[] {}, nodes, null, null, null, false); } public void testRoundRobin() throws IOException { diff --git a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java index ce2e0907560..90801715b7e 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java +++ b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java @@ -45,6 +45,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseListener; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.client.RestClientBuilder.HttpClientConfigCallback; import javax.net.ssl.SSLContext; import java.io.IOException; @@ -93,8 +94,8 @@ public class RestClientDocumentation { //tag::rest-client-init RestClient restClient = RestClient.builder( - new HttpHost("localhost", 9200, "http"), - new HttpHost("localhost", 9201, "http")).build(); + new HttpHost("localhost", 9200, "http"), + new HttpHost("localhost", 9201, "http")).build(); //end::rest-client-init //tag::rest-client-close @@ -103,26 +104,30 @@ public class RestClientDocumentation { { //tag::rest-client-init-default-headers - RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http")); + RestClientBuilder builder = RestClient.builder( + new HttpHost("localhost", 9200, "http")); Header[] defaultHeaders = new Header[]{new BasicHeader("header", "value")}; builder.setDefaultHeaders(defaultHeaders); // <1> //end::rest-client-init-default-headers } { //tag::rest-client-init-max-retry-timeout - RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http")); + RestClientBuilder builder = RestClient.builder( + new HttpHost("localhost", 9200, "http")); builder.setMaxRetryTimeoutMillis(10000); // <1> //end::rest-client-init-max-retry-timeout } { //tag::rest-client-init-node-selector - RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http")); + RestClientBuilder builder = RestClient.builder( + new HttpHost("localhost", 9200, "http")); builder.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS); // <1> //end::rest-client-init-node-selector } { //tag::rest-client-init-allocation-aware-selector - RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http")); + RestClientBuilder builder = RestClient.builder( + new HttpHost("localhost", 9200, "http")); builder.setNodeSelector(new NodeSelector() { // <1> @Override public void select(Iterable nodes) { @@ -155,7 +160,8 @@ public class RestClientDocumentation { } { //tag::rest-client-init-failure-listener - RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http")); + RestClientBuilder builder = RestClient.builder( + new HttpHost("localhost", 9200, "http")); builder.setFailureListener(new 
RestClient.FailureListener() { @Override public void onFailure(Node node) { @@ -166,24 +172,30 @@ public class RestClientDocumentation { } { //tag::rest-client-init-request-config-callback - RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http")); - builder.setRequestConfigCallback(new RestClientBuilder.RequestConfigCallback() { - @Override - public RequestConfig.Builder customizeRequestConfig(RequestConfig.Builder requestConfigBuilder) { - return requestConfigBuilder.setSocketTimeout(10000); // <1> - } - }); + RestClientBuilder builder = RestClient.builder( + new HttpHost("localhost", 9200, "http")); + builder.setRequestConfigCallback( + new RestClientBuilder.RequestConfigCallback() { + @Override + public RequestConfig.Builder customizeRequestConfig( + RequestConfig.Builder requestConfigBuilder) { + return requestConfigBuilder.setSocketTimeout(10000); // <1> + } + }); //end::rest-client-init-request-config-callback } { //tag::rest-client-init-client-config-callback - RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http")); - builder.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() { - @Override - public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) { - return httpClientBuilder.setProxy(new HttpHost("proxy", 9000, "http")); // <1> - } - }); + RestClientBuilder builder = RestClient.builder( + new HttpHost("localhost", 9200, "http")); + builder.setHttpClientConfigCallback(new HttpClientConfigCallback() { + @Override + public HttpAsyncClientBuilder customizeHttpClient( + HttpAsyncClientBuilder httpClientBuilder) { + return httpClientBuilder.setProxy( + new HttpHost("proxy", 9000, "http")); // <1> + } + }); //end::rest-client-init-client-config-callback } @@ -281,58 +293,74 @@ public class RestClientDocumentation { public void testCommonConfiguration() throws Exception { { //tag::rest-client-config-timeouts - RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200)) - .setRequestConfigCallback(new RestClientBuilder.RequestConfigCallback() { + RestClientBuilder builder = RestClient.builder( + new HttpHost("localhost", 9200)) + .setRequestConfigCallback( + new RestClientBuilder.RequestConfigCallback() { @Override - public RequestConfig.Builder customizeRequestConfig(RequestConfig.Builder requestConfigBuilder) { - return requestConfigBuilder.setConnectTimeout(5000) - .setSocketTimeout(60000); + public RequestConfig.Builder customizeRequestConfig( + RequestConfig.Builder requestConfigBuilder) { + return requestConfigBuilder + .setConnectTimeout(5000) + .setSocketTimeout(60000); } }) - .setMaxRetryTimeoutMillis(60000); + .setMaxRetryTimeoutMillis(60000); //end::rest-client-config-timeouts } { //tag::rest-client-config-threads - RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200)) - .setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() { - @Override - public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) { - return httpClientBuilder.setDefaultIOReactorConfig( - IOReactorConfig.custom().setIoThreadCount(1).build()); - } - }); + RestClientBuilder builder = RestClient.builder( + new HttpHost("localhost", 9200)) + .setHttpClientConfigCallback(new HttpClientConfigCallback() { + @Override + public HttpAsyncClientBuilder customizeHttpClient( + HttpAsyncClientBuilder httpClientBuilder) { + return httpClientBuilder.setDefaultIOReactorConfig( + IOReactorConfig.custom() + 
.setIoThreadCount(1) + .build()); + } + }); //end::rest-client-config-threads } { //tag::rest-client-config-basic-auth - final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + final CredentialsProvider credentialsProvider = + new BasicCredentialsProvider(); credentialsProvider.setCredentials(AuthScope.ANY, - new UsernamePasswordCredentials("user", "password")); + new UsernamePasswordCredentials("user", "password")); - RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200)) - .setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() { - @Override - public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) { - return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); - } - }); + RestClientBuilder builder = RestClient.builder( + new HttpHost("localhost", 9200)) + .setHttpClientConfigCallback(new HttpClientConfigCallback() { + @Override + public HttpAsyncClientBuilder customizeHttpClient( + HttpAsyncClientBuilder httpClientBuilder) { + return httpClientBuilder + .setDefaultCredentialsProvider(credentialsProvider); + } + }); //end::rest-client-config-basic-auth } { //tag::rest-client-config-disable-preemptive-auth - final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + final CredentialsProvider credentialsProvider = + new BasicCredentialsProvider(); credentialsProvider.setCredentials(AuthScope.ANY, - new UsernamePasswordCredentials("user", "password")); + new UsernamePasswordCredentials("user", "password")); - RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200)) - .setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() { - @Override - public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) { - httpClientBuilder.disableAuthCaching(); // <1> - return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); - } - }); + RestClientBuilder builder = RestClient.builder( + new HttpHost("localhost", 9200)) + .setHttpClientConfigCallback(new HttpClientConfigCallback() { + @Override + public HttpAsyncClientBuilder customizeHttpClient( + HttpAsyncClientBuilder httpClientBuilder) { + httpClientBuilder.disableAuthCaching(); // <1> + return httpClientBuilder + .setDefaultCredentialsProvider(credentialsProvider); + } + }); //end::rest-client-config-disable-preemptive-auth } { @@ -343,15 +371,18 @@ public class RestClientDocumentation { try (InputStream is = Files.newInputStream(keyStorePath)) { truststore.load(is, keyStorePass.toCharArray()); } - SSLContextBuilder sslBuilder = SSLContexts.custom().loadTrustMaterial(truststore, null); + SSLContextBuilder sslBuilder = SSLContexts.custom() + .loadTrustMaterial(truststore, null); final SSLContext sslContext = sslBuilder.build(); - RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "https")) - .setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() { - @Override - public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) { - return httpClientBuilder.setSSLContext(sslContext); - } - }); + RestClientBuilder builder = RestClient.builder( + new HttpHost("localhost", 9200, "https")) + .setHttpClientConfigCallback(new HttpClientConfigCallback() { + @Override + public HttpAsyncClientBuilder customizeHttpClient( + HttpAsyncClientBuilder httpClientBuilder) { + return httpClientBuilder.setSSLContext(sslContext); + } + }); 
//end::rest-client-config-encrypted-communication } } diff --git a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/documentation/SnifferDocumentation.java b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/documentation/SnifferDocumentation.java index 5f305024dba..70d7373dfc9 100644 --- a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/documentation/SnifferDocumentation.java +++ b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/documentation/SnifferDocumentation.java @@ -56,8 +56,8 @@ public class SnifferDocumentation { { //tag::sniffer-init RestClient restClient = RestClient.builder( - new HttpHost("localhost", 9200, "http")) - .build(); + new HttpHost("localhost", 9200, "http")) + .build(); Sniffer sniffer = Sniffer.builder(restClient).build(); //end::sniffer-init @@ -69,21 +69,23 @@ public class SnifferDocumentation { { //tag::sniffer-interval RestClient restClient = RestClient.builder( - new HttpHost("localhost", 9200, "http")) - .build(); + new HttpHost("localhost", 9200, "http")) + .build(); Sniffer sniffer = Sniffer.builder(restClient) - .setSniffIntervalMillis(60000).build(); + .setSniffIntervalMillis(60000).build(); //end::sniffer-interval } { //tag::sniff-on-failure - SniffOnFailureListener sniffOnFailureListener = new SniffOnFailureListener(); - RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)) - .setFailureListener(sniffOnFailureListener) // <1> - .build(); + SniffOnFailureListener sniffOnFailureListener = + new SniffOnFailureListener(); + RestClient restClient = RestClient.builder( + new HttpHost("localhost", 9200)) + .setFailureListener(sniffOnFailureListener) // <1> + .build(); Sniffer sniffer = Sniffer.builder(restClient) - .setSniffAfterFailureDelayMillis(30000) // <2> - .build(); + .setSniffAfterFailureDelayMillis(30000) // <2> + .build(); sniffOnFailureListener.setSniffer(sniffer); // <3> //end::sniff-on-failure } @@ -103,29 +105,29 @@ public class SnifferDocumentation { { //tag::sniff-request-timeout RestClient restClient = RestClient.builder( - new HttpHost("localhost", 9200, "http")) - .build(); + new HttpHost("localhost", 9200, "http")) + .build(); NodesSniffer nodesSniffer = new ElasticsearchNodesSniffer( - restClient, - TimeUnit.SECONDS.toMillis(5), - ElasticsearchNodesSniffer.Scheme.HTTP); + restClient, + TimeUnit.SECONDS.toMillis(5), + ElasticsearchNodesSniffer.Scheme.HTTP); Sniffer sniffer = Sniffer.builder(restClient) - .setNodesSniffer(nodesSniffer).build(); + .setNodesSniffer(nodesSniffer).build(); //end::sniff-request-timeout } { //tag::custom-nodes-sniffer RestClient restClient = RestClient.builder( - new HttpHost("localhost", 9200, "http")) - .build(); + new HttpHost("localhost", 9200, "http")) + .build(); NodesSniffer nodesSniffer = new NodesSniffer() { - @Override - public List sniff() throws IOException { - return null; // <1> - } - }; + @Override + public List sniff() throws IOException { + return null; // <1> + } + }; Sniffer sniffer = Sniffer.builder(restClient) - .setNodesSniffer(nodesSniffer).build(); + .setNodesSniffer(nodesSniffer).build(); //end::custom-nodes-sniffer } } diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java index ffd3a1f6c0c..52b918e97f1 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java +++ 
b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.test.rest; import org.apache.http.util.EntityUtils; -import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Response; @@ -31,57 +30,53 @@ import org.junit.After; import org.junit.Before; import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.Locale; import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; /** * Tests that wait for refresh is fired if the index is closed. */ -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33533") public class WaitForRefreshAndCloseIT extends ESRestTestCase { @Before public void setupIndex() throws IOException { - try { - client().performRequest(new Request("DELETE", indexName())); - } catch (ResponseException e) { - // If we get an error, it should be because the index doesn't exist - assertEquals(404, e.getResponse().getStatusLine().getStatusCode()); - } - Request request = new Request("PUT", indexName()); + Request request = new Request("PUT", "/test"); request.setJsonEntity("{\"settings\":{\"refresh_interval\":-1}}"); client().performRequest(request); } @After public void cleanupIndex() throws IOException { - client().performRequest(new Request("DELETE", indexName())); - } - - private String indexName() { - return getTestName().toLowerCase(Locale.ROOT); + client().performRequest(new Request("DELETE", "/test")); } private String docPath() { - return indexName() + "/test/1"; + return "test/_doc/1"; } public void testIndexAndThenClose() throws Exception { - closeWhileListenerEngaged(start("PUT", "", "{\"test\":\"test\"}")); + Request request = new Request("PUT", docPath()); + request.setJsonEntity("{\"test\":\"test\"}"); + closeWhileListenerEngaged(start(request)); } public void testUpdateAndThenClose() throws Exception { - Request request = new Request("PUT", docPath()); - request.setJsonEntity("{\"test\":\"test\"}"); - client().performRequest(request); - closeWhileListenerEngaged(start("POST", "/_update", "{\"doc\":{\"name\":\"test\"}}")); + Request createDoc = new Request("PUT", docPath()); + createDoc.setJsonEntity("{\"test\":\"test\"}"); + client().performRequest(createDoc); + Request updateDoc = new Request("POST", docPath() + "/_update"); + updateDoc.setJsonEntity("{\"doc\":{\"name\":\"test\"}}"); + closeWhileListenerEngaged(start(updateDoc)); } public void testDeleteAndThenClose() throws Exception { Request request = new Request("PUT", docPath()); request.setJsonEntity("{\"test\":\"test\"}"); client().performRequest(request); - closeWhileListenerEngaged(start("DELETE", "", null)); + closeWhileListenerEngaged(start(new Request("DELETE", docPath()))); } private void closeWhileListenerEngaged(ActionFuture future) throws Exception { @@ -89,40 +84,52 @@ public class WaitForRefreshAndCloseIT extends ESRestTestCase { assertBusy(() -> { Map stats; try { - stats = entityAsMap(client().performRequest(new Request("GET", indexName() + "/_stats/refresh"))); + stats = entityAsMap(client().performRequest(new Request("GET", "/test/_stats/refresh"))); } catch (IOException e) { throw new RuntimeException(e); } - @SuppressWarnings("unchecked") - Map indices = (Map) 
stats.get("indices"); - @SuppressWarnings("unchecked") - Map theIndex = (Map) indices.get(indexName()); - @SuppressWarnings("unchecked") - Map total = (Map) theIndex.get("total"); - @SuppressWarnings("unchecked") - Map refresh = (Map) total.get("refresh"); - int listeners = (int) refresh.get("listeners"); + Map indices = (Map) stats.get("indices"); + Map theIndex = (Map) indices.get("test"); + Map total = (Map) theIndex.get("total"); + Map refresh = (Map) total.get("refresh"); + int listeners = (Integer) refresh.get("listeners"); assertEquals(1, listeners); }); // Close the index. That should flush the listener. - client().performRequest(new Request("POST", indexName() + "/_close")); + client().performRequest(new Request("POST", "/test/_close")); - // The request shouldn't fail. It certainly shouldn't hang. - future.get(); + /* + * The request may fail, but we really, really, really want to make + * sure that it doesn't time out. + */ + try { + future.get(1, TimeUnit.MINUTES); + } catch (ExecutionException ee) { + /* + * If it *does* fail it should fail with a FORBIDDEN error because + * it attempts to take an action on a closed index. Again, it'd be + * nice if all requests waiting for refresh came back even though + * the index is closed and most do, but sometimes they bump into + * the index being closed. At least they don't hang forever. That'd + * be a nightmare. + */ + assertThat(ee.getCause(), instanceOf(ResponseException.class)); + ResponseException re = (ResponseException) ee.getCause(); + assertEquals(403, re.getResponse().getStatusLine().getStatusCode()); + assertThat(EntityUtils.toString(re.getResponse().getEntity()), containsString("FORBIDDEN/4/index closed")); + } } - private ActionFuture start(String method, String path, String body) { + private ActionFuture start(Request request) { PlainActionFuture future = new PlainActionFuture<>(); - Request request = new Request(method, docPath() + path); request.addParameter("refresh", "wait_for"); request.addParameter("error_trace", ""); - request.setJsonEntity(body); client().performRequestAsync(request, new ResponseListener() { @Override public void onSuccess(Response response) { try { - future.onResponse(EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8)); + future.onResponse(EntityUtils.toString(response.getEntity())); } catch (IOException e) { future.onFailure(e); } diff --git a/distribution/src/bin/elasticsearch-service.bat b/distribution/src/bin/elasticsearch-service.bat index a1d0f04560e..e03581ddfa6 100644 --- a/distribution/src/bin/elasticsearch-service.bat +++ b/distribution/src/bin/elasticsearch-service.bat @@ -173,7 +173,7 @@ if not "%SERVICE_USERNAME%" == "" ( ) ) -"%EXECUTABLE%" //IS//%SERVICE_ID% --Startup %ES_START_TYPE% --StopTimeout %ES_STOP_TIMEOUT% --StartClass org.elasticsearch.bootstrap.Elasticsearch --StartMethod main ++StartParams --quiet --StopClass org.elasticsearch.bootstrap.Elasticsearch --StopMethod close --Classpath "%ES_CLASSPATH%" --JvmMs %JVM_MS% --JvmMx %JVM_MX% --JvmSs %JVM_SS% --JvmOptions %ES_JAVA_OPTS% ++JvmOptions %ES_PARAMS% %LOG_OPTS% --PidFile "%SERVICE_ID%.pid" --DisplayName "%SERVICE_DISPLAY_NAME%" --Description "%SERVICE_DESCRIPTION%" --Jvm "%%JAVA_HOME%%%JVM_DLL%" --StartMode jvm --StopMode jvm --StartPath "%ES_HOME%" %SERVICE_PARAMS% +"%EXECUTABLE%" //IS//%SERVICE_ID% --Startup %ES_START_TYPE% --StopTimeout %ES_STOP_TIMEOUT% --StartClass org.elasticsearch.bootstrap.Elasticsearch --StartMethod main ++StartParams --quiet --StopClass org.elasticsearch.bootstrap.Elasticsearch 
--StopMethod close --Classpath "%ES_CLASSPATH%" --JvmMs %JVM_MS% --JvmMx %JVM_MX% --JvmSs %JVM_SS% --JvmOptions %ES_JAVA_OPTS% ++JvmOptions %ES_PARAMS% %LOG_OPTS% --PidFile "%SERVICE_ID%.pid" --DisplayName "%SERVICE_DISPLAY_NAME%" --Description "%SERVICE_DESCRIPTION%" --Jvm "%%JAVA_HOME%%%JVM_DLL%" --StartMode jvm --StopMode jvm --StartPath "%ES_HOME%" %SERVICE_PARAMS% ++Environment HOSTNAME="%%COMPUTERNAME%%" if not errorlevel 1 goto installed echo Failed installing '%SERVICE_ID%' service diff --git a/distribution/src/bin/elasticsearch-translog b/distribution/src/bin/elasticsearch-translog deleted file mode 100755 index aa5bfb32df1..00000000000 --- a/distribution/src/bin/elasticsearch-translog +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -ES_MAIN_CLASS=org.elasticsearch.index.translog.TranslogToolCli \ - "`dirname "$0"`"/elasticsearch-cli \ - "$@" diff --git a/distribution/src/bin/elasticsearch-translog.bat b/distribution/src/bin/elasticsearch-translog.bat deleted file mode 100644 index 6a2e3046205..00000000000 --- a/distribution/src/bin/elasticsearch-translog.bat +++ /dev/null @@ -1,12 +0,0 @@ -@echo off - -setlocal enabledelayedexpansion -setlocal enableextensions - -set ES_MAIN_CLASS=org.elasticsearch.index.translog.TranslogToolCli -call "%~dp0elasticsearch-cli.bat" ^ - %%* ^ - || exit /b 1 - -endlocal -endlocal diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options index e486735eb8f..d8b651231cb 100644 --- a/distribution/src/config/jvm.options +++ b/distribution/src/config/jvm.options @@ -37,6 +37,14 @@ -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly +## G1GC Configuration +# NOTE: G1GC is only supported on JDK version 10 or later. +# To use G1GC uncomment the lines below. +# 10-:-XX:-UseConcMarkSweepGC +# 10-:-XX:-UseCMSInitiatingOccupancyOnly +# 10-:-XX:+UseG1GC +# 10-:-XX:InitiatingHeapOccupancyPercent=75 + ## optimizations # pre-touch memory pages used by the JVM during initialization diff --git a/docs/README.asciidoc b/docs/README.asciidoc index 13a07e92d68..89058849a98 100644 --- a/docs/README.asciidoc +++ b/docs/README.asciidoc @@ -19,9 +19,9 @@ are tests even if they don't have `// CONSOLE` but usually `// TEST` is used for its modifiers: * `// TEST[s/foo/bar/]`: Replace `foo` with `bar` in the generated test. This should be used sparingly because it makes the snippet "lie". Sometimes, - though, you can use it to make the snippet more clear more clear. Keep in - mind the that if there are multiple substitutions then they are applied in - the order that they are defined. + though, you can use it to make the snippet more clear. Keep in mind that + if there are multiple substitutions then they are applied in the order that + they are defined. * `// TEST[catch:foo]`: Used to expect errors in the requests. Replace `foo` with `request` to expect a 400 error, for example. If the snippet contains multiple requests then only the last request will expect the error. 
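To make the substitution-ordering rule above concrete, here is a hypothetical snippet (index names and substitution patterns invented for illustration). Because substitutions are applied in definition order, the second pattern only matches text already rewritten by the first; swapping the two lines would produce a different generated test:

--------------------------------------
GET /my_index/_search
// CONSOLE
// TEST[s/my_index/test_index/]
// TEST[s/test_index/sorted_index/]
--------------------------------------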
diff --git a/docs/build.gradle b/docs/build.gradle index 864567ba835..935149bdc84 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -100,6 +100,7 @@ buildRestTests.docs = fileTree(projectDir) { exclude 'reference/rollup/apis/delete-job.asciidoc' exclude 'reference/rollup/apis/get-job.asciidoc' exclude 'reference/rollup/apis/rollup-caps.asciidoc' + exclude 'reference/graph/explore.asciidoc' } listSnippets.docs = buildRestTests.docs diff --git a/docs/java-api/query-dsl/type-query.asciidoc b/docs/java-api/query-dsl/type-query.asciidoc index cbbc6666077..93c7bd76dfe 100644 --- a/docs/java-api/query-dsl/type-query.asciidoc +++ b/docs/java-api/query-dsl/type-query.asciidoc @@ -1,6 +1,8 @@ [[java-query-dsl-type-query]] ==== Type Query +deprecated[7.0.0, Types are being removed, prefer filtering on a field instead. For more information, please see {ref}/removal-of-types.html[Removal of mapping types].] + See {ref}/query-dsl-type-query.html[Type Query] ["source","java",subs="attributes,callouts,macros"] diff --git a/docs/java-rest/high-level/cluster/get_settings.asciidoc b/docs/java-rest/high-level/cluster/get_settings.asciidoc index 999bd92d791..407d33f8fc8 100644 --- a/docs/java-rest/high-level/cluster/get_settings.asciidoc +++ b/docs/java-rest/high-level/cluster/get_settings.asciidoc @@ -1,16 +1,22 @@ -[[java-rest-high-cluster-get-settings]] +-- +:api: get-settings +:request: ClusterGetSettingsRequest +:response: ClusterGetSettingsResponse +-- + +[id="{upid}-{api}"] === Cluster Get Settings API The Cluster Get Settings API allows to get the cluster wide settings. -[[java-rest-high-cluster-get-settings-request]] +[id="{upid}-{api}-request"] ==== Cluster Get Settings Request -A `ClusterGetSettingsRequest`: +A +{request}+: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- ==== Optional arguments @@ -18,75 +24,40 @@ The following arguments can optionally be provided: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-request-includeDefaults] +include-tagged::{doc-tests-file}[{api}-request-includeDefaults] -------------------------------------------------- <1> By default only those settings that were explicitly set are returned. Setting this to true also returns the default settings. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-request-local] +include-tagged::{doc-tests-file}[{api}-request-local] -------------------------------------------------- <1> By default the request goes to the master of the cluster to get the latest results. If local is specified it gets the results from whichever node the request goes to. 
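The `includeDefaults` and `local` flags described above might be set as follows. This is an untested sketch, not one of the tested snippets, and it assumes the standard setters on the request class:

["source","java"]
--------------------------------------------------
ClusterGetSettingsRequest request = new ClusterGetSettingsRequest();
request.includeDefaults(true); // also return the default settings
request.local(true);           // answer from the contacted node instead of the master
--------------------------------------------------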
["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-request-masterTimeout] +include-tagged::{doc-tests-file}[{api}-request-masterTimeout] -------------------------------------------------- <1> Timeout to connect to the master node as a `TimeValue` <2> Timeout to connect to the master node as a `String` -[[java-rest-high-cluster-get-settings-sync]] -==== Synchronous Execution +include::../execution.asciidoc[] -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-execute] --------------------------------------------------- -<1> Execute the request and get back the response in a `ClusterGetSettingsResponse` object. - -[[java-rest-high-cluster-get-settings-async]] -==== Asynchronous Execution - -The asynchronous execution of a cluster get settings requires both the -`ClusterGetSettingsRequest` instance and an `ActionListener` instance to be -passed to the asynchronous method: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-execute-async] --------------------------------------------------- -<1> The `ClusterGetSettingsRequest` to execute and the `ActionListener` -to use when the execution completes - -The asynchronous method does not block and returns immediately. Once it is -completed the `ActionListener` is called back using the `onResponse` method -if the execution successfully completed or using the `onFailure` method if -it failed. - -A typical listener for `ClusterGetSettingsResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-execute-listener] --------------------------------------------------- -<1> Called when the execution is successfully completed. The response is -provided as an argument -<2> Called in case of a failure. The raised exception is provided as an argument - -[[java-rest-high-cluster-get-settings-response]] +[id="{upid}-{api}-response"] ==== Cluster Get Settings Response -The returned `ClusterGetSettingsResponse` allows to retrieve information about the +The returned +{response}+ allows to retrieve information about the executed operation as follows: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> Get the persistent settings. <2> Get the transient settings. <3> Get the default settings (returns empty settings if `includeDefaults` was not set to `true`). <4> Get the value as a `String` for a particular setting. The order of searching is first in `persistentSettings` then in `transientSettings` and finally, if not found in either, in `defaultSettings`. 
+ diff --git a/docs/java-rest/high-level/cluster/health.asciidoc b/docs/java-rest/high-level/cluster/health.asciidoc index 192880849e2..06163fca52d 100644 --- a/docs/java-rest/high-level/cluster/health.asciidoc +++ b/docs/java-rest/high-level/cluster/health.asciidoc @@ -1,16 +1,22 @@ -[[java-rest-high-cluster-health]] +-- +:api: health +:request: ClusterHealthRequest +:response: ClusterHealthResponse +-- + +[id="{upid}-{api}"] === Cluster Health API The Cluster Health API allows getting cluster health. -[[java-rest-high-cluster-health-request]] +[id="{upid}-{api}-request"] ==== Cluster Health Request -A `ClusterHealthRequest`: +A +{request}+: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- There are no required parameters. By default, the client will check all indices and will not wait for any events. @@ -21,14 +27,14 @@ Indices which should be checked can be passed in the constructor: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-indices-ctr] +include-tagged::{doc-tests-file}[{api}-request-indices-ctr] -------------------------------------------------- Or using the corresponding setter method: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-indices-setter] +include-tagged::{doc-tests-file}[{api}-request-indices-setter] -------------------------------------------------- ==== Other parameters @@ -37,53 +43,53 @@ Other parameters can be passed only through setter methods: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-timeout] +include-tagged::{doc-tests-file}[{api}-request-timeout] -------------------------------------------------- <1> Timeout for the request as a `TimeValue`. Defaults to 30 seconds <2> As a `String` ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-master-timeout] +include-tagged::{doc-tests-file}[{api}-request-master-timeout] -------------------------------------------------- <1> Timeout to connect to the master node as a `TimeValue`. Defaults to the same as `timeout` <2> As a `String` ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-wait-status] +include-tagged::{doc-tests-file}[{api}-request-wait-status] -------------------------------------------------- <1> The status to wait (e.g. `green`, `yellow`, or `red`). Accepts a `ClusterHealthStatus` value. <2> Using predefined method ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-wait-events] +include-tagged::{doc-tests-file}[{api}-request-wait-events] -------------------------------------------------- <1> The priority of the events to wait for. Accepts a `Priority` value. 
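For orientation, the parameters above might be combined on a single request like this (an untested sketch assuming the request's standard setters):

["source","java"]
--------------------------------------------------
ClusterHealthRequest request = new ClusterHealthRequest("index1");
request.timeout(TimeValue.timeValueSeconds(30));   // overall request timeout
request.waitForStatus(ClusterHealthStatus.YELLOW); // block until at least yellow
request.waitForEvents(Priority.NORMAL);            // wait for pending NORMAL-priority events
--------------------------------------------------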
["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-level] +include-tagged::{doc-tests-file}[{api}-request-level] -------------------------------------------------- -<1> The level of detail of the returned health information. Accepts a `ClusterHealthRequest.Level` value. +<1> The level of detail of the returned health information. Accepts a +{request}.Level+ value. Default value is `cluster`. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-wait-relocation] +include-tagged::{doc-tests-file}[{api}-request-wait-relocation] -------------------------------------------------- <1> Wait for 0 relocating shards. Defaults to `false` ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-wait-initializing] +include-tagged::{doc-tests-file}[{api}-request-wait-initializing] -------------------------------------------------- <1> Wait for 0 initializing shards. Defaults to `false` ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-wait-nodes] +include-tagged::{doc-tests-file}[{api}-request-wait-nodes] -------------------------------------------------- <1> Wait for `N` nodes in the cluster. Defaults to `0` <2> Using `>=N`, `<=N`, `>N` and ` Wait for all shards to be active in the cluster @@ -99,77 +105,42 @@ include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-wai ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-local] +include-tagged::{doc-tests-file}[{api}-request-local] -------------------------------------------------- <1> Non-master node can be used for this request. Defaults to `false` -[[java-rest-high-cluster-health-sync]] -==== Synchronous Execution +include::../execution.asciidoc[] -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-execute] --------------------------------------------------- - -[[java-rest-high-cluster-health-async]] -==== Asynchronous Execution - -The asynchronous execution of a cluster health request requires both the -`ClusterHealthRequest` instance and an `ActionListener` instance to be -passed to the asynchronous method: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-execute-async] --------------------------------------------------- -<1> The `ClusterHealthRequest` to execute and the `ActionListener` to use -when the execution completes - -The asynchronous method does not block and returns immediately. Once it is -completed the `ActionListener` is called back using the `onResponse` method -if the execution successfully completed or using the `onFailure` method if -it failed. 
- -A typical listener for `ClusterHealthResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-execute-listener] --------------------------------------------------- -<1> Called when the execution is successfully completed. The response is -provided as an argument -<2> Called in case of a failure. The raised exception is provided as an argument - -[[java-rest-high-cluster-health-response]] +[id="{upid}-{api}-response"] ==== Cluster Health Response -The returned `ClusterHealthResponse` contains the next information about the +The returned +{response}+ contains the next information about the cluster: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-general] +include-tagged::{doc-tests-file}[{api}-response-general] -------------------------------------------------- <1> Name of the cluster <2> Cluster status (`green`, `yellow` or `red`) ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-request-status] +include-tagged::{doc-tests-file}[{api}-response-request-status] -------------------------------------------------- <1> Whether request was timed out while processing <2> Status of the request (`OK` or `REQUEST_TIMEOUT`). Other errors will be thrown as exceptions ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-nodes] +include-tagged::{doc-tests-file}[{api}-response-nodes] -------------------------------------------------- <1> Number of nodes in the cluster <2> Number of data nodes in the cluster ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-shards] +include-tagged::{doc-tests-file}[{api}-response-shards] -------------------------------------------------- <1> Number of active shards <2> Number of primary active shards @@ -181,7 +152,7 @@ include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-sh ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-task] +include-tagged::{doc-tests-file}[{api}-response-task] -------------------------------------------------- <1> Maximum wait time of all tasks in the queue <2> Number of currently pending tasks @@ -189,18 +160,18 @@ include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-ta ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-indices] +include-tagged::{doc-tests-file}[{api}-response-indices] -------------------------------------------------- <1> Detailed information about indices in the cluster ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-index] +include-tagged::{doc-tests-file}[{api}-response-index] -------------------------------------------------- <1> 
Detailed information about a specific index ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-shard-details] +include-tagged::{doc-tests-file}[{api}-response-shard-details] -------------------------------------------------- <1> Detailed information about a specific shard \ No newline at end of file diff --git a/docs/java-rest/high-level/cluster/put_settings.asciidoc b/docs/java-rest/high-level/cluster/put_settings.asciidoc index dc9b1679d47..bc9abc62456 100644 --- a/docs/java-rest/high-level/cluster/put_settings.asciidoc +++ b/docs/java-rest/high-level/cluster/put_settings.asciidoc @@ -1,16 +1,22 @@ -[[java-rest-high-cluster-put-settings]] +-- +:api: put-settings +:request: ClusterUpdateSettingsRequest +:response: ClusterUpdateSettingsResponse +-- + +[id="{upid}-{api}"] === Cluster Update Settings API The Cluster Update Settings API allows to update cluster wide settings. -[[java-rest-high-cluster-put-settings-request]] +[id="{upid}-{api}-request"] ==== Cluster Update Settings Request -A `ClusterUpdateSettingsRequest`: +A +{request}+: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-request] +include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- ==== Cluster Settings @@ -18,7 +24,7 @@ At least one setting to be updated must be provided: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-request-cluster-settings] +include-tagged::{doc-tests-file}[{api}-request-cluster-settings] -------------------------------------------------- <1> Sets the transient settings to be applied <2> Sets the persistent setting to be applied @@ -28,26 +34,26 @@ The settings to be applied can be provided in different ways: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-create-settings] +include-tagged::{doc-tests-file}[{api}-create-settings] -------------------------------------------------- <1> Creates a transient setting as `Settings` <2> Creates a persistent setting as `Settings` ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-settings-builder] +include-tagged::{doc-tests-file}[{api}-settings-builder] -------------------------------------------------- <1> Settings provided as `Settings.Builder` ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-settings-source] +include-tagged::{doc-tests-file}[{api}-settings-source] -------------------------------------------------- <1> Settings provided as `String` ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-settings-map] +include-tagged::{doc-tests-file}[{api}-settings-map] -------------------------------------------------- <1> Settings provided as a `Map` @@ -56,7 +62,7 @@ The following arguments can optionally be provided: 
["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-request-timeout] +include-tagged::{doc-tests-file}[{api}-request-timeout] -------------------------------------------------- <1> Timeout to wait for the all the nodes to acknowledge the settings were applied as a `TimeValue` @@ -65,58 +71,23 @@ as a `String` ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-request-masterTimeout] +include-tagged::{doc-tests-file}[{api}-request-masterTimeout] -------------------------------------------------- <1> Timeout to connect to the master node as a `TimeValue` <2> Timeout to connect to the master node as a `String` -[[java-rest-high-cluster-put-settings-sync]] -==== Synchronous Execution +include::../execution.asciidoc[] -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-execute] --------------------------------------------------- - -[[java-rest-high-cluster-put-settings-async]] -==== Asynchronous Execution - -The asynchronous execution of a cluster update settings requires both the -`ClusterUpdateSettingsRequest` instance and an `ActionListener` instance to be -passed to the asynchronous method: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-execute-async] --------------------------------------------------- -<1> The `ClusterUpdateSettingsRequest` to execute and the `ActionListener` -to use when the execution completes - -The asynchronous method does not block and returns immediately. Once it is -completed the `ActionListener` is called back using the `onResponse` method -if the execution successfully completed or using the `onFailure` method if -it failed. - -A typical listener for `ClusterUpdateSettingsResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-execute-listener] --------------------------------------------------- -<1> Called when the execution is successfully completed. The response is -provided as an argument -<2> Called in case of a failure. 
The raised exception is provided as an argument - -[[java-rest-high-cluster-put-settings-response]] +[id="{upid}-{api}-response"] ==== Cluster Update Settings Response -The returned `ClusterUpdateSettings` allows to retrieve information about the +The returned +{response}+ allows to retrieve information about the executed operation as follows: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-response] +include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> Indicates whether all of the nodes have acknowledged the request <2> Indicates which transient settings have been applied -<3> Indicates which persistent settings have been applied \ No newline at end of file +<3> Indicates which persistent settings have been applied diff --git a/docs/java-rest/high-level/document/index.asciidoc b/docs/java-rest/high-level/document/index.asciidoc index b64e19d1d33..c5800e15056 100644 --- a/docs/java-rest/high-level/document/index.asciidoc +++ b/docs/java-rest/high-level/document/index.asciidoc @@ -1,14 +1,20 @@ -[[java-rest-high-document-index]] +-- +:api: index +:request: IndexRequest +:response: IndexResponse +-- + +[id="{upid}-{api}"] === Index API -[[java-rest-high-document-index-request]] +[id="{upid}-{api}-request"] ==== Index Request -An `IndexRequest` requires the following arguments: +An +{request}+ requires the following arguments: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-request-string] +include-tagged::{doc-tests-file}[{api}-request-string] -------------------------------------------------- <1> Index <2> Type @@ -21,21 +27,21 @@ The document source can be provided in different ways in addition to the ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-request-map] +include-tagged::{doc-tests-file}[{api}-request-map] -------------------------------------------------- <1> Document source provided as a `Map` which gets automatically converted to JSON format ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-request-xcontent] +include-tagged::{doc-tests-file}[{api}-request-xcontent] -------------------------------------------------- <1> Document source provided as an `XContentBuilder` object, the Elasticsearch built-in helpers to generate JSON content ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-request-shortcut] +include-tagged::{doc-tests-file}[{api}-request-shortcut] -------------------------------------------------- <1> Document source provided as `Object` key-pairs, which gets converted to JSON format @@ -45,95 +51,60 @@ The following arguments can optionally be provided: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-request-routing] +include-tagged::{doc-tests-file}[{api}-request-routing] -------------------------------------------------- <1> Routing value ["source","java",subs="attributes,callouts,macros"] 
--------------------------------------------------
-include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-request-timeout]
+include-tagged::{doc-tests-file}[{api}-request-timeout]
--------------------------------------------------
<1> Timeout to wait for primary shard to become available as a `TimeValue`
<2> Timeout to wait for primary shard to become available as a `String`

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
-include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-request-refresh]
+include-tagged::{doc-tests-file}[{api}-request-refresh]
--------------------------------------------------
<1> Refresh policy as a `WriteRequest.RefreshPolicy` instance
<2> Refresh policy as a `String`

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
-include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-request-version]
+include-tagged::{doc-tests-file}[{api}-request-version]
--------------------------------------------------
<1> Version

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
-include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-request-version-type]
+include-tagged::{doc-tests-file}[{api}-request-version-type]
--------------------------------------------------
<1> Version type

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
-include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-request-op-type]
+include-tagged::{doc-tests-file}[{api}-request-op-type]
--------------------------------------------------
<1> Operation type provided as a `DocWriteRequest.OpType` value
<2> Operation type provided as a `String`: can be `create` or `update` (default)

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
-include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-request-pipeline]
+include-tagged::{doc-tests-file}[{api}-request-pipeline]
--------------------------------------------------
<1> The name of the ingest pipeline to be executed before indexing the document

-[[java-rest-high-document-index-sync]]
-==== Synchronous Execution
+include::../execution.asciidoc[]

-["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
-include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-execute]
---------------------------------------------------
-
-[[java-rest-high-document-index-async]]
-==== Asynchronous Execution
-
-The asynchronous execution of an index request requires both the `IndexRequest`
-instance and an `ActionListener` instance to be passed to the asynchronous
-method:
-
-["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
-include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-execute-async]
---------------------------------------------------
-<1> The `IndexRequest` to execute and the `ActionListener` to use when
-the execution completes
-
-The asynchronous method does not block and returns immediately. Once it is
-completed the `ActionListener` is called back using the `onResponse` method
-if the execution successfully completed or using the `onFailure` method if
-it failed.
-
-A typical listener for `IndexResponse` looks like:
-
-["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
-include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-execute-listener]
---------------------------------------------------
-<1> Called when the execution is successfully completed. The response is
-provided as an argument
-<2> Called in case of failure. The raised exception is provided as an argument
-
-[[java-rest-high-document-index-response]]
+[id="{upid}-{api}-response"]
==== Index Response

-The returned `IndexResponse` allows to retrieve information about the executed
+The returned +{response}+ allows to retrieve information about the executed
operation as follows:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
-include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-response]
+include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> Handle (if needed) the case where the document was created for the first
time
@@ -148,7 +119,7 @@ be thrown:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
-include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-conflict]
+include-tagged::{doc-tests-file}[{api}-conflict]
--------------------------------------------------
<1> The raised exception indicates that a version conflict error was returned
@@ -157,6 +128,6 @@ same index, type and id already existed:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
-include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-optype]
+include-tagged::{doc-tests-file}[{api}-optype]
--------------------------------------------------
<1> The raised exception indicates that a version conflict error was returned
diff --git a/docs/java-rest/high-level/document/rethrottle.asciidoc b/docs/java-rest/high-level/document/rethrottle.asciidoc
new file mode 100644
index 00000000000..9f6fd69dbcd
--- /dev/null
+++ b/docs/java-rest/high-level/document/rethrottle.asciidoc
@@ -0,0 +1,73 @@
+[[java-rest-high-document-rethrottle]]
+=== Rethrottle API
+
+[[java-rest-high-document-rethrottle-request]]
+==== Rethrottle Request
+
+A `RethrottleRequest` can be used to change the current throttling on a running
+reindex, update-by-query or delete-by-query task or to disable throttling of
+the task entirely. It requires the id of the task to change.
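Before the tested snippets that follow, here is an untested sketch of the overall shape. It assumes the client-side `RethrottleRequest` constructors and a known `TaskId`; the node and task values are invented for illustration:

["source","java"]
--------------------------------------------------
TaskId taskId = new TaskId("node-id", 42);                          // hypothetical task
RethrottleRequest disable = new RethrottleRequest(taskId);          // disable throttling
RethrottleRequest throttle = new RethrottleRequest(taskId, 100.0f); // 100 requests per second
client.reindexRethrottle(throttle, RequestOptions.DEFAULT);         // for a reindex task
--------------------------------------------------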
+
+In its simplest form, you can use it to disable throttling of a running
+task using the following:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[rethrottle-disable-request]
+--------------------------------------------------
+<1> Create a `RethrottleRequest` that disables throttling for a specific task id
+
+By providing a `requestsPerSecond` argument, the request will change the
+existing task throttling to the specified value:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[rethrottle-request]
+--------------------------------------------------
+<1> Request to change the throttling of a task to 100 requests per second
+
+The rethrottling request can be executed by using one of the three appropriate
+methods depending on whether a reindex, update-by-query or delete-by-query task
+should be rethrottled:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[rethrottle-request-execution]
+--------------------------------------------------
+<1> Execute reindex rethrottling request
+<2> The same for update-by-query
+<3> The same for delete-by-query
+
+[[java-rest-high-document-rethrottle-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of a rethrottle request requires both the `RethrottleRequest`
+instance and an `ActionListener` instance to be passed to the asynchronous
+method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[rethrottle-execute-async]
+--------------------------------------------------
+<1> Execute reindex rethrottling asynchronously
+<2> The same for update-by-query
+<3> The same for delete-by-query
+
+The asynchronous method does not block and returns immediately.
+Once it is completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed. A typical listener looks like this:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[rethrottle-request-async-listener]
+--------------------------------------------------
+<1> Code executed when the request is successfully completed
+<2> Code executed when the request fails with an exception
+
+[[java-rest-high-document-rethrottle-response]]
+==== Rethrottle Response
+
+Rethrottling returns the task that has been rethrottled in the form of a
+`ListTasksResponse`. The structure of this response object is described in detail
+in <>.
diff --git a/docs/java-rest/high-level/execution.asciidoc b/docs/java-rest/high-level/execution.asciidoc
new file mode 100644
index 00000000000..fc4f4c0ec60
--- /dev/null
+++ b/docs/java-rest/high-level/execution.asciidoc
@@ -0,0 +1,48 @@
+////
+This file is included by every high level rest client API documentation page
+so we don't have to copy and paste the same asciidoc over and over again. We
+*do* have to copy and paste the same Java tests over and over again. For now
+this is intentional because it forces us to *write* and execute the tests
+which, while a bit ceremonial, does force us to cover these calls in *some*
+test.
+////
+
+[id="{upid}-{api}-sync"]
+==== Synchronous Execution
+
+When executing a +{request}+ in the following manner, the client waits
+for the +{response}+ to be returned before continuing with code execution:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests-file}[{api}-execute]
+--------------------------------------------------
+
+[id="{upid}-{api}-async"]
+==== Asynchronous Execution
+
+Executing a +{request}+ can also be done in an asynchronous fashion so that
+the client can return directly. Users need to specify how the response or
+potential failures will be handled by passing the request and a listener to the
+asynchronous {api} method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests-file}[{api}-execute-async]
+--------------------------------------------------
+<1> The +{request}+ to execute and the `ActionListener` to use when
+the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for +{response}+ looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests-file}[{api}-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed.
+<2> Called when the whole +{request}+ fails.
\ No newline at end of file
diff --git a/docs/java-rest/high-level/index.asciidoc b/docs/java-rest/high-level/index.asciidoc
index bc4c2dd89bb..a15967e9ad7 100644
--- a/docs/java-rest/high-level/index.asciidoc
+++ b/docs/java-rest/high-level/index.asciidoc
@@ -1,4 +1,6 @@
-[[java-rest-high]]
+:mainid: java-rest-high
+
+[id="{mainid}"]
= Java High Level REST Client

[partintro]
@@ -31,3 +33,4 @@ include::migration.asciidoc[]
include::../license.asciidoc[]

:doc-tests!:
+:mainid!:
diff --git a/docs/java-rest/high-level/java-builders.asciidoc b/docs/java-rest/high-level/java-builders.asciidoc
index 48db23c30cb..89f2de5fa9f 100644
--- a/docs/java-rest/high-level/java-builders.asciidoc
+++ b/docs/java-rest/high-level/java-builders.asciidoc
@@ -10,7 +10,7 @@ The query builders are used to create the query to execute within a search reque
is a query builder for every type of query supported by the Query DSL. Each query builder
implements the `QueryBuilder` interface and allows to set the specific options for a given
type of query. Once created, the `QueryBuilder` object can be set as the query parameter of
-`SearchSourceBuilder`. The <>
+`SearchSourceBuilder`. The <>
page shows an example of how to build a full search request using `SearchSourceBuilder` and
`QueryBuilder` objects. The <> page gives a list of all available search queries with
their corresponding `QueryBuilder` objects
@@ -24,7 +24,7 @@ aggregation (or pipeline aggregation) supported by Elasticsearch. All builders e
`AggregationBuilder` class (or `PipelineAggregationBuilder` class). Once created, `AggregationBuilder`
objects can be set as the aggregation parameter of `SearchSourceBuilder`. There is an example
of how `AggregationBuilder` objects are used with `SearchSourceBuilder` objects to define the aggregations
-to compute with a search query in <> page.
+to compute with a search query in <> page.
The <> page gives a list of all available aggregations with their corresponding
`AggregationBuilder` objects and `AggregationBuilders` helper methods.
diff --git a/docs/java-rest/high-level/ml/start-datafeed.asciidoc b/docs/java-rest/high-level/ml/start-datafeed.asciidoc
new file mode 100644
index 00000000000..6bef621562e
--- /dev/null
+++ b/docs/java-rest/high-level/ml/start-datafeed.asciidoc
@@ -0,0 +1,71 @@
+[[java-rest-high-x-pack-ml-start-datafeed]]
+=== Start Datafeed API
+
+The Start Datafeed API provides the ability to start a {ml} datafeed in the cluster.
+It accepts a `StartDatafeedRequest` object and responds
+with a `StartDatafeedResponse` object.
+
+[[java-rest-high-x-pack-ml-start-datafeed-request]]
+==== Start Datafeed Request
+
+A `StartDatafeedRequest` object is created referencing a non-null `datafeedId`.
+All other fields are optional for the request.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-start-datafeed-request]
+--------------------------------------------------
+<1> Constructing a new request referencing an existing `datafeedId`
+
+==== Optional Arguments
+
+The following arguments are optional.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-start-datafeed-request-options]
+--------------------------------------------------
+<1> Set when the datafeed should end; the value is exclusive.
+It may be epoch seconds, epoch milliseconds, or an ISO 8601 string.
+"now" is a special value that indicates the current time.
+If you do not specify an end time, the datafeed runs continuously.
+<2> Set when the datafeed should start; the value is inclusive.
+It may be epoch seconds, epoch milliseconds, or an ISO 8601 string.
+If you do not specify a start time and the datafeed is associated with a new job,
+the analysis starts from the earliest time for which data is available.
+<3> Set the timeout for the request
+
+[[java-rest-high-x-pack-ml-start-datafeed-execution]]
+==== Execution
+
+The request can be executed through the `MachineLearningClient` contained
+in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-start-datafeed-execute]
+--------------------------------------------------
+<1> Did the datafeed successfully start?
+
+[[java-rest-high-x-pack-ml-start-datafeed-execution-async]]
+==== Asynchronous Execution
+
+The request can also be executed asynchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-start-datafeed-execute-async]
+--------------------------------------------------
+<1> The `StartDatafeedRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The method does not block and returns immediately. The passed `ActionListener` is used
+to notify the caller of completion.
+A typical `ActionListener` for `StartDatafeedResponse` may look like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-start-datafeed-listener]
+--------------------------------------------------
+<1> `onResponse` is called back when the action is completed successfully
+<2> `onFailure` is called back when some unexpected error occurs
diff --git a/docs/java-rest/high-level/ml/stop-datafeed.asciidoc b/docs/java-rest/high-level/ml/stop-datafeed.asciidoc
new file mode 100644
index 00000000000..4e07d9a2e19
--- /dev/null
+++ b/docs/java-rest/high-level/ml/stop-datafeed.asciidoc
@@ -0,0 +1,66 @@
+[[java-rest-high-x-pack-ml-stop-datafeed]]
+=== Stop Datafeed API
+
+The Stop Datafeed API provides the ability to stop a {ml} datafeed in the cluster.
+It accepts a `StopDatafeedRequest` object and responds
+with a `StopDatafeedResponse` object.
+
+[[java-rest-high-x-pack-ml-stop-datafeed-request]]
+==== Stop Datafeed Request
+
+A `StopDatafeedRequest` object is created referencing any number of non-null `datafeedId` entries.
+Wildcards and `_all` are also accepted.
+All other fields are optional for the request.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-stop-datafeed-request]
+--------------------------------------------------
+<1> Constructing a new request referencing existing `datafeedId` entries.
+
+==== Optional Arguments
+
+The following arguments are optional.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-stop-datafeed-request-options]
+--------------------------------------------------
+<1> Whether to ignore if a wildcard expression matches no datafeeds (this includes the `_all` string).
+<2> If true, the datafeed is stopped forcefully.
+<3> Controls the amount of time to wait until a datafeed stops. The default value is 20 seconds.
+
+[[java-rest-high-x-pack-ml-stop-datafeed-execution]]
+==== Execution
+
+The request can be executed through the `MachineLearningClient` contained
+in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-stop-datafeed-execute]
+--------------------------------------------------
+<1> Did the datafeed successfully stop?
+
+[[java-rest-high-x-pack-ml-stop-datafeed-execution-async]]
+==== Asynchronous Execution
+
+The request can also be executed asynchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-stop-datafeed-execute-async]
+--------------------------------------------------
+<1> The `StopDatafeedRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The method does not block and returns immediately. The passed `ActionListener` is used
+to notify the caller of completion.
A typical `ActionListener` for `StopDatafeedResponse` may +look like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-stop-datafeed-listener] +-------------------------------------------------- +<1> `onResponse` is called back when the action is completed successfully +<2> `onFailure` is called back when some unexpected error occurs diff --git a/docs/java-rest/high-level/rollup/get_job.asciidoc b/docs/java-rest/high-level/rollup/get_job.asciidoc new file mode 100644 index 00000000000..68733113e53 --- /dev/null +++ b/docs/java-rest/high-level/rollup/get_job.asciidoc @@ -0,0 +1,71 @@ +[[java-rest-high-x-pack-rollup-get-job]] +=== Get Rollup Job API + +The Get Rollup Job API can be used to get one or all rollup jobs from the +cluster. It accepts a `GetRollupJobRequest` object as a request and returns +a `GetRollupJobResponse`. + +[[java-rest-high-x-pack-rollup-get-rollup-job-request]] +==== Get Rollup Job Request + +A `GetRollupJobRequest` can be built without any parameters to get all of the +rollup jobs or with a job name to get a single job: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-get-rollup-job-request] +-------------------------------------------------- +<1> Gets all jobs. +<2> Gets `job_1`. + +[[java-rest-high-x-pack-rollup-get-rollup-job-execution]] +==== Execution + +The Get Rollup Job API can be executed through a `RollupClient` +instance. Such an instance can be retrieved from a `RestHighLevelClient` +using the `rollup()` method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-get-rollup-job-execute] +-------------------------------------------------- + +[[java-rest-high-x-pack-rollup-get-rollup-job-response]] +==== Response + +The returned `GetRollupJobResponse` includes a `JobWrapper` per returned job +which contains the configuration of the job, the job's current status, and +statistics about the job's past execution. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-get-rollup-job-response] +-------------------------------------------------- +<1> We only asked for a single job + +[[java-rest-high-x-pack-rollup-get-rollup-job-async]] +==== Asynchronous Execution + +This request can be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-get-rollup-job-execute-async] +-------------------------------------------------- +<1> The `GetRollupJobRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed, the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed.
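The following hypothetical sketch shows both request variants together; the package name, the `getRollupJob` method, `getJobs()`, and the `JobWrapper` accessors are assumptions based on the description above, not verified signatures.

["source","java"]
--------------------------------------------------
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.rollup.GetRollupJobRequest;
import org.elasticsearch.client.rollup.GetRollupJobResponse;

GetRollupJobRequest getAll = new GetRollupJobRequest();        // no parameters: all rollup jobs
GetRollupJobRequest getOne = new GetRollupJobRequest("job_1"); // a single job by name

GetRollupJobResponse response =
        client.rollup().getRollupJob(getOne, RequestOptions.DEFAULT);
// Each JobWrapper bundles the job's configuration, current status, and stats.
response.getJobs().forEach(wrapper -> System.out.println(wrapper.getJob().getId()));
--------------------------------------------------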
+ +A typical listener for `GetRollupJobResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-get-rollup-job-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument diff --git a/docs/java-rest/high-level/search/search.asciidoc b/docs/java-rest/high-level/search/search.asciidoc index 3e9472ff2cb..e2bcfda79e6 100644 --- a/docs/java-rest/high-level/search/search.asciidoc +++ b/docs/java-rest/high-level/search/search.asciidoc @@ -1,10 +1,16 @@ -[[java-rest-high-search]] +-- +:api: search +:request: SearchRequest +:response: SearchResponse +-- + +[id="{upid}-{api}"] === Search API -[[java-rest-high-document-search-request]] +[id="{upid}-{api}-request"] ==== Search Request -The `SearchRequest` is used for any operation that has to do with searching +The +{request}+ is used for any operation that has to do with searching documents, aggregations, suggestions and also offers ways of requesting highlighting on the resulting documents. @@ -12,7 +18,7 @@ In its most basic form, we can add a query to the request: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-basic] +include-tagged::{doc-tests-file}[{api}-request-basic] -------------------------------------------------- <1> Creates the `SearchRequest`. Without arguments this runs against all indices. @@ -20,14 +26,14 @@ include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-basic] <3> Add a `match_all` query to the `SearchSourceBuilder`. <4> Add the `SearchSourceBuilder` to the `SearchRequest`.
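For orientation, here is the same basic flow written out inline; a minimal sketch, assuming an already initialized `RestHighLevelClient` named `client` (the tested snippet lives behind the include above).

["source","java"]
--------------------------------------------------
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

SearchRequest searchRequest = new SearchRequest();            // no arguments: search all indices
SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();
sourceBuilder.query(QueryBuilders.matchAllQuery());           // add a match_all query
searchRequest.source(sourceBuilder);                          // attach the source to the request

SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
--------------------------------------------------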
-[[java-rest-high-search-request-optional]] +[id="{upid}-{api}-request-optional"] ===== Optional arguments -Let's first look at some of the optional arguments of a `SearchRequest`: +Let's first look at some of the optional arguments of a +{request}+: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-indices-types] +include-tagged::{doc-tests-file}[{api}-request-indices-types] -------------------------------------------------- <1> Restricts the request to an index <2> Limits the request to a type There are a couple of other interesting optional parameters: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-routing] +include-tagged::{doc-tests-file}[{api}-request-routing] -------------------------------------------------- <1> Set a routing parameter ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-indicesOptions] +include-tagged::{doc-tests-file}[{api}-request-indicesOptions] -------------------------------------------------- <1> Setting `IndicesOptions` controls how unavailable indices are resolved and how wildcard expressions are expanded ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-preference] +include-tagged::{doc-tests-file}[{api}-request-preference] -------------------------------------------------- <1> Use the preference parameter, e.g. to prefer local shard copies when executing the search. The default is to randomize across shards. @@ -65,7 +71,7 @@ Here are a few examples of some common options: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-source-basics] +include-tagged::{doc-tests-file}[{api}-source-basics] -------------------------------------------------- <1> Create a `SearchSourceBuilder` with default options. <2> Set the query. Can be any type of `QueryBuilder` @@ -77,14 +83,14 @@ Defaults to 10. take. After this, the `SearchSourceBuilder` only needs to be added to the -`SearchRequest`: ++{request}+: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-source-setter] +include-tagged::{doc-tests-file}[{api}-source-setter] -------------------------------------------------- -[[java-rest-high-document-search-request-building-queries]] +[id="{upid}-{api}-request-building-queries"] ===== Building queries Search queries are created using `QueryBuilder` objects. A `QueryBuilder` exists @@ -94,7 +100,7 @@ A `QueryBuilder` can be created using its constructor: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-query-builder-ctor] +include-tagged::{doc-tests-file}[{api}-query-builder-ctor] -------------------------------------------------- <1> Create a full text {ref}/query-dsl-match-query.html[Match Query] that matches the text "kimchy" over the field "user".
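Pulling these pieces together, a hypothetical inline sketch (the index, routing value, and field names are illustrative; the match-query options it sets are the ones described in the callouts that follow):

["source","java"]
--------------------------------------------------
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

SearchRequest searchRequest = new SearchRequest("posts");         // restrict to the "posts" index
searchRequest.routing("routing");                                 // set a routing parameter
searchRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); // resolve unavailable indices leniently
searchRequest.preference("_local");                               // prefer local shard copies

// A match query built via the QueryBuilders helper, with extra options:
MatchQueryBuilder matchQuery = QueryBuilders.matchQuery("user", "kimchy");
matchQuery.fuzziness(Fuzziness.AUTO);  // enable fuzzy matching
matchQuery.prefixLength(3);            // length of the non-fuzzy prefix
matchQuery.maxExpansions(10);          // cap the number of term expansions
--------------------------------------------------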
@@ -104,7 +110,7 @@ of the search query it creates: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-query-builder-options] +include-tagged::{doc-tests-file}[{api}-query-builder-options] -------------------------------------------------- <1> Enable fuzzy matching on the match query <2> Set the prefix length option on the match query @@ -117,7 +123,7 @@ This class provides helper methods that can be used to create `QueryBuilder` obj ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-query-builders] +include-tagged::{doc-tests-file}[{api}-query-builders] -------------------------------------------------- Whatever the method used to create it, the `QueryBuilder` object must be added @@ -125,10 +131,10 @@ to the `SearchSourceBuilder` as follows: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-query-setter] +include-tagged::{doc-tests-file}[{api}-query-setter] -------------------------------------------------- -The <> page gives a list of all available search queries with +The <<{upid}-query-builders, Building Queries>> page gives a list of all available search queries with their corresponding `QueryBuilder` objects and `QueryBuilders` helper methods. @@ -138,7 +144,7 @@ The `SearchSourceBuilder` allows to add one or more `SortBuilder` instances. The ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-source-sorting] +include-tagged::{doc-tests-file}[{api}-source-sorting] -------------------------------------------------- <1> Sort descending by `_score` (the default) <2> Also sort ascending by `_id` field @@ -149,17 +155,17 @@ By default, search requests return the contents of the document `_source` but li ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-source-filtering-off] +include-tagged::{doc-tests-file}[{api}-source-filtering-off] -------------------------------------------------- The method also accepts an array of one or more wildcard patterns to control which fields get included or excluded in a more fine grained way: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-source-filtering-includes] +include-tagged::{doc-tests-file}[{api}-source-filtering-includes] -------------------------------------------------- -[[java-rest-high-request-highlighting]] +[id="{upid}-{api}-request-highlighting"] ===== Requesting Highlighting Highlighting search results can be achieved by setting a `HighlightBuilder` on the @@ -168,7 +174,7 @@ fields by adding one or more `HighlightBuilder.Field` instances to a `HighlightB ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-highlighting] +include-tagged::{doc-tests-file}[{api}-request-highlighting] -------------------------------------------------- <1> Creates a new `HighlightBuilder` <2> Create a field highlighter for the `title` field @@ -179,9 +185,9 
@@ There are many options which are explained in detail in the Rest API documentati API parameters (e.g. `pre_tags`) are usually changed by setters with a similar name (e.g. `#preTags(String ...)`). -Highlighted text fragments can <> from the `SearchResponse`. +Highlighted text fragments can <<{upid}-{api}-response-highlighting,later be retrieved>> from the +{response}+. -[[java-rest-high-document-search-request-building-aggs]] +[id="{upid}-{api}-request-building-aggs"] ===== Requesting Aggregations Aggregations can be added to the search by first creating the appropriate @@ -191,13 +197,13 @@ sub-aggregation on the average age of employees in the company: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-aggregations] +include-tagged::{doc-tests-file}[{api}-request-aggregations] -------------------------------------------------- -The <> page gives a list of all available aggregations with +The <<{upid}-aggregation-builders, Building Aggregations>> page gives a list of all available aggregations with their corresponding `AggregationBuilder` objects and `AggregationBuilders` helper methods. -We will later see how to <> in the `SearchResponse`. +We will later see how to <<{upid}-{api}-response-aggs,access aggregations>> in the +{response}+. ===== Requesting Suggestions @@ -207,14 +213,14 @@ need to be added to the top level `SuggestBuilder`, which itself can be set on t ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-suggestion] +include-tagged::{doc-tests-file}[{api}-request-suggestion] -------------------------------------------------- <1> Creates a new `TermSuggestionBuilder` for the `user` field and the text `kmichy` <2> Adds the suggestion builder and names it `suggest_user` -We will later see how to <> from the -`SearchResponse`. +We will later see how to <<{upid}-{api}-response-suggestions,retrieve suggestions>> from the ++{response}+. ===== Profiling Queries and Aggregations @@ -223,56 +229,18 @@ a specific search request. in order to use it, the profile flag must be set to t ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-profiling] +include-tagged::{doc-tests-file}[{api}-request-profiling] -------------------------------------------------- -Once the `SearchRequest` is executed the corresponding `SearchResponse` will -<>. +Once the +{request}+ is executed the corresponding +{response}+ will +<<{upid}-{api}-response-profile,contain the profiling results>>. -[[java-rest-high-document-search-sync]] -==== Synchronous Execution +include::../execution.asciidoc[] -When executing a `SearchRequest` in the following manner, the client waits -for the `SearchResponse` to be returned before continuing with code execution: +[id="{upid}-{api}-response"] +==== {response} -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-execute] --------------------------------------------------- - -[[java-rest-high-document-search-async]] -==== Asynchronous Execution - -Executing a `SearchRequest` can also be done in an asynchronous fashion so that -the client can return directly. 
Users need to specify how the response or -potential failures will be handled by passing the request and a listeners to the -asynchronous search method: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-execute-async] --------------------------------------------------- -<1> The `SearchRequest` to execute and the `ActionListener` to use when -the execution completes - -The asynchronous method does not block and returns immediately. Once it is -completed the `ActionListener` is called back using the `onResponse` method -if the execution successfully completed or using the `onFailure` method if -it failed. - -A typical listener for `SearchResponse` looks like: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-execute-listener] --------------------------------------------------- -<1> Called when the execution is successfully completed. -<2> Called when the whole `SearchRequest` fails. - -[[java-rest-high-search-response]] -==== SearchResponse - -The `SearchResponse` that is returned by executing the search provides details +The +{response}+ that is returned by executing the search provides details about the search execution itself as well as access to the documents returned. First, there is useful information about the request execution itself, like the HTTP status code, execution time or whether the request terminated early or timed @@ -280,7 +248,7 @@ out: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-response-1] +include-tagged::{doc-tests-file}[{api}-response-1] -------------------------------------------------- Second, the response also provides information about the execution on the @@ -291,10 +259,10 @@ failures can also be handled by iterating over an array off ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-response-2] +include-tagged::{doc-tests-file}[{api}-response-2] -------------------------------------------------- -[[java-rest-high-retrieve-searchHits]] +[id="{upid}-{api}-response-search-hits"] ===== Retrieving SearchHits To get access to the returned documents, we need to first get the `SearchHits` @@ -302,7 +270,7 @@ contained in the response: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-hits-get] +include-tagged::{doc-tests-file}[{api}-hits-get] -------------------------------------------------- The `SearchHits` provides global information about all hits, like total number @@ -310,7 +278,7 @@ of hits or the maximum score: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-hits-info] +include-tagged::{doc-tests-file}[{api}-hits-info] -------------------------------------------------- Nested inside the `SearchHits` are the individual search results that can @@ -319,7 +287,7 @@ be iterated over: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-hits-singleHit] 
+include-tagged::{doc-tests-file}[{api}-hits-singleHit] -------------------------------------------------- The `SearchHit` provides access to basic information like index, type, docId and @@ -327,7 +295,7 @@ score of each search hit: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-hits-singleHit-properties] +include-tagged::{doc-tests-file}[{api}-hits-singleHit-properties] -------------------------------------------------- Furthermore, it lets you get back the document source, either as a simple @@ -338,34 +306,34 @@ cases need to be cast accordingly: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-hits-singleHit-source] +include-tagged::{doc-tests-file}[{api}-hits-singleHit-source] -------------------------------------------------- -[[java-rest-high-retrieve-highlighting]] +[id="{upid}-{api}-response-highlighting"] ===== Retrieving Highlighting -If <>, highlighted text fragments can be retrieved from each `SearchHit` in the result. The hit object offers +If <<{upid}-{api}-request-highlighting,requested>>, highlighted text fragments can be retrieved from each `SearchHit` in the result. The hit object offers access to a map of field names to `HighlightField` instances, each of which contains one or many highlighted text fragments: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-highlighting-get] +include-tagged::{doc-tests-file}[{api}-request-highlighting-get] -------------------------------------------------- <1> Get the highlighting for the `title` field <2> Get one or many fragments containing the highlighted field content -[[java-rest-high-retrieve-aggs]] +[id="{upid}-{api}-response-aggs"] ===== Retrieving Aggregations -Aggregations can be retrieved from the `SearchResponse` by first getting the +Aggregations can be retrieved from the +{response}+ by first getting the root of the aggregation tree, the `Aggregations` object, and then getting the aggregation by name. 
["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-aggregations-get] +include-tagged::{doc-tests-file}[{api}-request-aggregations-get] -------------------------------------------------- <1> Get the `by_company` terms aggregation <2> Get the buckets that is keyed with `Elastic` @@ -377,7 +345,7 @@ otherwise a `ClassCastException` will be thrown: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-aggregations-get-wrongCast] +include-tagged::{doc-tests-file}[search-request-aggregations-get-wrongCast] -------------------------------------------------- <1> This will throw an exception because "by_company" is a `terms` aggregation but we try to retrieve it as a `range` aggregation @@ -388,14 +356,14 @@ needs to happen explicitly: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-aggregations-asMap] +include-tagged::{doc-tests-file}[{api}-request-aggregations-asMap] -------------------------------------------------- There are also getters that return all top level aggregations as a list: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-aggregations-asList] +include-tagged::{doc-tests-file}[{api}-request-aggregations-asList] -------------------------------------------------- And last but not least you can iterate over all aggregations and then e.g. @@ -403,17 +371,17 @@ decide how to further process them based on their type: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-aggregations-iterator] +include-tagged::{doc-tests-file}[{api}-request-aggregations-iterator] -------------------------------------------------- -[[java-rest-high-retrieve-suggestions]] +[id="{upid}-{api}-response-suggestions"] ===== Retrieving Suggestions -To get back the suggestions from a `SearchResponse`, use the `Suggest` object as an entry point and then retrieve the nested suggestion objects: +To get back the suggestions from a +{response}+, use the `Suggest` object as an entry point and then retrieve the nested suggestion objects: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-suggestion-get] +include-tagged::{doc-tests-file}[{api}-request-suggestion-get] -------------------------------------------------- <1> Use the `Suggest` class to access suggestions <2> Suggestions can be retrieved by name. You need to assign them to the correct @@ -421,21 +389,21 @@ type of Suggestion class (here `TermSuggestion`), otherwise a `ClassCastExceptio <3> Iterate over the suggestion entries <4> Iterate over the options in one entry -[[java-rest-high-retrieve-profile-results]] +[id="{upid}-{api}-response-profile"] ===== Retrieving Profiling Results -Profiling results are retrieved from a `SearchResponse` using the `getProfileResults()` method. This +Profiling results are retrieved from a +{response}+ using the `getProfileResults()` method. 
This method returns a `Map` containing a `ProfileShardResult` object for every shard involved in the - `SearchRequest` execution. `ProfileShardResult` are stored in the `Map` using a key that uniquely + +{request}+ execution. `ProfileShardResult` objects are stored in the `Map` using a key that uniquely identifies the shard the profile result corresponds to. Here is some sample code that shows how to iterate over all the profiling results of every shard: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-profiling-get] +include-tagged::{doc-tests-file}[{api}-request-profiling-get] -------------------------------------------------- -<1> Retrieve the `Map` of `ProfileShardResult` from the `SearchResponse` +<1> Retrieve the `Map` of `ProfileShardResult` from the +{response}+ <2> Profiling results can be retrieved by the shard's key if the key is known, otherwise it might be simpler to iterate over all the profiling results <3> Retrieve the key that identifies which shard the `ProfileShardResult` belongs to @@ -446,7 +414,7 @@ executed against the underlying Lucene index: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-profiling-queries] +include-tagged::{doc-tests-file}[{api}-request-profiling-queries] -------------------------------------------------- <1> Retrieve the list of `QueryProfileShardResult` <2> Iterate over each `QueryProfileShardResult` @@ -456,7 +424,7 @@ Each `QueryProfileShardResult` gives access to the detailed query tree execution ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-profiling-queries-results] +include-tagged::{doc-tests-file}[{api}-request-profiling-queries-results] -------------------------------------------------- <1> Iterate over the profile results <2> Retrieve the name of the Lucene query @@ -470,7 +438,7 @@ The `QueryProfileShardResult` also gives access to the profiling information for ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-profiling-queries-collectors] +include-tagged::{doc-tests-file}[{api}-request-profiling-queries-collectors] -------------------------------------------------- <1> Retrieve the profiling result of the Lucene collector <2> Retrieve the name of the Lucene collector @@ -485,7 +453,7 @@ to the detailed aggregations tree execution: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-profiling-aggs] +include-tagged::{doc-tests-file}[{api}-request-profiling-aggs] -------------------------------------------------- <1> Retrieve the `AggregationProfileShardResult` <2> Iterate over the aggregation profile results diff --git a/docs/java-rest/high-level/security/change-password.asciidoc b/docs/java-rest/high-level/security/change-password.asciidoc new file mode 100644 index 00000000000..40490ad6a83 --- /dev/null +++ b/docs/java-rest/high-level/security/change-password.asciidoc @@ -0,0 +1,46 @@ +[[java-rest-high-security-change-password]] +=== Change Password API + +[[java-rest-high-security-change-password-execution]] +==== Execution + +A user's
password can be changed using the `security().changePassword()` +method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SecurityDocumentationIT.java[change-password-execute] +-------------------------------------------------- + +[[java-rest-high-change-password-response]] +==== Response + +The returned `EmptyResponse` does not contain any fields. The return of this +response indicates a successful request. + +[[java-rest-high-x-pack-security-change-password-async]] +==== Asynchronous Execution + +This request can be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SecurityDocumentationIT.java[change-password-execute-async] +-------------------------------------------------- +<1> The `ChangePassword` request to execute and the `ActionListener` to use when +the execution completes. + +The asynchronous method does not block and returns immediately. Once the request +has completed, the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for an `EmptyResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SecurityDocumentationIT.java[change-password-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument. +<2> Called in case of failure. The raised exception is provided as an argument. diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 51d00c403de..cfac4a3c293 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -2,22 +2,26 @@ == Document APIs +:upid: {mainid}-document +:doc-tests-file: {doc-tests}/CRUDDocumentationIT.java + The Java High Level REST Client supports the following Document APIs: [[single-doc]] Single document APIs:: -* <> -* <> -* <> -* <> +* <<{upid}-index>> +* <<{upid}-get>> +* <<{upid}-delete>> +* <<{upid}-update>> [[multi-doc]] Multi-document APIs:: -* <> -* <> -* <> -* <> -* <> +* <<{upid}-bulk>> +* <<{upid}-multi-get>> +* <<{upid}-reindex>> +* <<{upid}-update-by-query>> +* <<{upid}-delete-by-query>> +* <<{upid}-rethrottle>> include::document/index.asciidoc[] include::document/get.asciidoc[] @@ -29,20 +33,24 @@ include::document/multi-get.asciidoc[] include::document/reindex.asciidoc[] include::document/update-by-query.asciidoc[] include::document/delete-by-query.asciidoc[] +include::document/rethrottle.asciidoc[] == Search APIs +:upid: {mainid} +:doc-tests-file: {doc-tests}/SearchDocumentationIT.java + The Java High Level REST Client supports the following Search APIs: -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> +* <<{upid}-search>> +* <<{upid}-search-scroll>> +* <<{upid}-clear-scroll>> +* <<{upid}-search-template>> +* <<{upid}-multi-search-template>> +* <<{upid}-multi-search>> +* <<{upid}-field-caps>> +* <<{upid}-rank-eval>> +* <<{upid}-explain>> include::search/search.asciidoc[] include::search/scroll.asciidoc[] @@ -135,6 +143,8 @@ The Java High Level REST Client supports the following Cluster APIs: * <> * <> +:upid: {mainid}-cluster +:doc-tests-file: {doc-tests}/ClusterClientDocumentationIT.java
include::cluster/put_settings.asciidoc[] include::cluster/get_settings.asciidoc[] include::cluster/health.asciidoc[] @@ -223,6 +233,8 @@ The Java High Level REST Client supports the following Machine Learning APIs: * <> * <> * <> +* <> +* <> * <> * <> * <> @@ -245,6 +257,8 @@ include::ml/flush-job.asciidoc[] include::ml/put-datafeed.asciidoc[] include::ml/get-datafeed.asciidoc[] include::ml/delete-datafeed.asciidoc[] +include::ml/start-datafeed.asciidoc[] +include::ml/stop-datafeed.asciidoc[] include::ml/get-job-stats.asciidoc[] include::ml/forecast-job.asciidoc[] include::ml/delete-forecast.asciidoc[] @@ -271,8 +285,10 @@ include::migration/get-assistance.asciidoc[] The Java High Level REST Client supports the following Rollup APIs: * <> +* <> include::rollup/put_job.asciidoc[] +include::rollup/get_job.asciidoc[] == Security APIs @@ -281,10 +297,12 @@ The Java High Level REST Client supports the following Security APIs: * <> * <> * <> +* <> include::security/put-user.asciidoc[] include::security/enable-user.asciidoc[] include::security/disable-user.asciidoc[] +include::security/change-password.asciidoc[] == Watcher APIs @@ -303,3 +321,15 @@ The Java High Level REST Client supports the following Graph APIs: * <> include::graph/explore.asciidoc[] + +//// +Clear attributes that we use to document the APIs included above so they +don't leak into the rest of the documentation. +//// +-- +:api!: +:request!: +:response!: +:doc-tests-file!: +:upid!: +-- diff --git a/docs/java-rest/low-level/configuration.asciidoc b/docs/java-rest/low-level/configuration.asciidoc index aa4e843778a..b7da2b5ebcc 100644 --- a/docs/java-rest/low-level/configuration.asciidoc +++ b/docs/java-rest/low-level/configuration.asciidoc @@ -1,3 +1,4 @@ +[[java-rest-low-config]] == Common configuration As explained in <>, the `RestClientBuilder` diff --git a/docs/plugins/integrations.asciidoc b/docs/plugins/integrations.asciidoc index 8bffe5193ed..912d9df2f4b 100644 --- a/docs/plugins/integrations.asciidoc +++ b/docs/plugins/integrations.asciidoc @@ -56,8 +56,8 @@ releases 2.0 and later do not support rivers. * https://github.com/jprante/elasticsearch-jdbc[JDBC importer]: The Java Database Connection (JDBC) importer allows to fetch data from JDBC sources for indexing into Elasticsearch (by Jörg Prante) -* https://github.com/reachkrishnaraj/kafka-elasticsearch-standalone-consumer/tree/branch2.0[Kafka Standalone Consumer(Indexer)]: - Kafka Standalone Consumer [Indexer] will read messages from Kafka in batches, processes(as implemented) and bulk-indexes them into Elasticsearch. Flexible and scalable. More documentation in above GitHub repo's Wiki.(Please use branch 2.0)! +* https://github.com/BigDataDevs/kafka-elasticsearch-consumer[Kafka Standalone Consumer(Indexer)]: + Kafka Standalone Consumer [Indexer] reads messages from Kafka in batches, processes them (as implemented), and bulk-indexes them into Elasticsearch. Flexible and scalable. More documentation in the above GitHub repo's Wiki.
* https://github.com/ozlerhakan/mongolastic[Mongolastic]: A tool that clones data from Elasticsearch to MongoDB and vice versa diff --git a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc index f42d176aea0..1562bf41074 100644 --- a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc @@ -615,7 +615,7 @@ GET /_search "aggs" : { "genres" : { "terms" : { - "field" : "gender", + "field" : "genre", "script" : { "source" : "'Genre: ' +_value", "lang" : "painless" diff --git a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc index 34572acaa96..92133822fa5 100644 --- a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc @@ -59,7 +59,7 @@ PUT my_index "analysis": { "analyzer": { "my_custom_analyzer": { - "type": "custom", + "type": "custom", <1> "tokenizer": "standard", "char_filter": [ "html_strip" @@ -82,6 +82,11 @@ POST my_index/_analyze -------------------------------- // CONSOLE +<1> Setting `type` to `custom` tells Elasticsearch that we are defining a custom analyzer. + Compare this to how <>: + `type` will be set to the name of the built-in analyzer, like + <> or <>. + ///////////////////// [source,js] diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index 42216a9a0fc..be41c0fdc77 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -1,5 +1,5 @@ [[api-conventions]] -= API Conventions += API conventions [partintro] -- diff --git a/docs/reference/cluster/tasks.asciidoc b/docs/reference/cluster/tasks.asciidoc index d6dfa71b76b..c84f4c43ae8 100644 --- a/docs/reference/cluster/tasks.asciidoc +++ b/docs/reference/cluster/tasks.asciidoc @@ -16,6 +16,7 @@ GET _tasks?nodes=nodeId1,nodeId2 <2> GET _tasks?nodes=nodeId1,nodeId2&actions=cluster:* <3> -------------------------------------------------- // CONSOLE +// TEST[skip:No tasks to retrieve] <1> Retrieves all tasks currently running on all nodes in the cluster. <2> Retrieves all tasks running on nodes `nodeId1` and `nodeId2`. See <> for more info about how to select individual nodes. @@ -57,31 +58,29 @@ The result will look similar to the following: } } -------------------------------------------------- -// NOTCONSOLE -// We can't test tasks output +// TESTRESPONSE -It is also possible to retrieve information for a particular task: +It is also possible to retrieve information for a particular task. The following +example retrieves information about task `oTUltX4IQMOUUVeiohTt8A:124`: [source,js] -------------------------------------------------- -GET _tasks/task_id <1> +GET _tasks/oTUltX4IQMOUUVeiohTt8A:124 -------------------------------------------------- // CONSOLE -// TEST[s/task_id/node_id:1/] // TEST[catch:missing] -<1> This will return a 404 if the task isn't found. +If the task isn't found, the API returns a 404. -Or to retrieve all children of a particular task: +To retrieve all children of a particular task: [source,js] -------------------------------------------------- -GET _tasks?parent_task_id=parent_task_id <1> +GET _tasks?parent_task_id=oTUltX4IQMOUUVeiohTt8A:123 -------------------------------------------------- // CONSOLE -// TEST[s/=parent_task_id/=node_id:1/] -<1> This won't return a 404 if the parent isn't found. 
+If the parent isn't found, the API does not return a 404. You can also use the `detailed` request parameter to get more information about the running tasks. This is useful for telling one task from another but is more @@ -93,8 +92,9 @@ request parameter: GET _tasks?actions=*search&detailed -------------------------------------------------- // CONSOLE +// TEST[skip:No tasks to retrieve] -might look like: +The results might look like: [source,js] -------------------------------------------------- @@ -121,8 +121,7 @@ might look like: } } -------------------------------------------------- -// NOTCONSOLE -// We can't test tasks output +// TESTRESPONSE The new `description` field contains human readable text that identifies the particular request that the task is performing such as identifying the search @@ -167,14 +166,14 @@ GET _cat/tasks?detailed [[task-cancellation]] === Task Cancellation -If a long-running task supports cancellation, it can be cancelled by the following command: +If a long-running task supports cancellation, it can be cancelled with the cancel +tasks API. The following example cancels task `oTUltX4IQMOUUVeiohTt8A:12345`: [source,js] -------------------------------------------------- -POST _tasks/node_id:task_id/_cancel +POST _tasks/oTUltX4IQMOUUVeiohTt8A:12345/_cancel -------------------------------------------------- // CONSOLE -// TEST[s/task_id/1/] The task cancellation command supports the same task selection parameters as the list tasks command, so multiple tasks can be cancelled at the same time. For example, the following command will cancel all reindex tasks running on the @@ -217,7 +216,7 @@ a the client that started them: -------------------------------------------------- curl -i -H "X-Opaque-Id: 123456" "http://localhost:9200/_tasks?group_by=parents" -------------------------------------------------- -// NOTCONSOLE +//NOTCONSOLE The result will look similar to the following: @@ -260,8 +259,7 @@ content-length: 831 } } -------------------------------------------------- -// NOTCONSOLE - +//NOTCONSOLE <1> id as a part of the response header <2> id for the task that was initiated by the REST request <3> the child task of the task initiated by the REST request diff --git a/docs/reference/cluster/update-settings.asciidoc b/docs/reference/cluster/update-settings.asciidoc index 27d5cc316a6..33246b57aa4 100644 --- a/docs/reference/cluster/update-settings.asciidoc +++ b/docs/reference/cluster/update-settings.asciidoc @@ -108,9 +108,11 @@ The order of precedence for cluster settings is: 2. persistent cluster settings 3. settings in the `elasticsearch.yml` configuration file. -It's best to use the `elasticsearch.yml` file only -for local configurations, and set all cluster-wide settings with the -`settings` API. +It's best to set all cluster-wide settings with the `settings` API and use the +`elasticsearch.yml` file only for local configurations. This way you can be sure that +the setting is the same on all nodes. If, on the other hand, you accidentally define +different settings on different nodes via the configuration file, such discrepancies +are very difficult to notice. You can find the list of settings that you can dynamically update in <>.
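For example, with the Java high-level REST client a persistent setting can be applied cluster-wide in a single call; a sketch, assuming an initialized `RestHighLevelClient` named `client` (the setting key below is just one example of a dynamically updatable setting):

["source","java"]
--------------------------------------------------
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.common.settings.Settings;

ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest();
// Persistent settings survive a full cluster restart and apply to every node.
request.persistentSettings(Settings.builder()
        .put("indices.recovery.max_bytes_per_sec", "50mb")
        .build());
ClusterUpdateSettingsResponse response =
        client.cluster().putSettings(request, RequestOptions.DEFAULT);
--------------------------------------------------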
diff --git a/docs/reference/docs/delete-by-query.asciidoc b/docs/reference/docs/delete-by-query.asciidoc index 6edc453903d..93a365623fc 100644 --- a/docs/reference/docs/delete-by-query.asciidoc +++ b/docs/reference/docs/delete-by-query.asciidoc @@ -304,6 +304,7 @@ You can fetch the status of any running delete-by-query requests with the GET _tasks?detailed=true&actions=*/delete/byquery -------------------------------------------------- // CONSOLE +// TEST[skip:No tasks to retrieve] The response looks like: @@ -344,9 +345,7 @@ The response looks like: } } -------------------------------------------------- -// NOTCONSOLE -// We can't test tasks output - +// TESTRESPONSE <1> this object contains the actual status. It is just like the response json with the important addition of the `total` field. `total` is the total number of operations that the reindex expects to perform. You can estimate the @@ -357,10 +356,9 @@ With the task id you can look up the task directly: [source,js] -------------------------------------------------- -GET /_tasks/task_id +GET /_tasks/r1A2WoRbTwKZ516z6NEs5A:36619 -------------------------------------------------- // CONSOLE -// TEST[s/task_id/node_id:1/] // TEST[catch:missing] The advantage of this API is that it integrates with `wait_for_completion=false` @@ -375,16 +373,15 @@ you to delete that document. [[docs-delete-by-query-cancel-task-api]] === Works with the Cancel Task API -Any Delete By Query can be canceled using the <>: +Any Delete By Query can be canceled using the <>: [source,js] -------------------------------------------------- -POST _tasks/task_id/_cancel +POST _tasks/r1A2WoRbTwKZ516z6NEs5A:36619/_cancel -------------------------------------------------- -// TEST[s/task_id/node_id:1/] // CONSOLE -The `task_id` can be found using the tasks API above. +The task ID can be found using the <>. Cancellation should happen quickly but might take a few seconds. The task status API above will continue to list the task until it wakes to cancel itself. @@ -399,12 +396,11 @@ using the `_rethrottle` API: [source,js] -------------------------------------------------- -POST _delete_by_query/task_id/_rethrottle?requests_per_second=-1 +POST _delete_by_query/r1A2WoRbTwKZ516z6NEs5A:36619/_rethrottle?requests_per_second=-1 -------------------------------------------------- -// TEST[s/task_id/node_id:1/] // CONSOLE -The `task_id` can be found using the tasks API above. +The task ID can be found using the <>.
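For Java high-level REST client users, the same rethrottle can be issued programmatically. The following is a hypothetical sketch only: the `RethrottleRequest` class, its package, and the `deleteByQueryRethrottle` method are assumptions based on the client's rethrottle support, and the task id is the illustrative one from the examples above.

["source","java"]
--------------------------------------------------
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RethrottleRequest; // package per client version; an assumption
import org.elasticsearch.tasks.TaskId;

// Node id "r1A2WoRbTwKZ516z6NEs5A", task number 36619, as in the REST examples above.
TaskId taskId = new TaskId("r1A2WoRbTwKZ516z6NEs5A", 36619);
// Set a new requests_per_second value for the running task.
RethrottleRequest rethrottleRequest = new RethrottleRequest(taskId, 100.0f);
client.deleteByQueryRethrottle(rethrottleRequest, RequestOptions.DEFAULT);
--------------------------------------------------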
Just like when setting it on the `_delete_by_query` API, `requests_per_second` can be either `-1` to disable throttling or any decimal number diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index e6a47e0a398..6a9d89b9242 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -692,6 +692,7 @@ You can fetch the status of all running reindex requests with the GET _tasks?detailed=true&actions=*reindex -------------------------------------------------- // CONSOLE +// TEST[skip:No tasks to retrieve] The response looks like: @@ -726,32 +727,36 @@ The response looks like: "bulk": 0, "search": 0 }, - "throttled_millis": 0 + "throttled_millis": 0, + "requests_per_second": -1, + "throttled_until_millis": 0 }, - "description" : "" + "description" : "", + "start_time_in_millis": 1535149899665, + "running_time_in_nanos": 5926916792, + "cancellable": true, + "headers": {} } } } } } -------------------------------------------------- -// NOTCONSOLE -// We can't test tasks output - +// TESTRESPONSE <1> this object contains the actual status. It is identical to the response JSON except for the important addition of the `total` field. `total` is the total number of operations that the `_reindex` expects to perform. You can estimate the progress by adding the `updated`, `created`, and `deleted` fields. The request will finish when their sum is equal to the `total` field. -With the task id you can look up the task directly: +With the task id you can look up the task directly. The following example +retrieves information about the task `r1A2WoRbTwKZ516z6NEs5A:36619`: [source,js] -------------------------------------------------- -GET /_tasks/task_id +GET /_tasks/r1A2WoRbTwKZ516z6NEs5A:36619 -------------------------------------------------- // CONSOLE -// TEST[s/task_id/node_id:1/] // TEST[catch:missing] The advantage of this API is that it integrates with `wait_for_completion=false` @@ -766,16 +771,16 @@ you to delete that document. [[docs-reindex-cancel-task-api]] === Works with the Cancel Task API -Any Reindex can be canceled using the <>: +Any Reindex can be canceled using the <>. For +example: [source,js] -------------------------------------------------- -POST _tasks/task_id/_cancel +POST _tasks/r1A2WoRbTwKZ516z6NEs5A:36619/_cancel -------------------------------------------------- // CONSOLE -// TEST[s/task_id/node_id:1/] -The `task_id` can be found using the Tasks API. +The task ID can be found using the <>. Cancellation should happen quickly but might take a few seconds. The Tasks API will continue to list the task until it wakes to cancel itself. @@ -790,12 +795,11 @@ the `_rethrottle` API: [source,js] -------------------------------------------------- -POST _reindex/task_id/_rethrottle?requests_per_second=-1 +POST _reindex/r1A2WoRbTwKZ516z6NEs5A:36619/_rethrottle?requests_per_second=-1 -------------------------------------------------- // CONSOLE -// TEST[s/task_id/node_id:1/] -The `task_id` can be found using the Tasks API above. +The task ID can be found using the <>.
Just like when setting it on the Reindex API, `requests_per_second` can be either `-1` to disable throttling or any decimal number diff --git a/docs/reference/docs/update-by-query.asciidoc b/docs/reference/docs/update-by-query.asciidoc index 6e7cfbd2b79..bef531d8de2 100644 --- a/docs/reference/docs/update-by-query.asciidoc +++ b/docs/reference/docs/update-by-query.asciidoc @@ -359,6 +359,7 @@ You can fetch the status of all running update-by-query requests with the GET _tasks?detailed=true&actions=*byquery -------------------------------------------------- // CONSOLE +// TEST[skip:No tasks to retrieve] The response looks like: @@ -392,7 +393,7 @@ The response looks like: "retries": { "bulk": 0, "search": 0 - } + }, "throttled_millis": 0 }, "description" : "" @@ -402,8 +403,7 @@ The response looks like: } } -------------------------------------------------- -// NOTCONSOLE -// We can't test tasks output +// TESTRESPONSE <1> this object contains the actual status. It is just like the response json with the important addition of the `total` field. `total` is the total number @@ -411,14 +411,14 @@ of operations that the reindex expects to perform. You can estimate the progress by adding the `updated`, `created`, and `deleted` fields. The request will finish when their sum is equal to the `total` field. -With the task id you can look up the task directly: +With the task id you can look up the task directly. The following example +retrieves information about task `r1A2WoRbTwKZ516z6NEs5A:36619`: [source,js] -------------------------------------------------- -GET /_tasks/task_id +GET /_tasks/r1A2WoRbTwKZ516z6NEs5A:36619 -------------------------------------------------- // CONSOLE -// TEST[s/task_id/node_id:1/] // TEST[catch:missing] The advantage of this API is that it integrates with `wait_for_completion=false` @@ -437,12 +437,11 @@ Any Update By Query can be canceled using the <>: [source,js] -------------------------------------------------- -POST _tasks/task_id/_cancel +POST _tasks/r1A2WoRbTwKZ516z6NEs5A:36619/_cancel -------------------------------------------------- // CONSOLE -// TEST[s/task_id/node_id:1/] -The `task_id` can be found using the tasks API above. +The task ID can be found using the <>. Cancellation should happen quickly but might take a few seconds. The task status API above will continue to list the task until it wakes to cancel itself. @@ -457,12 +456,11 @@ using the `_rethrottle` API: [source,js] -------------------------------------------------- -POST _update_by_query/task_id/_rethrottle?requests_per_second=-1 +POST _update_by_query/r1A2WoRbTwKZ516z6NEs5A:36619/_rethrottle?requests_per_second=-1 -------------------------------------------------- // CONSOLE -// TEST[s/task_id/node_id:1/] -The `task_id` can be found using the tasks API above. +The task ID can be found using the <>.
Just like when setting it on the `_update_by_query` API `requests_per_second` can be either `-1` to disable throttling or any decimal number diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index 6e536f382dd..1a60d4d83e2 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -1,5 +1,5 @@ [[getting-started]] -= Getting Started += Getting started [partintro] -- @@ -158,19 +158,6 @@ And now we are ready to start our node and single cluster: ./elasticsearch -------------------------------------------------- -[float] -=== Installation with Homebrew - -On macOS, Elasticsearch can also be installed via https://brew.sh[Homebrew]: - -["source","sh"] --------------------------------------------------- -brew install elasticsearch --------------------------------------------------- - -If installation succeeds, Homebrew will finish by saying that you can start Elasticsearch by entering -`elasticsearch`. Do that now. The expected response is described below, under <>. - [float] === Installation example with MSI Windows Installer @@ -499,7 +486,7 @@ If we study the above commands carefully, we can actually see a pattern of how w [source,js] -------------------------------------------------- - /// + /// -------------------------------------------------- // NOTCONSOLE diff --git a/x-pack/docs/en/rest-api/graph/explore.asciidoc b/docs/reference/graph/explore.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/graph/explore.asciidoc rename to docs/reference/graph/explore.asciidoc index f9902fcbe48..91b07a63129 100644 --- a/x-pack/docs/en/rest-api/graph/explore.asciidoc +++ b/docs/reference/graph/explore.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="platinum"] [[graph-explore-api]] == Explore API diff --git a/docs/reference/how-to/general.asciidoc b/docs/reference/how-to/general.asciidoc index ee876eb3843..b78ef63b4c8 100644 --- a/docs/reference/how-to/general.asciidoc +++ b/docs/reference/how-to/general.asciidoc @@ -15,7 +15,7 @@ use the <> API. [[maximum-document-size]] === Avoid large documents -Given that the default <> is set to +Given that the default <> is set to 100MB, Elasticsearch will refuse to index any document that is larger than that. You might decide to increase that particular setting, but Lucene still has a limit of about 2GB. diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index 70c3d09dc93..81bc96bb8f9 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -1,6 +1,6 @@ [[index-modules]] -= Index Modules += Index modules [partintro] -- diff --git a/docs/reference/index-modules/translog.asciidoc b/docs/reference/index-modules/translog.asciidoc index 713a3522100..705fb81b09c 100644 --- a/docs/reference/index-modules/translog.asciidoc +++ b/docs/reference/index-modules/translog.asciidoc @@ -86,65 +86,3 @@ file based sync. Defaults to `512mb` `index.translog.retention.age`:: The maximum duration for which translog files will be kept. Defaults to `12h`. - - -[float] -[[corrupt-translog-truncation]] -=== What to do if the translog becomes corrupted? - -[WARNING] -This tool is deprecated and will be completely removed in 7.0. -Use the <> instead of this one. - -In some cases (a bad drive, user error) the translog on a shard copy can become -corrupted. 
When this corruption is detected by Elasticsearch due to mismatching -checksums, Elasticsearch will fail that shard copy and refuse to use that copy -of the data. If there are other copies of the shard available then -Elasticsearch will automatically recover from one of them using the normal -shard allocation and recovery mechanism. In particular, if the corrupt shard -copy was the primary when the corruption was detected then one of its replicas -will be promoted in its place. - -If there is no copy of the data from which Elasticsearch can recover -successfully, a user may want to recover the data that is part of the shard at -the cost of losing the data that is currently contained in the translog. We -provide a command-line tool for this, `elasticsearch-translog`. - -[WARNING] -The `elasticsearch-translog` tool should *not* be run while Elasticsearch is -running. If you attempt to run this tool while Elasticsearch is running, you -will permanently lose the documents that were contained only in the translog! - -In order to run the `elasticsearch-translog` tool, specify the `truncate` -subcommand as well as the directory for the corrupted translog with the `-d` -option: - -[source,txt] --------------------------------------------------- -$ bin/elasticsearch-translog truncate -d /var/lib/elasticsearchdata/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/ -Checking existing translog files -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! WARNING: Elasticsearch MUST be stopped before running this tool ! -! ! -! WARNING: Documents inside of translog files will be lost ! -! ! -! WARNING: The following files will be DELETED! ! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ---> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-41.ckp ---> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-6.ckp ---> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-37.ckp ---> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-24.ckp ---> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-11.ckp - -Continue and DELETE files? [y/N] y -Reading translog UUID information from Lucene commit from shard at [data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/index] -Translog Generation: 3 -Translog UUID : AxqC4rocTC6e0fwsljAh-Q -Removing existing translog files -Creating new empty checkpoint at [data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog.ckp] -Creating new empty translog at [data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-3.tlog] -Done. --------------------------------------------------- - -You can also use the `-h` option to get a list of all options and parameters -that the `elasticsearch-translog` tool supports. 
diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index e4debd30c03..216983bc6f0 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -29,8 +29,6 @@ include::setup/bootstrap-checks-xes.asciidoc[] :edit_url: include::upgrade.asciidoc[] -include::migration/index.asciidoc[] - include::api-conventions.asciidoc[] include::docs.asciidoc[] @@ -76,6 +74,8 @@ include::glossary.asciidoc[] include::release-notes/highlights.asciidoc[] +include::migration/index.asciidoc[] + include::release-notes.asciidoc[] include::redirects.asciidoc[] diff --git a/docs/reference/ingest.asciidoc b/docs/reference/ingest.asciidoc index 18349beab6a..772013534b6 100644 --- a/docs/reference/ingest.asciidoc +++ b/docs/reference/ingest.asciidoc @@ -1,5 +1,5 @@ [[ingest]] -= Ingest Node += Ingest node [partintro] -- diff --git a/docs/reference/mapping/params/format.asciidoc b/docs/reference/mapping/params/format.asciidoc index 7d621f87522..120233a8f0f 100644 --- a/docs/reference/mapping/params/format.asciidoc +++ b/docs/reference/mapping/params/format.asciidoc @@ -31,10 +31,6 @@ Many APIs which support date values also support <> expressions, such as `now-1m/d` -- the current time, minus one month, rounded down to the nearest day. -TIP: The `format` setting can be updated on existing fields using the -<>. - - [[custom-date-formats]] ==== Custom date formats diff --git a/docs/reference/migration/migrate_7_0.asciidoc b/docs/reference/migration/migrate_7_0.asciidoc index 924a6984dc0..45f383435e4 100644 --- a/docs/reference/migration/migrate_7_0.asciidoc +++ b/docs/reference/migration/migrate_7_0.asciidoc @@ -1,30 +1,14 @@ [[breaking-changes-7.0]] == Breaking changes in 7.0 +++++ +7.0 +++++ This section discusses the changes that you need to be aware of when migrating your application to Elasticsearch 7.0. See also <> and <>. -[float] -=== Indices created before 7.0 - -Elasticsearch 7.0 can read indices created in version 6.0 or above. An -Elasticsearch 7.0 node will not start in the presence of indices created in a -version of Elasticsearch before 6.0. - -[IMPORTANT] -.Reindex indices from Elasticsearch 5.x or before -========================================= - -Indices created in Elasticsearch 5.x or before will need to be reindexed with -Elasticsearch 6.x in order to be readable by Elasticsearch 7.x. - -========================================= - -[float] -=== Also see: - * <> * <> * <> @@ -41,6 +25,22 @@ Elasticsearch 6.x in order to be readable by Elasticsearch 7.x. * <> * <> +[float] +=== Indices created before 7.0 + +Elasticsearch 7.0 can read indices created in version 6.0 or above. An +Elasticsearch 7.0 node will not start in the presence of indices created in a +version of Elasticsearch before 6.0. + +[IMPORTANT] +.Reindex indices from Elasticsearch 5.x or before +========================================= + +Indices created in Elasticsearch 5.x or before will need to be reindexed with +Elasticsearch 6.x in order to be readable by Elasticsearch 7.x. 
+ +========================================= + include::migrate_7_0/aggregations.asciidoc[] include::migrate_7_0/analysis.asciidoc[] include::migrate_7_0/cluster.asciidoc[] diff --git a/docs/reference/migration/migrate_7_0/aggregations.asciidoc b/docs/reference/migration/migrate_7_0/aggregations.asciidoc index 08f181b2919..b29f741dd85 100644 --- a/docs/reference/migration/migrate_7_0/aggregations.asciidoc +++ b/docs/reference/migration/migrate_7_0/aggregations.asciidoc @@ -1,21 +1,26 @@ +[float] [[breaking_70_aggregations_changes]] === Aggregations changes +[float] ==== Deprecated `global_ordinals_hash` and `global_ordinals_low_cardinality` execution hints for terms aggregations have been removed These `execution_hint` are removed and should be replaced by `global_ordinals`. +[float] ==== `search.max_buckets` in the cluster setting The dynamic cluster setting named `search.max_buckets` now defaults to 10,000 (instead of unlimited in the previous version). Requests that try to return more than the limit will fail with an exception. +[float] ==== `missing` option of the `composite` aggregation has been removed The `missing` option of the `composite` aggregation, deprecated in 6.x, has been removed. `missing_bucket` should be used instead. +[float] ==== Replaced `params._agg` with `state` context variable in scripted metric aggregations The object used to share aggregation state between the scripts in a Scripted Metric diff --git a/docs/reference/migration/migrate_7_0/analysis.asciidoc b/docs/reference/migration/migrate_7_0/analysis.asciidoc index 6e6cc5b078d..e4b27def9f8 100644 --- a/docs/reference/migration/migrate_7_0/analysis.asciidoc +++ b/docs/reference/migration/migrate_7_0/analysis.asciidoc @@ -1,12 +1,15 @@ +[float] [[breaking_70_analysis_changes]] === Analysis changes +[float] ==== Limiting the number of tokens produced by _analyze To safeguard against out of memory errors, the number of tokens that can be produced using the `_analyze` endpoint has been limited to 10000. This default limit can be changed for a particular index with the index setting `index.analyze.max_token_count`. +[float] ==== Limiting the length of an analyzed text during highlighting Highlighting a text that was indexed without offsets or term vectors, @@ -16,6 +19,7 @@ To protect against this, the maximum number of characters that will be analyzed limited to 1000000. This default limit can be changed for a particular index with the index setting `index.highlight.max_analyzed_offset`. +[float] ==== `delimited_payload_filter` renaming The `delimited_payload_filter` was deprecated and renamed to `delimited_payload` in 6.2. @@ -23,6 +27,7 @@ Using it in indices created before 7.0 will issue deprecation warnings. Using th name in new indices created in 7.0 will throw an error. Use the new name `delimited_payload` instead. +[float] ==== `standard` filter has been removed The `standard` token filter has been removed because it doesn't change anything in the stream. 
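+
+As an example of the index-level limits described in this section, here is a
+sketch of raising the `_analyze` token limit for one index; the index name
+`my_index` is hypothetical, and this assumes the setting can be updated
+dynamically:
+
+[source,js]
+--------------------------------------------------
+PUT /my_index/_settings
+{
+  "index.analyze.max_token_count": 20000
+}
+--------------------------------------------------
+// NOTCONSOLE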
diff --git a/docs/reference/migration/migrate_7_0/api.asciidoc b/docs/reference/migration/migrate_7_0/api.asciidoc
index a58223023bd..71a8e1aa015 100644
--- a/docs/reference/migration/migrate_7_0/api.asciidoc
+++ b/docs/reference/migration/migrate_7_0/api.asciidoc
@@ -1,6 +1,8 @@
+[float]
[[breaking_70_api_changes]]
=== API changes

+[float]
==== Camel case and underscore parameters deprecated in 6.x have been removed

A number of duplicate parameters deprecated in 6.x have been removed from
Bulk request, Multi Get request, Term Vectors request, and More Like This Query
@@ -22,6 +24,7 @@ The following parameters starting with underscore have been removed:
Instead of these removed parameters, use their non-camel-case equivalents without
the leading underscore, e.g. use `version_type` instead of `_version_type` or `versionType`.

+[float]
==== Thread pool info

In previous versions of Elasticsearch, the thread pool info returned in the
@@ -48,10 +51,12 @@ aligns the output of the API with the configuration values for thread
pools. Note that `core` and `max` will be populated for scaling thread pools,
and `size` will be populated for fixed thread pools.

+[float]
==== The parameter `fields` deprecated in 6.x has been removed from Bulk request
and Update request. The Update API returns `400 - Bad request` if the request
contains unknown parameters (instead of ignoring them, as in the previous version).

+[float]
[[remove-suggest-metric]]
==== Remove support for `suggest` metric/index metric in indices stats and nodes stats APIs

@@ -66,6 +71,7 @@ In the past, `fields` could be provided either as a parameter, or as part of
the body. Specifying `fields` in the request body as opposed to a parameter was
deprecated in 6.4.0, and is now unsupported in 7.0.0.

+[float]
==== `copy_settings` is deprecated on shrink and split APIs

Versions of Elasticsearch prior to 6.4.0 did not copy index settings on shrink
@@ -76,10 +82,12 @@ will be for such settings to be copied on such operations. To enable users in
the only behavior in 8.0.0, this parameter is deprecated in 7.0.0 for removal in
8.0.0.

+[float]
==== The deprecated stored script contexts have now been removed

When putting stored scripts, support for storing them with the deprecated `template`
context or without a context is now removed. Scripts must be stored using the `script`
context as mentioned in the documentation.

+[float]
==== Get Aliases API limitations when {security} is enabled removed

The behavior and response codes of the get aliases API no longer vary
@@ -88,6 +96,7 @@ depending on whether {security} is enabled. Previously a
404 - NOT FOUND (IndexNotFoundException) could be returned in case the
current user was not authorized for any alias. An empty response with
status 200 - OK is now returned instead at all times.

+[float]
==== Put User API response no longer has `user` object

The Put User API response was changed in 6.5.0 to add the `created` field
diff --git a/docs/reference/migration/migrate_7_0/cluster.asciidoc b/docs/reference/migration/migrate_7_0/cluster.asciidoc
index e9584074d73..d518d29987d 100644
--- a/docs/reference/migration/migrate_7_0/cluster.asciidoc
+++ b/docs/reference/migration/migrate_7_0/cluster.asciidoc
@@ -1,16 +1,20 @@
+[float]
[[breaking_70_cluster_changes]]
=== Cluster changes

+[float]
==== `:` is no longer allowed in cluster name

Due to cross-cluster search using `:` to separate a cluster and index name,
cluster names may no longer contain `:`.
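+
+For example, cross-cluster search addresses an index on a remote cluster as
+`cluster:index`, which is why `:` is reserved; `cluster_one` and `my_index`
+below are hypothetical names:
+
+[source,js]
+--------------------------------------------------
+GET /cluster_one:my_index/_search
+--------------------------------------------------
+// NOTCONSOLE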
+[float]
==== New default for `wait_for_active_shards` parameter of the open index command

The default value for the `wait_for_active_shards` parameter of the open index
API is changed from 0 to 1, which means that the command will now by default
wait for all primary shards of the opened index to be allocated.

+[float]
==== Shard preferences `_primary`, `_primary_first`, `_replica`, and `_replica_first` are removed

These shard preferences are removed in favour of the `_prefer_nodes` and `_only_nodes` preferences.
diff --git a/docs/reference/migration/migrate_7_0/indices.asciidoc b/docs/reference/migration/migrate_7_0/indices.asciidoc
index a47cc6f4324..634a00e1f44 100644
--- a/docs/reference/migration/migrate_7_0/indices.asciidoc
+++ b/docs/reference/migration/migrate_7_0/indices.asciidoc
@@ -1,17 +1,20 @@
+[float]
[[breaking_70_indices_changes]]
=== Indices changes

+[float]
==== `:` is no longer allowed in index name

Due to cross-cluster search using `:` to separate a cluster and index name,
index names may no longer contain `:`.

+[float]
==== `index.unassigned.node_left.delayed_timeout` may no longer be negative

Negative values were interpreted as zero in earlier versions but are no
longer accepted.

-
+[float]
==== `_flush` and `_force_merge` will no longer refresh

In previous versions issuing a `_flush` or `_force_merge` (with `flush=true`)
@@ -20,7 +23,7 @@ visible to searches and non-realtime GET operations. From now on these
operations don't have this side-effect anymore. To make documents visible an
explicit `_refresh` call is needed unless the index is refreshed by the internal scheduler.

-
+[float]
==== Limit to the difference between max_size and min_size in NGramTokenFilter and NGramTokenizer

To safeguard against creating too many index terms, the difference between `max_ngram` and
@@ -29,7 +32,7 @@ limit can be changed with the index setting `index.max_ngram_diff`. Note that if
exceeded an error is thrown only for new indices. For existing pre-7.0 indices, a deprecation
warning is logged.

-
+[float]
==== Limit to the difference between max_size and min_size in ShingleTokenFilter

To safeguard against creating too many tokens, the difference between `max_shingle_size` and
@@ -38,6 +41,7 @@ limit can be changed with the index setting `index.max_shingle_diff`. Note that
exceeded an error is thrown only for new indices. For existing pre-7.0 indices, a deprecation
warning is logged.

+[float]
==== Document distribution changes

Indices created with version `7.0.0` onwards will have an automatic `index.number_of_routing_shards`
@@ -46,6 +50,7 @@ shards the index has. In order to maintain the exact same distribution as a pre
`index.number_of_routing_shards` must be set to the `index.number_of_shards` at index creation time.
Note: if the number of routing shards equals the number of shards `_split` operations are not supported.

+[float]
==== Skipped background refresh on search idle shards

Shards belonging to an index that does not have an explicit
@@ -56,6 +61,7 @@ that access a search idle shard will be "parked" until the next refresh
happens. Indexing requests with `wait_for_refresh` will also trigger
a background refresh.
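+
+For example, a sketch of opting an index out of the search idle behaviour by
+giving it an explicit refresh interval (the index name is hypothetical):
+
+[source,js]
+--------------------------------------------------
+PUT /my_index/_settings
+{
+  "index.refresh_interval": "1s"
+}
+--------------------------------------------------
+// NOTCONSOLE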
+[float]
==== Remove deprecated url parameters for Clear Indices Cache API

The following previously deprecated url parameters have been removed:
@@ -65,12 +71,14 @@ The following previously deprecated url parameter have been removed:

* `request_cache` - use `request` instead
* `field_data` - use `fielddata` instead

+[float]
==== `network.breaker.inflight_requests.overhead` increased to 2

Previously the in flight requests circuit breaker considered only the raw byte representation.
By bumping the value of `network.breaker.inflight_requests.overhead` from 1 to 2, this circuit
breaker now also considers the memory overhead of representing the request as a structured object.

+[float]
==== Parent circuit breaker changes

The parent circuit breaker defines a new setting `indices.breaker.total.use_real_memory` which is
@@ -79,6 +87,12 @@ heap memory instead of only considering the reserved memory by child circuit bre
setting is `true`, the default parent breaker limit also changes from 70% to 95% of the JVM heap size.
The previous behavior can be restored by setting `indices.breaker.total.use_real_memory` to `false`.

+[float]
==== `fix` value for `index.shard.check_on_startup` is removed

-Deprecated option value `fix` for setting `index.shard.check_on_startup` is not supported.
\ No newline at end of file
+Deprecated option value `fix` for setting `index.shard.check_on_startup` is not supported.
+
+[float]
+==== `elasticsearch-translog` is removed
+
+Use the `elasticsearch-shard` tool to remove corrupted translog data.
\ No newline at end of file
diff --git a/docs/reference/migration/migrate_7_0/java.asciidoc b/docs/reference/migration/migrate_7_0/java.asciidoc
index ea263283291..7d68ff2fb57 100644
--- a/docs/reference/migration/migrate_7_0/java.asciidoc
+++ b/docs/reference/migration/migrate_7_0/java.asciidoc
@@ -1,20 +1,29 @@
+[float]
[[breaking_70_java_changes]]
=== Java API changes

+[float]
==== `isShardsAcked` deprecated in `6.2` has been removed

`isShardsAcked` has been replaced by `isShardsAcknowledged` in
`CreateIndexResponse`, `RolloverResponse` and
`CreateIndexClusterStateUpdateResponse`.

+[float]
==== `prepareExecute` removed from the client api

The `prepareExecute` method which created a request builder has been
removed from the client api. Instead, construct
a builder for the appropriate request directly.

+[float]
==== Some Aggregation classes have moved packages

* All classes present in `org.elasticsearch.search.aggregations.metrics.*` packages
were moved to a single `org.elasticsearch.search.aggregations.metrics` package.

+[float]
+==== `Retry.withBackoff` methods with `Settings` removed
+
+The variants of `Retry.withBackoff` that included `Settings` have been removed
+because `Settings` is no longer needed.
\ No newline at end of file
diff --git a/docs/reference/migration/migrate_7_0/low_level_restclient.asciidoc b/docs/reference/migration/migrate_7_0/low_level_restclient.asciidoc
index 77f5266763f..0820c7f01cc 100644
--- a/docs/reference/migration/migrate_7_0/low_level_restclient.asciidoc
+++ b/docs/reference/migration/migrate_7_0/low_level_restclient.asciidoc
@@ -1,6 +1,8 @@
+[float]
[[breaking_70_low_level_restclient_changes]]
=== Low-level REST client changes

+[float]
==== Deprecated flavors of performRequest have been removed

We deprecated the flavors of `performRequest` and `performRequestAsync` that
@@ -8,6 +10,7 @@ do not take `Request` objects in 6.4.0 in favor of the flavors that take
`Request` objects because those methods can be extended without breaking
backwards compatibility.

+[float]
==== Removed setHosts

We deprecated `setHosts` in 6.4.0 in favor of `setNodes` because it supports
diff --git a/docs/reference/migration/migrate_7_0/mappings.asciidoc b/docs/reference/migration/migrate_7_0/mappings.asciidoc
index 4983cb2da57..5ee1615796c 100644
--- a/docs/reference/migration/migrate_7_0/mappings.asciidoc
+++ b/docs/reference/migration/migrate_7_0/mappings.asciidoc
@@ -1,36 +1,44 @@
+[float]
[[breaking_70_mappings_changes]]
=== Mapping changes

+[float]
==== The `_all` meta field is removed

The `_all` field deprecated in 6 has now been removed.

+[float]
==== The `_uid` meta field is removed

This field used to index a composite key formed of the `_type` and the `_id`.
Now that indices cannot have multiple types, this has been removed in favour
of `_id`.

+[float]
==== The `_default_` mapping is no longer allowed

The `_default_` mapping has been deprecated in 6.0 and is now no longer allowed
in 7.0. Trying to configure a `_default_` mapping on 7.x indices will result in
an error.

+[float]
==== `index_options` for numeric fields has been removed

The `index_options` field for numeric fields has been deprecated in 6 and has now
been removed.

+[float]
==== Limiting the number of `nested` json objects

To safeguard against out of memory errors, the number of nested json objects within a single
document across all fields has been limited to 10000. This default limit can be changed with
the index setting `index.mapping.nested_objects.limit`.

+[float]
==== The `update_all_types` option has been removed

This option is useless now that all indices have at most one type.

+[float]
==== The `classic` similarity has been removed

The `classic` similarity relied on coordination factors for scoring to be good
@@ -39,6 +47,7 @@ Lucene, which means that the `classic` similarity now produces scores of lower
quality. It is advised to switch to `BM25` instead, which is widely accepted
as a better alternative.

+[float]
==== Similarities fail when unsupported options are provided

An error will now be thrown when unknown configuration options are provided
diff --git a/docs/reference/migration/migrate_7_0/packaging.asciidoc b/docs/reference/migration/migrate_7_0/packaging.asciidoc
index 934522db716..e2380613d8f 100644
--- a/docs/reference/migration/migrate_7_0/packaging.asciidoc
+++ b/docs/reference/migration/migrate_7_0/packaging.asciidoc
@@ -1,6 +1,8 @@
+[float]
[[breaking_70_packaging_changes]]
=== Packaging changes

+[float]
[[systemd-service-file-config]]
==== systemd service file is no longer configuration

@@ -9,6 +11,7 @@ was previously marked as a configuration file in rpm and deb packages.
Overrides to the systemd elasticsearch service should be made in
`/etc/systemd/system/elasticsearch.service.d/override.conf`.

+[float]
==== tar package no longer includes windows specific files

The tar package previously included files in the `bin` directory meant only
diff --git a/docs/reference/migration/migrate_7_0/plugins.asciidoc b/docs/reference/migration/migrate_7_0/plugins.asciidoc
index 462823a61fd..5fcd2bb9526 100644
--- a/docs/reference/migration/migrate_7_0/plugins.asciidoc
+++ b/docs/reference/migration/migrate_7_0/plugins.asciidoc
@@ -1,6 +1,8 @@
+[float]
[[breaking_70_plugins_changes]]
=== Plugins changes

+[float]
==== Azure Repository plugin

* The legacy azure settings which started with the `cloud.azure.storage.` prefix have been removed.
@@ -12,6 +14,7 @@ You must set it per azure client instead. Like `azure.client.default.timeout: 10
See {plugins}/repository-azure-repository-settings.html#repository-azure-repository-settings[Azure Repository settings].

+[float]
==== Google Cloud Storage Repository plugin

* The repository settings `application_name`, `connect_timeout` and `read_timeout` have been removed and
@@ -19,11 +22,13 @@ must now be specified in the client settings instead.
See {plugins}/repository-gcs-client.html#repository-gcs-client[Google Cloud Storage Client Settings].

+[float]
==== Analysis Plugin changes

* The misspelled helper method `requriesAnalysisSettings(AnalyzerProvider provider)` has been
renamed to `requiresAnalysisSettings`

+[float]
==== File-based discovery plugin

* This plugin has been removed since its functionality is now part of
diff --git a/docs/reference/migration/migrate_7_0/restclient.asciidoc b/docs/reference/migration/migrate_7_0/restclient.asciidoc
index 470996cfeff..3c0237db6e7 100644
--- a/docs/reference/migration/migrate_7_0/restclient.asciidoc
+++ b/docs/reference/migration/migrate_7_0/restclient.asciidoc
@@ -1,6 +1,8 @@
+[float]
[[breaking_70_restclient_changes]]
=== High-level REST client changes

+[float]
==== API methods accepting `Header` argument have been removed

All API methods accepting headers as a `Header` varargs argument, deprecated
@@ -12,6 +14,7 @@ In case you are specifying headers
e.g. `client.index(indexRequest, new Header("name", "value"))` becomes
`client.index(indexRequest, RequestOptions.DEFAULT.toBuilder().addHeader("name", "value").build());`

+[float]
==== Cluster Health API default to `cluster` level

The Cluster Health API used to default to `shards` level to ease migration
diff --git a/docs/reference/migration/migrate_7_0/scripting.asciidoc b/docs/reference/migration/migrate_7_0/scripting.asciidoc
index 79380f84204..de312c1c723 100644
--- a/docs/reference/migration/migrate_7_0/scripting.asciidoc
+++ b/docs/reference/migration/migrate_7_0/scripting.asciidoc
@@ -1,6 +1,8 @@
+[float]
[[breaking_70_scripting_changes]]
=== Scripting changes

+[float]
==== getDate() and getDates() removed

Fields of type `long` and `date` had `getDate()` and `getDates()` methods
@@ -12,6 +14,7 @@ now been removed. Instead, use `.value` on `date` fields, or explicitly
parse `long` fields into a date object using
`Instant.ofEpochMilli(doc["myfield"].value)`.
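+
+For example, a sketch of the replacement pattern, assuming a hypothetical
+`long` field `my_millis` that holds epoch milliseconds:
+
+[source,js]
+--------------------------------------------------
+GET /my_index/_search
+{
+  "script_fields": {
+    "event_time": {
+      "script": {
+        "lang": "painless",
+        "source": "Instant.ofEpochMilli(doc['my_millis'].value)"
+      }
+    }
+  }
+}
+--------------------------------------------------
+// NOTCONSOLE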
+[float] ==== Script errors will return as `400` error codes Malformed scripts, either in search templates, ingest pipelines or search diff --git a/docs/reference/migration/migrate_7_0/search.asciidoc b/docs/reference/migration/migrate_7_0/search.asciidoc index a7d32896e97..b7aa15861af 100644 --- a/docs/reference/migration/migrate_7_0/search.asciidoc +++ b/docs/reference/migration/migrate_7_0/search.asciidoc @@ -1,6 +1,8 @@ +[float] [[breaking_70_search_changes]] === Search and Query DSL changes +[float] ==== Changes to queries * The default value for `transpositions` parameter of `fuzzy` query has been changed to `true`. @@ -18,6 +20,7 @@ * Attempts to generate multi-term phrase queries against non-text fields with a custom analyzer will now throw an exception +[float] ==== Adaptive replica selection enabled by default Adaptive replica selection has been enabled by default. If you wish to return to @@ -35,6 +38,7 @@ PUT /_cluster/settings -------------------------------------------------- // CONSOLE +[float] ==== Search API returns `400` for invalid requests The Search API returns `400 - Bad request` while it would previously return @@ -48,12 +52,14 @@ The Search API returns `400 - Bad request` while it would previously return * number of filters in the adjacency matrix aggregation is too large * script compilation errors +[float] ==== Scroll queries cannot use the `request_cache` anymore Setting `request_cache:true` on a query that creates a scroll (`scroll=1m`) has been deprecated in 6 and will now return a `400 - Bad request`. Scroll queries are not meant to be cached. +[float] ==== Scroll queries cannot use `rescore` anymore Including a rescore clause on a query that creates a scroll (`scroll=1m`) has been deprecated in 6.5 and will now return a `400 - Bad request`. Allowing @@ -61,6 +67,7 @@ rescore on scroll queries would break the scroll sort. In the 6.x line, the rescore clause was silently ignored (for scroll queries), and it was allowed in the 5.x line. +[float] ==== Term Suggesters supported distance algorithms The following string distance algorithms were given additional names in 6.2 and @@ -70,7 +77,7 @@ removed. * `levenstein` - replaced by `levenshtein` * `jarowinkler` - replaced by `jaro_winkler` - +[float] ==== Limiting the number of terms that can be used in a Terms Query request Executing a Terms Query with a lot of terms may degrade the cluster performance, @@ -79,7 +86,7 @@ To safeguard against this, the maximum number of terms that can be used in a Terms Query request has been limited to 65536. This default maximum can be changed for a particular index with the index setting `index.max_terms_count`. - +[float] ==== Limiting the length of regex that can be used in a Regexp Query request Executing a Regexp Query with a long regex string may degrade search performance. @@ -87,11 +94,13 @@ To safeguard against this, the maximum length of regex that can be used in a Regexp Query request has been limited to 1000. This default maximum can be changed for a particular index with the index setting `index.max_regex_length`. +[float] ==== Invalid `_search` request body Search requests with extra content after the main object will no longer be accepted by the `_search` endpoint. A parsing exception will be thrown instead. +[float] ==== Context Completion Suggester The ability to query and index context enabled suggestions without context, @@ -102,12 +111,14 @@ considerably. 
For geo context the value of the `path` parameter is now validated against the
mapping, and the context is only accepted if `path` points to a field with
`geo_point` type.

+[float]
==== Semantics changed for `max_concurrent_shard_requests`

`max_concurrent_shard_requests` used to limit the total number of concurrent shard
requests a single high level search request can execute. In 7.0 this changed to be the
max number of concurrent shard requests per node. The default is now `5`.

+[float]
==== `max_score` set to `null` when scores are not tracked

`max_score` used to be set to `0` whenever scores are not tracked. `null` is now used
diff --git a/docs/reference/migration/migrate_7_0/settings.asciidoc b/docs/reference/migration/migrate_7_0/settings.asciidoc
index e4b132b38d6..85648da4f0d 100644
--- a/docs/reference/migration/migrate_7_0/settings.asciidoc
+++ b/docs/reference/migration/migrate_7_0/settings.asciidoc
@@ -1,18 +1,21 @@
+[float]
[[breaking_70_settings_changes]]
-
=== Settings changes

+[float]
==== The default for `node.name` is now the hostname

`node.name` now defaults to the hostname at the time when Elasticsearch
is started. Previously the default node name was the first eight characters
of the node id. It can still be configured explicitly in `elasticsearch.yml`.

+[float]
==== Percolator

* The deprecated `index.percolator.map_unmapped_fields_as_string` setting has been removed in favour of
the `index.percolator.map_unmapped_fields_as_text` setting.

+[float]
==== Index thread pool

* Internally, single-document index/delete/update requests are executed as bulk
@@ -21,6 +24,7 @@ of the node id. It can still be configured explicitly in `elasticsearch.yml`.
longer needed and has been removed. As such, the settings
`thread_pool.index.size` and `thread_pool.index.queue_size` have been removed.

+[float]
[[write-thread-pool-fallback]]
==== Write thread pool fallback

@@ -32,6 +36,7 @@ of the node id. It can still be configured explicitly in `elasticsearch.yml`.
available to keep the display output in APIs as `bulk` instead of `write`.
These fallback settings and this system property have been removed.

+[float]
[[remove-http-enabled]]
==== Http enabled setting removed

@@ -39,6 +44,7 @@ of the node id. It can still be configured explicitly in `elasticsearch.yml`.
use of the transport client. This setting has been removed, as the transport
client will be removed in the future, thus requiring HTTP to always be enabled.

+[float]
[[remove-http-pipelining-setting]]
==== Http pipelining setting removed

@@ -47,6 +53,7 @@ This setting has been removed, as disabling http pipelining support on the server
provided little value. The setting `http.pipelining.max_events` can still be used to
limit the number of pipelined requests in-flight.

+[float]
==== Cross-cluster search settings renamed

The cross-cluster search remote cluster connection infrastructure is also used
diff --git a/docs/reference/migration/migrate_7_0/snapshotstats.asciidoc b/docs/reference/migration/migrate_7_0/snapshotstats.asciidoc
index 6dbd24b13a1..2098eb3574c 100644
--- a/docs/reference/migration/migrate_7_0/snapshotstats.asciidoc
+++ b/docs/reference/migration/migrate_7_0/snapshotstats.asciidoc
@@ -1,3 +1,4 @@
+[float]
[[breaking_70_snapshotstats_changes]]
=== Snapshot stats changes

@@ -7,6 +8,7 @@ Snapshot stats details are provided in a new structured way:

* `incremental` section for those files that actually needed to be copied over as part of
the incremental snapshotting.
* In case of a snapshot that's still in progress, there's also a `processed` section for files that are in the process of being copied.

+[float]
==== Deprecated `number_of_files`, `processed_files`, `total_size_in_bytes` and `processed_size_in_bytes` snapshot stats properties have been removed

* Properties `number_of_files` and `total_size_in_bytes` are removed and should be replaced by values of nested object `total`.
diff --git a/docs/reference/ml/apis/find-file-structure.asciidoc b/docs/reference/ml/apis/find-file-structure.asciidoc
index f9a583a027a..e72555d2723 100644
--- a/docs/reference/ml/apis/find-file-structure.asciidoc
+++ b/docs/reference/ml/apis/find-file-structure.asciidoc
@@ -22,7 +22,7 @@ This API provides a starting point for ingesting data into {es} in a format
that is suitable for subsequent use with other {ml} functionality.

Unlike other {es} endpoints, the data that is posted to this endpoint does not
-need to be UTF-8 encoded and in JSON format. It must, however, be text; binary
+need to be UTF-8 encoded and in JSON format. It must, however, be text; binary
file formats are not currently supported.

The response from the API contains:

@@ -122,6 +122,11 @@ to request analysis of 100000 lines to achieve some variety.
is not specified and the delimiter is pipe (`|`), the default value is `true`.
Otherwise, the default value is `false`.

+`timeout`::
+ (time) Sets the maximum amount of time that the structure analysis may take.
+ If the analysis is still running when the timeout expires then it will be
+ aborted. The default value is 25 seconds.
+
`timestamp_field`::
(string) The name of the field that contains the primary timestamp of each
record in the file. In particular, if the file were ingested into an index,

@@ -159,7 +164,7 @@ format corresponds to the primary timestamp, but you do not want to specify the
full `grok_pattern`.

If this parameter is not specified, the structure finder chooses the best format from
-the formats it knows, which are:
+the formats it knows, which are these Joda formats and their Java time equivalents:

* `dd/MMM/YYYY:HH:mm:ss Z`
* `EEE MMM dd HH:mm zzz YYYY`
@@ -178,7 +183,7 @@ the formats it knows, which are:
* `MMM dd HH:mm:ss`
* `MMM dd HH:mm:ss,SSS`
* `MMM dd YYYY HH:mm:ss`
-* `MMM dd, YYYY K:mm:ss a`
+* `MMM dd, YYYY h:mm:ss a`
* `TAI64N`
* `UNIX`
* `UNIX_MS`
@@ -197,7 +202,7 @@ the formats it knows, which are:

The text file that you want to analyze. It must contain data that is suitable
to be ingested into {es}. It does not need to be in JSON format and it does not
-need to be UTF-8 encoded. The size is limited to the {es} HTTP receive buffer
+need to be UTF-8 encoded. The size is limited to the {es} HTTP receive buffer
size, which defaults to 100 Mb.
@@ -245,6 +250,7 @@ POST _xpack/ml/find_file_structure // TEST If the request does not encounter errors, you receive the following result: + [source,js] ---- { @@ -365,49 +371,49 @@ If the request does not encounter errors, you receive the following result: "page_count" : { "count" : 24, "cardinality" : 24, - "min_value" : 180.0, - "max_value" : 768.0, + "min_value" : 180, + "max_value" : 768, "mean_value" : 387.0833333333333, "median_value" : 329.5, "top_hits" : [ { - "value" : 180.0, + "value" : 180, "count" : 1 }, { - "value" : 208.0, + "value" : 208, "count" : 1 }, { - "value" : 224.0, + "value" : 224, "count" : 1 }, { - "value" : 227.0, + "value" : 227, "count" : 1 }, { - "value" : 268.0, + "value" : 268, "count" : 1 }, { - "value" : 271.0, + "value" : 271, "count" : 1 }, { - "value" : 275.0, + "value" : 275, "count" : 1 }, { - "value" : 288.0, + "value" : 288, "count" : 1 }, { - "value" : 304.0, + "value" : 304, "count" : 1 }, { - "value" : 311.0, + "value" : 311, "count" : 1 } ] @@ -483,7 +489,1192 @@ If the request does not encounter errors, you receive the following result: `keyword` type as it is not considered specific enough to convert to the `date` type. <9> `field_stats` contains the most common values of each field, plus basic - numeric statistics for the numeric `page_count` field. This information + numeric statistics for the numeric `page_count` field. This information may provide clues that the data needs to be cleaned or transformed prior to use by other {ml} functionality. +The next example shows how it's possible to find the structure of some New York +City yellow cab trip data. The first `curl` command downloads the data, the +first 20000 lines of which are then piped into the `find_file_structure` +endpoint. The `lines_to_sample` query parameter of the endpoint is set to 20000 +to match what is specified in the `head` command. + +[source,js] +---- +curl -s "s3.amazonaws.com/nyc-tlc/trip+data/yellow_tripdata_2018-06.csv" | head -20000 | curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_xpack/ml/find_file_structure?pretty&lines_to_sample=20000" -T - +---- +// NOTCONSOLE +// Not converting to console because this shows how curl can be used + +-- +NOTE: The `Content-Type: application/json` header must be set even though in +this case the data is not JSON. (Alternatively the `Content-Type` can be set +to any other supported by Elasticsearch, but it must be set.) 
+-- + +If the request does not encounter errors, you receive the following result: + +[source,js] +---- +{ + "num_lines_analyzed" : 20000, + "num_messages_analyzed" : 19998, <1> + "sample_start" : "VendorID,tpep_pickup_datetime,tpep_dropoff_datetime,passenger_count,trip_distance,RatecodeID,store_and_fwd_flag,PULocationID,DOLocationID,payment_type,fare_amount,extra,mta_tax,tip_amount,tolls_amount,improvement_surcharge,total_amount\n\n1,2018-06-01 00:15:40,2018-06-01 00:16:46,1,.00,1,N,145,145,2,3,0.5,0.5,0,0,0.3,4.3\n", + "charset" : "UTF-8", + "has_byte_order_marker" : false, + "format" : "delimited", <2> + "multiline_start_pattern" : "^.*?,\"?\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", + "exclude_lines_pattern" : "^\"?VendorID\"?,\"?tpep_pickup_datetime\"?,\"?tpep_dropoff_datetime\"?,\"?passenger_count\"?,\"?trip_distance\"?,\"?RatecodeID\"?,\"?store_and_fwd_flag\"?,\"?PULocationID\"?,\"?DOLocationID\"?,\"?payment_type\"?,\"?fare_amount\"?,\"?extra\"?,\"?mta_tax\"?,\"?tip_amount\"?,\"?tolls_amount\"?,\"?improvement_surcharge\"?,\"?total_amount\"?", + "column_names" : [ <3> + "VendorID", + "tpep_pickup_datetime", + "tpep_dropoff_datetime", + "passenger_count", + "trip_distance", + "RatecodeID", + "store_and_fwd_flag", + "PULocationID", + "DOLocationID", + "payment_type", + "fare_amount", + "extra", + "mta_tax", + "tip_amount", + "tolls_amount", + "improvement_surcharge", + "total_amount" + ], + "has_header_row" : true, <4> + "delimiter" : ",", <5> + "quote" : "\"", <6> + "timestamp_field" : "tpep_pickup_datetime", <7> + "joda_timestamp_formats" : [ <8> + "YYYY-MM-dd HH:mm:ss" + ], + "java_timestamp_formats" : [ <9> + "yyyy-MM-dd HH:mm:ss" + ], + "need_client_timezone" : true, <10> + "mappings" : { + "@timestamp" : { + "type" : "date" + }, + "DOLocationID" : { + "type" : "long" + }, + "PULocationID" : { + "type" : "long" + }, + "RatecodeID" : { + "type" : "long" + }, + "VendorID" : { + "type" : "long" + }, + "extra" : { + "type" : "double" + }, + "fare_amount" : { + "type" : "double" + }, + "improvement_surcharge" : { + "type" : "double" + }, + "mta_tax" : { + "type" : "double" + }, + "passenger_count" : { + "type" : "long" + }, + "payment_type" : { + "type" : "long" + }, + "store_and_fwd_flag" : { + "type" : "keyword" + }, + "tip_amount" : { + "type" : "double" + }, + "tolls_amount" : { + "type" : "double" + }, + "total_amount" : { + "type" : "double" + }, + "tpep_dropoff_datetime" : { + "type" : "date", + "format" : "YYYY-MM-dd HH:mm:ss" + }, + "tpep_pickup_datetime" : { + "type" : "date", + "format" : "YYYY-MM-dd HH:mm:ss" + }, + "trip_distance" : { + "type" : "double" + } + }, + "field_stats" : { + "DOLocationID" : { + "count" : 19998, + "cardinality" : 240, + "min_value" : 1, + "max_value" : 265, + "mean_value" : 150.26532653265312, + "median_value" : 148, + "top_hits" : [ + { + "value" : 79, + "count" : 760 + }, + { + "value" : 48, + "count" : 683 + }, + { + "value" : 68, + "count" : 529 + }, + { + "value" : 170, + "count" : 506 + }, + { + "value" : 107, + "count" : 468 + }, + { + "value" : 249, + "count" : 457 + }, + { + "value" : 230, + "count" : 441 + }, + { + "value" : 186, + "count" : 432 + }, + { + "value" : 141, + "count" : 409 + }, + { + "value" : 263, + "count" : 386 + } + ] + }, + "PULocationID" : { + "count" : 19998, + "cardinality" : 154, + "min_value" : 1, + "max_value" : 265, + "mean_value" : 153.4042404240424, + "median_value" : 148, + "top_hits" : [ + { + "value" : 79, + "count" : 1067 + }, + { + "value" : 230, + "count" : 949 + }, + { + "value" : 148, + "count" : 940 
+ }, + { + "value" : 132, + "count" : 897 + }, + { + "value" : 48, + "count" : 853 + }, + { + "value" : 161, + "count" : 820 + }, + { + "value" : 234, + "count" : 750 + }, + { + "value" : 249, + "count" : 722 + }, + { + "value" : 164, + "count" : 663 + }, + { + "value" : 114, + "count" : 646 + } + ] + }, + "RatecodeID" : { + "count" : 19998, + "cardinality" : 5, + "min_value" : 1, + "max_value" : 5, + "mean_value" : 1.0656565656565653, + "median_value" : 1, + "top_hits" : [ + { + "value" : 1, + "count" : 19311 + }, + { + "value" : 2, + "count" : 468 + }, + { + "value" : 5, + "count" : 195 + }, + { + "value" : 4, + "count" : 17 + }, + { + "value" : 3, + "count" : 7 + } + ] + }, + "VendorID" : { + "count" : 19998, + "cardinality" : 2, + "min_value" : 1, + "max_value" : 2, + "mean_value" : 1.59005900590059, + "median_value" : 2, + "top_hits" : [ + { + "value" : 2, + "count" : 11800 + }, + { + "value" : 1, + "count" : 8198 + } + ] + }, + "extra" : { + "count" : 19998, + "cardinality" : 3, + "min_value" : -0.5, + "max_value" : 0.5, + "mean_value" : 0.4815981598159816, + "median_value" : 0.5, + "top_hits" : [ + { + "value" : 0.5, + "count" : 19281 + }, + { + "value" : 0, + "count" : 698 + }, + { + "value" : -0.5, + "count" : 19 + } + ] + }, + "fare_amount" : { + "count" : 19998, + "cardinality" : 208, + "min_value" : -100, + "max_value" : 300, + "mean_value" : 13.937719771977209, + "median_value" : 9.5, + "top_hits" : [ + { + "value" : 6, + "count" : 1004 + }, + { + "value" : 6.5, + "count" : 935 + }, + { + "value" : 5.5, + "count" : 909 + }, + { + "value" : 7, + "count" : 903 + }, + { + "value" : 5, + "count" : 889 + }, + { + "value" : 7.5, + "count" : 854 + }, + { + "value" : 4.5, + "count" : 802 + }, + { + "value" : 8.5, + "count" : 790 + }, + { + "value" : 8, + "count" : 789 + }, + { + "value" : 9, + "count" : 711 + } + ] + }, + "improvement_surcharge" : { + "count" : 19998, + "cardinality" : 3, + "min_value" : -0.3, + "max_value" : 0.3, + "mean_value" : 0.29915991599159913, + "median_value" : 0.3, + "top_hits" : [ + { + "value" : 0.3, + "count" : 19964 + }, + { + "value" : -0.3, + "count" : 22 + }, + { + "value" : 0, + "count" : 12 + } + ] + }, + "mta_tax" : { + "count" : 19998, + "cardinality" : 3, + "min_value" : -0.5, + "max_value" : 0.5, + "mean_value" : 0.4962246224622462, + "median_value" : 0.5, + "top_hits" : [ + { + "value" : 0.5, + "count" : 19868 + }, + { + "value" : 0, + "count" : 109 + }, + { + "value" : -0.5, + "count" : 21 + } + ] + }, + "passenger_count" : { + "count" : 19998, + "cardinality" : 7, + "min_value" : 0, + "max_value" : 6, + "mean_value" : 1.6201620162016201, + "median_value" : 1, + "top_hits" : [ + { + "value" : 1, + "count" : 14219 + }, + { + "value" : 2, + "count" : 2886 + }, + { + "value" : 5, + "count" : 1047 + }, + { + "value" : 3, + "count" : 804 + }, + { + "value" : 6, + "count" : 523 + }, + { + "value" : 4, + "count" : 406 + }, + { + "value" : 0, + "count" : 113 + } + ] + }, + "payment_type" : { + "count" : 19998, + "cardinality" : 4, + "min_value" : 1, + "max_value" : 4, + "mean_value" : 1.315631563156316, + "median_value" : 1, + "top_hits" : [ + { + "value" : 1, + "count" : 13936 + }, + { + "value" : 2, + "count" : 5857 + }, + { + "value" : 3, + "count" : 160 + }, + { + "value" : 4, + "count" : 45 + } + ] + }, + "store_and_fwd_flag" : { + "count" : 19998, + "cardinality" : 2, + "top_hits" : [ + { + "value" : "N", + "count" : 19910 + }, + { + "value" : "Y", + "count" : 88 + } + ] + }, + "tip_amount" : { + "count" : 19998, + "cardinality" : 717, + 
"min_value" : 0, + "max_value" : 128, + "mean_value" : 2.010959095909593, + "median_value" : 1.45, + "top_hits" : [ + { + "value" : 0, + "count" : 6917 + }, + { + "value" : 1, + "count" : 1178 + }, + { + "value" : 2, + "count" : 624 + }, + { + "value" : 3, + "count" : 248 + }, + { + "value" : 1.56, + "count" : 206 + }, + { + "value" : 1.46, + "count" : 205 + }, + { + "value" : 1.76, + "count" : 196 + }, + { + "value" : 1.45, + "count" : 195 + }, + { + "value" : 1.36, + "count" : 191 + }, + { + "value" : 1.5, + "count" : 187 + } + ] + }, + "tolls_amount" : { + "count" : 19998, + "cardinality" : 26, + "min_value" : 0, + "max_value" : 35, + "mean_value" : 0.2729697969796978, + "median_value" : 0, + "top_hits" : [ + { + "value" : 0, + "count" : 19107 + }, + { + "value" : 5.76, + "count" : 791 + }, + { + "value" : 10.5, + "count" : 36 + }, + { + "value" : 2.64, + "count" : 21 + }, + { + "value" : 11.52, + "count" : 8 + }, + { + "value" : 5.54, + "count" : 4 + }, + { + "value" : 8.5, + "count" : 4 + }, + { + "value" : 17.28, + "count" : 4 + }, + { + "value" : 2, + "count" : 2 + }, + { + "value" : 2.16, + "count" : 2 + } + ] + }, + "total_amount" : { + "count" : 19998, + "cardinality" : 1267, + "min_value" : -100.3, + "max_value" : 389.12, + "mean_value" : 17.499898989898995, + "median_value" : 12.35, + "top_hits" : [ + { + "value" : 7.3, + "count" : 478 + }, + { + "value" : 8.3, + "count" : 443 + }, + { + "value" : 8.8, + "count" : 420 + }, + { + "value" : 6.8, + "count" : 406 + }, + { + "value" : 7.8, + "count" : 405 + }, + { + "value" : 6.3, + "count" : 371 + }, + { + "value" : 9.8, + "count" : 368 + }, + { + "value" : 5.8, + "count" : 362 + }, + { + "value" : 9.3, + "count" : 332 + }, + { + "value" : 10.3, + "count" : 332 + } + ] + }, + "tpep_dropoff_datetime" : { + "count" : 19998, + "cardinality" : 9066, + "top_hits" : [ + { + "value" : "2018-06-01 01:12:12", + "count" : 10 + }, + { + "value" : "2018-06-01 00:32:15", + "count" : 9 + }, + { + "value" : "2018-06-01 00:44:27", + "count" : 9 + }, + { + "value" : "2018-06-01 00:46:42", + "count" : 9 + }, + { + "value" : "2018-06-01 01:03:22", + "count" : 9 + }, + { + "value" : "2018-06-01 01:05:13", + "count" : 9 + }, + { + "value" : "2018-06-01 00:11:20", + "count" : 8 + }, + { + "value" : "2018-06-01 00:16:03", + "count" : 8 + }, + { + "value" : "2018-06-01 00:19:47", + "count" : 8 + }, + { + "value" : "2018-06-01 00:25:17", + "count" : 8 + } + ] + }, + "tpep_pickup_datetime" : { + "count" : 19998, + "cardinality" : 8760, + "top_hits" : [ + { + "value" : "2018-06-01 00:01:23", + "count" : 12 + }, + { + "value" : "2018-06-01 00:04:31", + "count" : 10 + }, + { + "value" : "2018-06-01 00:05:38", + "count" : 10 + }, + { + "value" : "2018-06-01 00:09:50", + "count" : 10 + }, + { + "value" : "2018-06-01 00:12:01", + "count" : 10 + }, + { + "value" : "2018-06-01 00:14:17", + "count" : 10 + }, + { + "value" : "2018-06-01 00:00:34", + "count" : 9 + }, + { + "value" : "2018-06-01 00:00:40", + "count" : 9 + }, + { + "value" : "2018-06-01 00:02:53", + "count" : 9 + }, + { + "value" : "2018-06-01 00:05:40", + "count" : 9 + } + ] + }, + "trip_distance" : { + "count" : 19998, + "cardinality" : 1687, + "min_value" : 0, + "max_value" : 64.63, + "mean_value" : 3.6521062106210715, + "median_value" : 2.16, + "top_hits" : [ + { + "value" : 0.9, + "count" : 335 + }, + { + "value" : 0.8, + "count" : 320 + }, + { + "value" : 1.1, + "count" : 316 + }, + { + "value" : 0.7, + "count" : 304 + }, + { + "value" : 1.2, + "count" : 303 + }, + { + "value" : 1, + "count" : 296 
+ }, + { + "value" : 1.3, + "count" : 280 + }, + { + "value" : 1.5, + "count" : 268 + }, + { + "value" : 1.6, + "count" : 268 + }, + { + "value" : 0.6, + "count" : 256 + } + ] + } + } +} +---- +// NOTCONSOLE + +<1> `num_messages_analyzed` is 2 lower than `num_lines_analyzed` because only + data records count as messages. The first line contains the column names + and in this sample the second line is blank. +<2> Unlike the first example, in this case the `format` has been identified as + `delimited`. +<3> Because the `format` is `delimited`, the `column_names` field in the output + lists the column names in the order they appear in the sample. +<4> `has_header_row` indicates that for this sample the column names were in + the first row of the sample. (If they hadn't been then it would have been + a good idea to specify them in the `column_names` query parameter.) +<5> The `delimiter` for this sample is a comma, as it's a CSV file. +<6> The `quote` character is the default double quote. (The structure finder + does not attempt to deduce any other quote character, so if you have a + delimited file that's quoted with some other character you must specify it + using the `quote` query parameter.) +<7> The `timestamp_field` has been chosen to be `tpep_pickup_datetime`. + `tpep_dropoff_datetime` would work just as well, but `tpep_pickup_datetime` + was chosen because it comes first in the column order. If you prefer + `tpep_dropoff_datetime` then force it to be chosen using the + `timestamp_field` query parameter. +<8> `joda_timestamp_formats` are used to tell Logstash and Ingest pipeline how + to parse timestamps. +<9> `java_timestamp_formats` are the Java time formats recognized in the time + fields. In future Ingest pipeline will switch to use this format. +<10> The timestamp format in this sample doesn't specify a timezone, so to + accurately convert them to UTC timestamps to store in Elasticsearch it's + necessary to supply the timezone they relate to. `need_client_timezone` + will be `false` for timestamp formats that include the timezone. + +If you try to analyze a lot of data then the analysis will take a long time. +If you want to limit the amount of processing your {es} cluster performs for +a request, use the `timeout` query parameter. The analysis will be aborted and +an error returned when the timeout expires. For example, you can replace 20000 +lines in the previous example with 200000 and set a 1 second timeout on the +analysis: + +[source,js] +---- +curl -s "s3.amazonaws.com/nyc-tlc/trip+data/yellow_tripdata_2018-06.csv" | head -200000 | curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_xpack/ml/find_file_structure?pretty&lines_to_sample=200000&timeout=1s" -T - +---- +// NOTCONSOLE +// Not converting to console because this shows how curl can be used + +Unless you are using an incredibly fast computer you'll receive a timeout error: + +[source,js] +---- +{ + "error" : { + "root_cause" : [ + { + "type" : "timeout_exception", + "reason" : "Aborting structure analysis during [delimited record parsing] as it has taken longer than the timeout of [1s]" + } + ], + "type" : "timeout_exception", + "reason" : "Aborting structure analysis during [delimited record parsing] as it has taken longer than the timeout of [1s]" + }, + "status" : 500 +} +---- +// NOTCONSOLE + +-- +NOTE: If you try the example above yourself you will note that the overall +running time of the `curl` commands is considerably longer than 1 second. 
This +is because it takes a while to download 200000 lines of CSV from the internet, +and the timeout is measured from the time this endpoint starts to process the +data. +-- + +This is an example of analyzing {es}'s own log file: + +[source,js] +---- +curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_xpack/ml/find_file_structure?pretty" -T "$ES_HOME/logs/elasticsearch.log" +---- +// NOTCONSOLE +// Not converting to console because this shows how curl can be used + +If the request does not encounter errors, the result will look something like +this: + +[source,js] +---- +{ + "num_lines_analyzed" : 53, + "num_messages_analyzed" : 53, + "sample_start" : "[2018-09-27T14:39:28,518][INFO ][o.e.e.NodeEnvironment ] [node-0] using [1] data paths, mounts [[/ (/dev/disk1)]], net usable_space [165.4gb], net total_space [464.7gb], types [hfs]\n[2018-09-27T14:39:28,521][INFO ][o.e.e.NodeEnvironment ] [node-0] heap size [494.9mb], compressed ordinary object pointers [true]\n", + "charset" : "UTF-8", + "has_byte_order_marker" : false, + "format" : "semi_structured_text", <1> + "multiline_start_pattern" : "^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", <2> + "grok_pattern" : "\\[%{TIMESTAMP_ISO8601:timestamp}\\]\\[%{LOGLEVEL:loglevel}.*", <3> + "timestamp_field" : "timestamp", + "joda_timestamp_formats" : [ + "ISO8601" + ], + "java_timestamp_formats" : [ + "yyyy-MM-dd'T'HH:mm:ss,SSS" + ], + "need_client_timezone" : true, + "mappings" : { + "@timestamp" : { + "type" : "date" + }, + "loglevel" : { + "type" : "keyword" + }, + "message" : { + "type" : "text" + } + }, + "field_stats" : { + "loglevel" : { + "count" : 53, + "cardinality" : 3, + "top_hits" : [ + { + "value" : "INFO", + "count" : 51 + }, + { + "value" : "DEBUG", + "count" : 1 + }, + { + "value" : "WARN", + "count" : 1 + } + ] + }, + "timestamp" : { + "count" : 53, + "cardinality" : 28, + "top_hits" : [ + { + "value" : "2018-09-27T14:39:29,859", + "count" : 10 + }, + { + "value" : "2018-09-27T14:39:29,860", + "count" : 9 + }, + { + "value" : "2018-09-27T14:39:29,858", + "count" : 6 + }, + { + "value" : "2018-09-27T14:39:28,523", + "count" : 3 + }, + { + "value" : "2018-09-27T14:39:34,234", + "count" : 2 + }, + { + "value" : "2018-09-27T14:39:28,518", + "count" : 1 + }, + { + "value" : "2018-09-27T14:39:28,521", + "count" : 1 + }, + { + "value" : "2018-09-27T14:39:28,522", + "count" : 1 + }, + { + "value" : "2018-09-27T14:39:29,861", + "count" : 1 + }, + { + "value" : "2018-09-27T14:39:32,786", + "count" : 1 + } + ] + } + } +} +---- +// NOTCONSOLE + +<1> This time the `format` has been identified as `semi_structured_text`. +<2> The `multiline_start_pattern` is set on the basis that the timestamp appears + in the first line of each multi-line log message. +<3> A very simple `grok_pattern` has been created, which extracts the timestamp + and recognizable fields that appear in every analyzed message. In this case + the only field that was recognized beyond the timestamp was the log level. + +If you recognize more fields than the simple `grok_pattern` produced by the +structure finder unaided then you can resubmit the request specifying a more +advanced `grok_pattern` as a query parameter and the structure finder will +calculate `field_stats` for your additional fields. + +In the case of the {es} log a more complete Grok pattern is +`\[%{TIMESTAMP_ISO8601:timestamp}\]\[%{LOGLEVEL:loglevel} *\]\[%{JAVACLASS:class} *\] \[%{HOSTNAME:node}\] %{JAVALOGMESSAGE:message}`. 
+You can analyze the same log file again, submitting this `grok_pattern` as a +query parameter (appropriately URL escaped): + +[source,js] +---- +curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_xpack/ml/find_file_structure?pretty&format=semi_structured_text&grok_pattern=%5C%5B%25%7BTIMESTAMP_ISO8601:timestamp%7D%5C%5D%5C%5B%25%7BLOGLEVEL:loglevel%7D%20*%5C%5D%5C%5B%25%7BJAVACLASS:class%7D%20*%5C%5D%20%5C%5B%25%7BHOSTNAME:node%7D%5C%5D%20%25%7BJAVALOGMESSAGE:message%7D" -T "$ES_HOME/logs/elasticsearch.log" +---- +// NOTCONSOLE +// Not converting to console because this shows how curl can be used + +If the request does not encounter errors, the result will look something like +this: + +[source,js] +---- +{ + "num_lines_analyzed" : 53, + "num_messages_analyzed" : 53, + "sample_start" : "[2018-09-27T14:39:28,518][INFO ][o.e.e.NodeEnvironment ] [node-0] using [1] data paths, mounts [[/ (/dev/disk1)]], net usable_space [165.4gb], net total_space [464.7gb], types [hfs]\n[2018-09-27T14:39:28,521][INFO ][o.e.e.NodeEnvironment ] [node-0] heap size [494.9mb], compressed ordinary object pointers [true]\n", + "charset" : "UTF-8", + "has_byte_order_marker" : false, + "format" : "semi_structured_text", + "multiline_start_pattern" : "^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", + "grok_pattern" : "\\[%{TIMESTAMP_ISO8601:timestamp}\\]\\[%{LOGLEVEL:loglevel} *\\]\\[%{JAVACLASS:class} *\\] \\[%{HOSTNAME:node}\\] %{JAVALOGMESSAGE:message}", <1> + "timestamp_field" : "timestamp", + "joda_timestamp_formats" : [ + "ISO8601" + ], + "java_timestamp_formats" : [ + "yyyy-MM-dd'T'HH:mm:ss,SSS" + ], + "need_client_timezone" : true, + "mappings" : { + "@timestamp" : { + "type" : "date" + }, + "class" : { + "type" : "keyword" + }, + "loglevel" : { + "type" : "keyword" + }, + "message" : { + "type" : "text" + }, + "node" : { + "type" : "keyword" + } + }, + "field_stats" : { <2> + "class" : { + "count" : 53, + "cardinality" : 14, + "top_hits" : [ + { + "value" : "o.e.p.PluginsService", + "count" : 26 + }, + { + "value" : "o.e.c.m.MetaDataIndexTemplateService", + "count" : 8 + }, + { + "value" : "o.e.n.Node", + "count" : 7 + }, + { + "value" : "o.e.e.NodeEnvironment", + "count" : 2 + }, + { + "value" : "o.e.a.ActionModule", + "count" : 1 + }, + { + "value" : "o.e.c.s.ClusterApplierService", + "count" : 1 + }, + { + "value" : "o.e.c.s.MasterService", + "count" : 1 + }, + { + "value" : "o.e.d.DiscoveryModule", + "count" : 1 + }, + { + "value" : "o.e.g.GatewayService", + "count" : 1 + }, + { + "value" : "o.e.l.LicenseService", + "count" : 1 + } + ] + }, + "loglevel" : { + "count" : 53, + "cardinality" : 3, + "top_hits" : [ + { + "value" : "INFO", + "count" : 51 + }, + { + "value" : "DEBUG", + "count" : 1 + }, + { + "value" : "WARN", + "count" : 1 + } + ] + }, + "message" : { + "count" : 53, + "cardinality" : 53, + "top_hits" : [ + { + "value" : "Using REST wrapper from plugin org.elasticsearch.xpack.security.Security", + "count" : 1 + }, + { + "value" : "adding template [.monitoring-alerts] for index patterns [.monitoring-alerts-6]", + "count" : 1 + }, + { + "value" : "adding template [.monitoring-beats] for index patterns [.monitoring-beats-6-*]", + "count" : 1 + }, + { + "value" : "adding template [.monitoring-es] for index patterns [.monitoring-es-6-*]", + "count" : 1 + }, + { + "value" : "adding template [.monitoring-kibana] for index patterns [.monitoring-kibana-6-*]", + "count" : 1 + }, + { + "value" : "adding template [.monitoring-logstash] for index patterns 
[.monitoring-logstash-6-*]", + "count" : 1 + }, + { + "value" : "adding template [.triggered_watches] for index patterns [.triggered_watches*]", + "count" : 1 + }, + { + "value" : "adding template [.watch-history-9] for index patterns [.watcher-history-9*]", + "count" : 1 + }, + { + "value" : "adding template [.watches] for index patterns [.watches*]", + "count" : 1 + }, + { + "value" : "starting ...", + "count" : 1 + } + ] + }, + "node" : { + "count" : 53, + "cardinality" : 1, + "top_hits" : [ + { + "value" : "node-0", + "count" : 53 + } + ] + }, + "timestamp" : { + "count" : 53, + "cardinality" : 28, + "top_hits" : [ + { + "value" : "2018-09-27T14:39:29,859", + "count" : 10 + }, + { + "value" : "2018-09-27T14:39:29,860", + "count" : 9 + }, + { + "value" : "2018-09-27T14:39:29,858", + "count" : 6 + }, + { + "value" : "2018-09-27T14:39:28,523", + "count" : 3 + }, + { + "value" : "2018-09-27T14:39:34,234", + "count" : 2 + }, + { + "value" : "2018-09-27T14:39:28,518", + "count" : 1 + }, + { + "value" : "2018-09-27T14:39:28,521", + "count" : 1 + }, + { + "value" : "2018-09-27T14:39:28,522", + "count" : 1 + }, + { + "value" : "2018-09-27T14:39:29,861", + "count" : 1 + }, + { + "value" : "2018-09-27T14:39:32,786", + "count" : 1 + } + ] + } + } +} +---- +// NOTCONSOLE + +<1> The `grok_pattern` in the output is now the overridden one supplied in the + query parameter. +<2> The returned `field_stats` include entries for the fields from the + overridden `grok_pattern`. + +The URL escaping is hard, so if you are working interactively it is best to use +the {ml} UI! diff --git a/docs/reference/modules/cluster/misc.asciidoc b/docs/reference/modules/cluster/misc.asciidoc index bdc56999553..3f12bd255de 100644 --- a/docs/reference/modules/cluster/misc.asciidoc +++ b/docs/reference/modules/cluster/misc.asciidoc @@ -43,6 +43,12 @@ PUT /_cluster/settings ------------------------------- // CONSOLE +IMPORTANT: User-defined cluster metadata is not intended to store sensitive or +confidential information. Any information stored in user-defined cluster +metadata will be viewable by anyone with access to the +<> API, and is recorded in the +{es} logs. + [[cluster-max-tombstones]] ==== Index Tombstones diff --git a/docs/reference/query-dsl/type-query.asciidoc b/docs/reference/query-dsl/type-query.asciidoc index 96edda27eff..9825c68c74f 100644 --- a/docs/reference/query-dsl/type-query.asciidoc +++ b/docs/reference/query-dsl/type-query.asciidoc @@ -1,6 +1,8 @@ [[query-dsl-type-query]] === Type Query +deprecated[7.0.0, Types are being removed, prefer filtering on a field instead. For more information, please see <>.] + Filters documents matching the provided document / mapping type. [source,js] diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index 490249461e5..ffea569ca21 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -1,5 +1,5 @@ [[es-release-notes]] -= Release Notes += Release notes [partintro] -- diff --git a/docs/reference/release-notes/7.0.0-alpha1.asciidoc b/docs/reference/release-notes/7.0.0-alpha1.asciidoc index c3a03d77f81..eb1924d2452 100644 --- a/docs/reference/release-notes/7.0.0-alpha1.asciidoc +++ b/docs/reference/release-notes/7.0.0-alpha1.asciidoc @@ -1,5 +1,5 @@ [[release-notes-7.0.0-alpha1]] -== 7.0.0-alpha1 Release Notes +== 7.0.0-alpha1 release notes The changes listed below have been released for the first time in Elasticsearch 7.0.0-alpha1. 
diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index 0ab4106c22c..5b20b67061d 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -1,7 +1,7 @@ [[release-highlights]] -= {es} Release Highlights += {es} Release highlights ++++ -Release Highlights +Release highlights ++++ [partintro] diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index b80e8badf5b..c6243ab2598 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -19,7 +19,7 @@ directly to configure and access {xpack} features. include::info.asciidoc[] -include::{xes-repo-dir}/rest-api/graph/explore.asciidoc[] +include::{es-repo-dir}/graph/explore.asciidoc[] include::{es-repo-dir}/licensing/index.asciidoc[] include::{es-repo-dir}/migration/migration.asciidoc[] include::{es-repo-dir}/ml/apis/ml-api.asciidoc[] diff --git a/docs/reference/settings/ml-settings.asciidoc b/docs/reference/settings/ml-settings.asciidoc index ff5ec6f205e..493ce06962d 100644 --- a/docs/reference/settings/ml-settings.asciidoc +++ b/docs/reference/settings/ml-settings.asciidoc @@ -1,15 +1,22 @@ [role="xpack"] [[ml-settings]] -=== Machine Learning Settings in Elasticsearch +=== Machine learning settings in Elasticsearch ++++ -Machine Learning Settings +Machine learning settings ++++ You do not need to configure any settings to use {ml}. It is enabled by default. +All of these settings can be added to the `elasticsearch.yml` configuration file. +The dynamic settings can also be updated across a cluster with the +<>. + +TIP: Dynamic settings take precedence over settings in the `elasticsearch.yml` +file. + [float] [[general-ml-settings]] -==== General Machine Learning Settings +==== General machine learning settings `node.ml`:: Set to `true` (default) to identify the node as a _machine learning node_. + @@ -37,12 +44,6 @@ IMPORTANT: If you want to use {ml} features in your cluster, you must have `xpack.ml.enabled` set to `true` on all master-eligible nodes. This is the default behavior. -`xpack.ml.max_open_jobs`:: -The maximum number of jobs that can run on a node. Defaults to `20`. -The maximum number of jobs is also constrained by memory usage, so fewer -jobs than specified by this setting will run on a node if the estimated -memory use of the jobs would be higher than allowed. - `xpack.ml.max_machine_memory_percent`:: The maximum percentage of the machine's memory that {ml} may use for running analytics processes. (These processes are separate to the {es} JVM.) Defaults to @@ -57,8 +58,26 @@ that is greater than this setting value, an error occurs. Existing jobs are not affected when you update this setting. For more information about the `model_memory_limit` property, see <>. +`xpack.ml.max_open_jobs`:: +The maximum number of jobs that can run on a node. Defaults to `20`. +The maximum number of jobs is also constrained by memory usage, so fewer +jobs than specified by this setting will run on a node if the estimated +memory use of the jobs would be higher than allowed. + `xpack.ml.node_concurrent_job_allocations`:: The maximum number of jobs that can concurrently be in the `opening` state on each node. Typically, jobs spend a small amount of time in this state before they move to `open` state. Jobs that must restore large models when they are opening spend more time in the `opening` state. Defaults to `2`. 
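+
+For example, a sketch of updating one of the dynamic {ml} settings across the
+cluster with the cluster update settings API (this assumes
+`xpack.ml.node_concurrent_job_allocations` is one of the dynamic settings):
+
+[source,js]
+--------------------------------------------------
+PUT /_cluster/settings
+{
+  "persistent": {
+    "xpack.ml.node_concurrent_job_allocations": 4
+  }
+}
+--------------------------------------------------
+// NOTCONSOLE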
+ +[float] +[[advanced-ml-settings]] +==== Advanced machine learning settings + +These settings are for advanced use cases; the default values are generally +sufficient: + +`xpack.ml.max_anomaly_records`:: (<>) +The maximum number of records that are output per bucket. The default value is +`500`. + diff --git a/docs/reference/sql/appendix/syntax-reserved.asciidoc b/docs/reference/sql/appendix/syntax-reserved.asciidoc index 7a502d6eea9..6bbd81233bf 100644 --- a/docs/reference/sql/appendix/syntax-reserved.asciidoc +++ b/docs/reference/sql/appendix/syntax-reserved.asciidoc @@ -1,8 +1,7 @@ [role="xpack"] [testenv="basic"] -[appendix] [[sql-syntax-reserved]] -= Reserved Keywords +== Reserved keywords Table with reserved keywords that need to be quoted. Also provide an example to make it more obvious. diff --git a/docs/reference/sql/functions/math.asciidoc b/docs/reference/sql/functions/math.asciidoc index 604603f2973..e84a71b1e91 100644 --- a/docs/reference/sql/functions/math.asciidoc +++ b/docs/reference/sql/functions/math.asciidoc @@ -8,152 +8,674 @@ to be numeric. ==== Generic -* `ABS` +[[sql-functions-math-abs]] +===== `ABS` -https://en.wikipedia.org/wiki/Absolute_value[Absolute value], returns \[same type as input] - -["source","sql",subs="attributes,callouts,macros"] +.Synopsis: +[source, sql] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[abs] +ABS(numeric_exp<1>) -------------------------------------------------- -* `CBRT` +*Input*: -https://en.wikipedia.org/wiki/Cube_root[Cube root], returns `double` +<1> numeric expression -// TODO make the example in the tests presentable +*Output*: numeric -* `CEIL` +.Description: -https://en.wikipedia.org/wiki/Floor_and_ceiling_functions[Ceiling], returns `double` +Returns the https://en.wikipedia.org/wiki/Absolute_value[absolute value] of `numeric_exp`. The return type is the same as the input type. -* `CEILING` - -Same as `CEIL` - -// TODO make the example in the tests presentable - -* `E` - -https://en.wikipedia.org/wiki/E_%28mathematical_constant%29[Euler's number], returns `2.7182818284590452354` - - -* https://en.wikipedia.org/wiki/Rounding#Round_half_up[Round] (`ROUND`) - -// TODO make the example in the tests presentable - -NOTE: This rounds "half up" meaning that `ROUND(-1.5)` results in `-1`. - - -* https://en.wikipedia.org/wiki/Floor_and_ceiling_functions[Floor] (`FLOOR`) - -// TODO make the example in the tests presentable - -* https://en.wikipedia.org/wiki/Natural_logarithm[Natural logarithm] (`LOG`) - -["source","sql",subs="attributes,callouts,macros"] +["source","sql",subs="attributes,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[log] +include-tagged::{sql-specs}/docs.csv-spec[abs] -------------------------------------------------- -* https://en.wikipedia.org/wiki/Logarithm[Logarithm] base 10 (`LOG10`) +[[sql-functions-math-cbrt]] +===== `CBRT` -["source","sql",subs="attributes,callouts,macros"] +.Synopsis: +[source, sql] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[log10] +CBRT(numeric_exp<1>) -------------------------------------------------- -* https://en.wikipedia.org/wiki/Square_root[Square root] (`SQRT`) +*Input*: -["source","sql",subs="attributes,callouts,macros"] +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Cube_root[cube root] of `numeric_exp`. 
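A minimal hand-written query illustrating the description above (a sketch only, not taken from the `docs.csv-spec` includes; the result assumes standard double arithmetic):

[source, sql]
--------------------------------------------------
-- the cube root is defined for negative input as well
SELECT CBRT(-27.0) AS cbrt; -- cbrt = -3.0
--------------------------------------------------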
+ +["source","sql",subs="attributes,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[sqrt] +include-tagged::{sql-specs}/docs.csv-spec[mathInlineCbrtWithNegativeValue] -------------------------------------------------- -* https://en.wikipedia.org/wiki/Exponential_function[e^x^] (`EXP`) +[[sql-functions-math-ceil]] +===== `CEIL/CEILING` -["source","sql",subs="attributes,callouts,macros"] +.Synopsis: +[source, sql] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[exp] +CEIL(numeric_exp<1>) +CEILING(numeric_exp<2>) -------------------------------------------------- -* https://docs.oracle.com/javase/8/docs/api/java/lang/Math.html#expm1-double-[e^x^ - 1] (`EXPM1`) +*Input*: -["source","sql",subs="attributes,callouts,macros"] +<1> numeric expression +<2> numeric expression + +*Output*: integer or long numeric value + +.Description: + +Returns the smallest integer greater than or equal to `numeric_exp`. + +["source","sql",subs="attributes,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[expm1] +include-tagged::{sql-specs}/docs.csv-spec[mathInlineCeiling] +-------------------------------------------------- + +[[sql-functions-math-e]] +===== `E` + +.Synopsis: +[source, sql] +-------------------------------------------------- +E() +-------------------------------------------------- + +*Input*: _none_ + +*Output*: `2.718281828459045` + +.Description: + +Returns https://en.wikipedia.org/wiki/E_%28mathematical_constant%29[Euler's number]. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathEulersNumber] +-------------------------------------------------- + +[[sql-functions-math-exp]] +===== `EXP` + +.Synopsis: +[source, sql] +-------------------------------------------------- +EXP(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> float numeric expression + +*Output*: double numeric value + +.Description: + +Returns https://en.wikipedia.org/wiki/Exponential_function[Euler's number at the power] of `numeric_exp` e^numeric_exp^. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathExpInline] +-------------------------------------------------- + +[[sql-functions-math-expm1]] +===== `EXPM1` + +.Synopsis: +[source, sql] +-------------------------------------------------- +EXPM1(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> float numeric expression + +*Output*: double numeric value + +.Description: + +Returns https://docs.oracle.com/javase/8/docs/api/java/lang/Math.html#expm1-double-[Euler's number at the power] of `numeric_exp` minus 1 (e^numeric_exp^ - 1). + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathExpm1Inline] +-------------------------------------------------- + +[[sql-functions-math-floor]] +===== `FLOOR` + +.Synopsis: +[source, sql] +-------------------------------------------------- +FLOOR(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: integer or long numeric value + +.Description: + +Returns the largest integer less than or equal to `numeric_exp`. 
+ +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineFloor] +-------------------------------------------------- + +[[sql-functions-math-log]] +===== `LOG` + +.Synopsis: +[source, sql] +-------------------------------------------------- +LOG(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Natural_logarithm[natural logarithm] of `numeric_exp`. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineLog] +-------------------------------------------------- + +[[sql-functions-math-log10]] +===== `LOG10` + +.Synopsis: +[source, sql] +-------------------------------------------------- +LOG10(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Common_logarithm[base 10 logarithm] of `numeric_exp`. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineLog10] +-------------------------------------------------- + +[[sql-functions-math-pi]] +===== `PI` + +.Synopsis: +[source, sql] +-------------------------------------------------- +PI() +-------------------------------------------------- + +*Input*: _none_ + +*Output*: `3.141592653589793` + +.Description: + +Returns https://en.wikipedia.org/wiki/Pi[PI number]. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathPINumber] +-------------------------------------------------- + +[[sql-functions-math-power]] +===== `POWER` + +.Synopsis: +[source, sql] +-------------------------------------------------- +POWER(numeric_exp<1>, integer_exp<2>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression +<2> integer expression + +*Output*: double numeric value + +.Description: + +Returns the value of `numeric_exp` to the power of `integer_exp`. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlinePowerPositive] +-------------------------------------------------- + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlinePowerNegative] +-------------------------------------------------- + +[[sql-functions-math-round]] +===== `ROUND` + +.Synopsis: +[source, sql] +---- +ROUND(numeric_exp<1>[, integer_exp<2>]) +---- +*Input*: + +<1> numeric expression +<2> integer expression; optional + +*Output*: numeric + +.Description: +Returns `numeric_exp` rounded to `integer_exp` places right of the decimal point. If `integer_exp` is negative, +`numeric_exp` is rounded to |`integer_exp`| places to the left of the decimal point. If `integer_exp` is omitted, +the function will perform as if `integer_exp` would be 0. The returned numeric data type is the same as the data type +of `numeric_exp`. 
+ +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathRoundWithPositiveParameter] +-------------------------------------------------- + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathRoundWithNegativeParameter] +-------------------------------------------------- + +[[sql-functions-math-sign]] +===== `SIGN` + +.Synopsis: +[source, sql] +-------------------------------------------------- +SIGN(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: [-1, 0, 1] + +.Description: + +Returns an indicator of the sign of `numeric_exp`. If `numeric_exp` is less than zero, –1 is returned. If `numeric_exp` equals zero, 0 is returned. If `numeric_exp` is greater than zero, 1 is returned. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineSign] +-------------------------------------------------- + + +[[sql-functions-math-sqrt]] +===== `SQRT` + +.Synopsis: +[source, sql] +-------------------------------------------------- +SQRT(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns https://en.wikipedia.org/wiki/Square_root[square root] of `numeric_exp`. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineSqrt] +-------------------------------------------------- + +[[sql-functions-math-truncate]] +===== `TRUNCATE` + +.Synopsis: +[source, sql] +---- +TRUNCATE(numeric_exp<1>[, integer_exp<2>]) +---- +*Input*: + +<1> numeric expression +<2> integer expression; optional + +*Output*: numeric + +.Description: +Returns `numeric_exp` truncated to `integer_exp` places right of the decimal point. If `integer_exp` is negative, +`numeric_exp` is truncated to |`integer_exp`| places to the left of the decimal point. If `integer_exp` is omitted, +the function will perform as if `integer_exp` would be 0. The returned numeric data type is the same as the data type +of `numeric_exp`. 
+ +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathTruncateWithPositiveParameter] +-------------------------------------------------- + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathTruncateWithNegativeParameter] -------------------------------------------------- ==== Trigonometric -* Convert from https://en.wikipedia.org/wiki/Radian[radians] -to https://en.wikipedia.org/wiki/Degree_(angle)[degrees] (`DEGREES`) +[[sql-functions-math-acos]] +===== `ACOS` -["source","sql",subs="attributes,callouts,macros"] +.Synopsis: +[source, sql] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[degrees] +ACOS(numeric_exp<1>) -------------------------------------------------- -* Convert from https://en.wikipedia.org/wiki/Degree_(angle)[degrees] -to https://en.wikipedia.org/wiki/Radian[radians] (`RADIANS`) +*Input*: -["source","sql",subs="attributes,callouts,macros"] +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[arccosine] of `numeric_exp` as an angle, expressed in radians. + +["source","sql",subs="attributes,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[degrees] +include-tagged::{sql-specs}/docs.csv-spec[mathInlineAcos] -------------------------------------------------- -* https://en.wikipedia.org/wiki/Trigonometric_functions#sine[Sine] (`SIN`) +[[sql-functions-math-asin]] +===== `ASIN` -["source","sql",subs="attributes,callouts,macros"] +.Synopsis: +[source, sql] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[sin] +ASIN(numeric_exp<1>) -------------------------------------------------- -* https://en.wikipedia.org/wiki/Trigonometric_functions#cosine[Cosine] (`COS`) +*Input*: -["source","sql",subs="attributes,callouts,macros"] +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[arcsine] of `numeric_exp` as an angle, expressed in radians. + +["source","sql",subs="attributes,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[cos] +include-tagged::{sql-specs}/docs.csv-spec[mathInlineAsin] -------------------------------------------------- -* https://en.wikipedia.org/wiki/Trigonometric_functions#tangent[Tangent] (`TAN`) +[[sql-functions-math-atan]] +===== `ATAN` -["source","sql",subs="attributes,callouts,macros"] +.Synopsis: +[source, sql] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[tan] +ATAN(numeric_exp<1>) -------------------------------------------------- -* https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[Arc sine] (`ASIN`) +*Input*: -["source","sql",subs="attributes,callouts,macros"] +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[arctangent] of `numeric_exp` as an angle, expressed in radians. 
+ +["source","sql",subs="attributes,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[asin] +include-tagged::{sql-specs}/docs.csv-spec[mathInlineAtan] -------------------------------------------------- -* https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[Arc cosine] (`ACOS`) +[[sql-functions-math-atan2]] +===== `ATAN2` -["source","sql",subs="attributes,callouts,macros"] +.Synopsis: +[source, sql] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[acos] +ATAN2(ordinate<1>, abscisa<2>) -------------------------------------------------- -* https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[Arc tangent] (`ATAN`) +*Input*: -["source","sql",subs="attributes,callouts,macros"] +<1> numeric expression +<2> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Atan2[arctangent of the `ordinate` and `abscisa` coordinates] specified as an angle, expressed in radians. + +["source","sql",subs="attributes,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[atan] +include-tagged::{sql-specs}/docs.csv-spec[mathInlineAtan2] -------------------------------------------------- -* https://en.wikipedia.org/wiki/Hyperbolic_function[Hyperbolic sine] (`SINH`) +[[sql-functions-math-cos]] +===== `COS` -["source","sql",subs="attributes,callouts,macros"] +.Synopsis: +[source, sql] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[sinh] +COS(numeric_exp<1>) -------------------------------------------------- -* https://en.wikipedia.org/wiki/Hyperbolic_function[Hyperbolic cosine] (`COSH`) +*Input*: -["source","sql",subs="attributes,callouts,macros"] +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Trigonometric_functions#cosine[cosine] of `numeric_exp`, where `numeric_exp` is an angle expressed in radians. + +["source","sql",subs="attributes,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[cosh] +include-tagged::{sql-specs}/docs.csv-spec[mathInlineCosine] +-------------------------------------------------- + +[[sql-functions-math-cosh]] +===== `COSH` + +.Synopsis: +[source, sql] +-------------------------------------------------- +COSH(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Hyperbolic_function[hyperbolic cosine] of `numeric_exp`. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineCosh] +-------------------------------------------------- + +[[sql-functions-math-cot]] +===== `COT` + +.Synopsis: +[source, sql] +-------------------------------------------------- +COT(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Trigonometric_functions#Cosecant,_secant,_and_cotangent[cotangent] of `numeric_exp`, where `numeric_exp` is an angle expressed in radians. 
+ +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineCotangent] +-------------------------------------------------- + +[[sql-functions-math-degrees]] +===== `DEGREES` + +.Synopsis: +[source, sql] +-------------------------------------------------- +DEGREES(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Convert from https://en.wikipedia.org/wiki/Radian[radians] +to https://en.wikipedia.org/wiki/Degree_(angle)[degrees]. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineDegrees] +-------------------------------------------------- + +[[sql-functions-math-radians]] +===== `RADIANS` + +.Synopsis: +[source, sql] +-------------------------------------------------- +RADIANS(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Convert from https://en.wikipedia.org/wiki/Degree_(angle)[degrees] +to https://en.wikipedia.org/wiki/Radian[radians]. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineRadians] +-------------------------------------------------- + +[[sql-functions-math-sin]] +===== `SIN` + +.Synopsis: +[source, sql] +-------------------------------------------------- +SIN(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Trigonometric_functions#sine[sine] of `numeric_exp`, where `numeric_exp` is an angle expressed in radians. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineSine] +-------------------------------------------------- + +[[sql-functions-math-sinh]] +===== `SINH` + +.Synopsis: +[source, sql] +-------------------------------------------------- +SINH(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Hyperbolic_function[hyperbolic sine] of `numeric_exp`. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineSinh] +-------------------------------------------------- + +[[sql-functions-math-tan]] +===== `TAN` + +.Synopsis: +[source, sql] +-------------------------------------------------- +TAN(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Trigonometric_functions#tangent[tangent] of `numeric_exp`, where `numeric_exp` is an angle expressed in radians. 
+ +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineTanget] -------------------------------------------------- diff --git a/docs/reference/sql/functions/string.asciidoc b/docs/reference/sql/functions/string.asciidoc index ccc11938028..873ac18d812 100644 --- a/docs/reference/sql/functions/string.asciidoc +++ b/docs/reference/sql/functions/string.asciidoc @@ -8,233 +8,456 @@ Functions for performing string manipulation. [[sql-functions-string-ascii]] ==== `ASCII` -*Input*: `string`, *Output*: `integer` +.Synopsis: +[source, sql] +-------------------------------------------------- +ASCII(string_exp<1>) +-------------------------------------------------- -Returns the ASCII code value of the leftmost character of string_exp as an integer. +*Input*: + +<1> string expression + +*Output*: integer + +.Description: + +Returns the ASCII code value of the leftmost character of `string_exp` as an integer. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringAscii] ----- +-------------------------------------------------- [[sql-functions-string-bit-length]] ==== `BIT_LENGTH` -*Input*: `string`, *Output*: `integer` +.Synopsis: +[source, sql] +-------------------------------------------------- +BIT_LENGTH(string_exp<1>) +-------------------------------------------------- +*Input*: -Returns the length in bits of the input. +<1> string expression + +*Output*: integer + +.Description: + +Returns the length in bits of the `string_exp` input expression. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringBitLength] ----- +-------------------------------------------------- [[sql-functions-string-char]] ==== `CHAR` -*Input*: `numeric`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +CHAR(code<1>) +-------------------------------------------------- +*Input*: + +<1> integer expression + +*Output*: string + +.Description: Returns the character that has the ASCII code value specified by the numeric input. The value should be between 0 and 255; otherwise, the return value is data source–dependent. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringChar] ----- +-------------------------------------------------- [[sql-functions-string-char-length]] ==== `CHAR_LENGTH` -*Input*: `string`, *Output*: `integer` +.Synopsis: +[source, sql] +-------------------------------------------------- +CHAR_LENGTH(string_exp<1>) +-------------------------------------------------- +*Input*: + +<1> string expression + +*Output*: integer + +.Description: Returns the length in characters of the input, if the string expression is of a character data type; otherwise, returns the length in bytes of the string expression (the smallest integer not less than the number of bits divided by 8). 
["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringCharLength] ----- +-------------------------------------------------- [[sql-functions-string-concat]] ==== `CONCAT` -*Input*: `string1`, `string2`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +CONCAT(string_exp1<1>,string_exp2<2>) +-------------------------------------------------- +*Input*: -turns a character string that is the result of concatenating string1 to string2. If one of the string is `NULL`, -the other string will be returned. +<1> string expression +<2> string expression + +*Output*: string + +.Description: + +Returns a character string that is the result of concatenating `string_exp1` to `string_exp2`. If one of the string is `NULL`, the other string will be returned. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringConcat] ----- +-------------------------------------------------- [[sql-functions-string-insert]] ==== `INSERT` -*Input*: `string1`, `start`, `length`, `string2`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +INSERT(source<1>, start<2>, length<3>, replacement<4>) +-------------------------------------------------- +*Input*: -Returns a string where length characters have been deleted from string1, beginning at start, and where string2 has been inserted into string1, beginning at start. +<1> string expression +<2> integer expression +<3> integer expression +<4> string expression + +*Output*: string + +.Description: + +Returns a string where `length` characters have been deleted from `source`, beginning at `start`, and where `replacement` has been inserted into `source`, beginning at `start`. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringInsert] ----- +-------------------------------------------------- [[sql-functions-string-lcase]] ==== `LCASE` -*Input*: `string`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +LCASE(string_exp<1>) +-------------------------------------------------- +*Input*: -Returns a string equal to that in string, with all uppercase characters converted to lowercase. +<1> string expression + +*Output*: string + +.Description: + +Returns a string equal to that in `string_exp`, with all uppercase characters converted to lowercase. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringLCase] ----- +-------------------------------------------------- [[sql-functions-string-left]] ==== `LEFT` -*Input*: `string`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +LEFT(string_exp<1>, count<2>) +-------------------------------------------------- +*Input*: -Returns the leftmost count characters of string. +<1> string expression +<2> integer expression + +*Output*: string + +.Description: + +Returns the leftmost count characters of `string_exp`. 
["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringLeft] ----- +-------------------------------------------------- [[sql-functions-string-length]] ==== `LENGTH` -*Input*: `string`, *Output*: `integer` +.Synopsis: +[source, sql] +-------------------------------------------------- +LENGTH(string_exp<1>) +-------------------------------------------------- +*Input*: -Returns the number of characters in string, excluding trailing blanks. +<1> string expression + +*Output*: integer + +.Description: + +Returns the number of characters in `string_exp`, excluding trailing blanks. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringLength] ----- +-------------------------------------------------- [[sql-functions-string-locate]] ==== `LOCATE` -*Input*: `string1`, `string2`[, `start`]`, *Output*: `integer` +.Synopsis: +[source, sql] +-------------------------------------------------- +LOCATE(pattern<1>, source<2>[, start]<3>) +-------------------------------------------------- +*Input*: -Returns the starting position of the first occurrence of string1 within string2. The search for the first occurrence of string1 begins with the first character position in string2 unless the optional argument, start, is specified. If start is specified, the search begins with the character position indicated by the value of start. The first character position in string2 is indicated by the value 1. If string1 is not found within string2, the value 0 is returned. +<1> string expression +<2> string expression +<3> integer expression; optional + +*Output*: integer + +.Description: + +Returns the starting position of the first occurrence of `pattern` within `source`. The search for the first occurrence of `pattern` begins with the first character position in `source` unless the optional argument, `start`, is specified. If `start` is specified, the search begins with the character position indicated by the value of `start`. The first character position in `source` is indicated by the value 1. If `pattern` is not found within `source`, the value 0 is returned. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringLocateWoStart] ----- +-------------------------------------------------- ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringLocateWithStart] ----- +-------------------------------------------------- [[sql-functions-string-ltrim]] ==== `LTRIM` -*Input*: `string`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +LTRIM(string_exp<1>) +-------------------------------------------------- +*Input*: -Returns the characters of string_exp, with leading blanks removed. +<1> string expression + +*Output*: string + +.Description: + +Returns the characters of `string_exp`, with leading blanks removed. 
["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringLTrim] ----- +-------------------------------------------------- [[sql-functions-string-position]] ==== `POSITION` -*Input*: `string1`, `string2`, *Output*: `integer` +.Synopsis: +[source, sql] +-------------------------------------------------- +POSITION(string_exp1<1>, string_exp2<2>) +-------------------------------------------------- +*Input*: -Returns the position of the string1 in string2. The result is an exact numeric. +<1> string expression +<2> string expression + +*Output*: integer + +.Description: + +Returns the position of the `string_exp1` in `string_exp2`. The result is an exact numeric. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringPosition] ----- +-------------------------------------------------- [[sql-functions-string-repeat]] ==== `REPEAT` -*Input*: `string`, `count`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +REPEAT(string_exp<1>, count<2>) +-------------------------------------------------- +*Input*: -Returns a character string composed of string1 repeated count times. +<1> string expression +<2> integer expression + +*Output*: string + +.Description: + +Returns a character string composed of `string_exp` repeated `count` times. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringRepeat] ----- +-------------------------------------------------- [[sql-functions-string-replace]] ==== `REPLACE` -*Input*: `string1`, `string2`, `string3`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +REPLACE(source<1>, pattern<2>, replacement<3>) +-------------------------------------------------- +*Input*: -Search string1 for occurrences of string2, and replace with string3. +<1> string expression +<2> string expression +<3> string expression + +*Output*: string + +.Description: + +Search `source` for occurrences of `pattern`, and replace with `replacement`. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringReplace] ----- +-------------------------------------------------- [[sql-functions-string-right]] ==== `RIGHT` -*Input*: `string`, `count`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +RIGHT(string_exp<1>, count<2>) +-------------------------------------------------- +*Input*: -Returns the rightmost count characters of string. +<1> string expression +<2> integer expression + +*Output*: string + +.Description: + +Returns the rightmost count characters of `string_exp`. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringRight] ----- +-------------------------------------------------- [[sql-functions-string-rtrim]] ==== `RTRIM` -*Input*: `string`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +RTRIM(string_exp<1>) +-------------------------------------------------- +*Input*: -Returns the characters of string with trailing blanks removed. 
+<1> string expression + +*Output*: string + +.Description: + +Returns the characters of `string_exp` with trailing blanks removed. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringRTrim] ----- +-------------------------------------------------- [[sql-functions-string-space]] ==== `SPACE` -*Input*: `integer`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +SPACE(count<1>) +-------------------------------------------------- +*Input*: -Returns a character string consisting of count spaces. +<1> integer expression + +*Output*: string + +.Description: + +Returns a character string consisting of `count` spaces. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringSpace] ----- +-------------------------------------------------- [[sql-functions-string-substring]] ==== `SUBSTRING` -*Input*: `string`, `start`, `length`, *Output*: `integer` +.Synopsis: +[source, sql] +-------------------------------------------------- +SUBSTRING(source<1>, start<2>, length<3>) +-------------------------------------------------- +*Input*: -Returns a character string that is derived from the string, beginning at the character position specified by `start` for `length` characters. +<1> string expression +<2> integer expression +<3> integer expression + +*Output*: string + +.Description: + +Returns a character string that is derived from `source`, beginning at the character position specified by `start` for `length` characters. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringSubString] ----- +-------------------------------------------------- [[sql-functions-string-ucase]] ==== `UCASE` -*Input*: `string`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +UCASE(string_exp<1>) +-------------------------------------------------- +*Input*: + +<1> string expression + +*Output*: string + +.Description: Returns a string equal to that of the input, with all lowercase characters converted to uppercase. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringUCase] ----- +-------------------------------------------------- diff --git a/docs/reference/sql/index.asciidoc b/docs/reference/sql/index.asciidoc index aa9eebea7b7..d7022452730 100644 --- a/docs/reference/sql/index.asciidoc +++ b/docs/reference/sql/index.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[xpack-sql]] -= SQL Access += SQL access :sql-tests: {xes-repo-dir}/../../qa/sql :sql-specs: {sql-tests}/src/main/resources diff --git a/docs/reference/sql/language/index-patterns.asciidoc b/docs/reference/sql/language/index-patterns.asciidoc index 58574e03cfb..655ab575c1a 100644 --- a/docs/reference/sql/language/index-patterns.asciidoc +++ b/docs/reference/sql/language/index-patterns.asciidoc @@ -22,6 +22,17 @@ it excludes (due to `-`) all indices that start with `l`. This notation is very convenient and powerful as it allows both inclusion and exclusion, depending on the target naming convention. +The same kind of patterns can also be used to query multiple indices or tables. 
+ +For example: + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[fromTablePatternQuoted] +---- + +NOTE: All the resolved concrete tables are required to have the exact same mapping. + * SQL `LIKE` notation The common `LIKE` statement (including escaping if needed) to match a wildcard pattern, based on one `_` diff --git a/docs/reference/sql/language/syntax/select.asciidoc b/docs/reference/sql/language/syntax/select.asciidoc index 67291dee7a6..d1df5934678 100644 --- a/docs/reference/sql/language/syntax/select.asciidoc +++ b/docs/reference/sql/language/syntax/select.asciidoc @@ -88,7 +88,7 @@ where: Represents the name (optionally qualified) of an existing table, either a concrete or base one (actual index) or alias. -If the table name contains special SQL characters (such as `.`,`-`,etc...) use double quotes to escape them: +If the table name contains special SQL characters (such as `.`, `-`, `*`, etc.) use double quotes to escape them: ["source","sql",subs="attributes,callouts,macros"] ---- diff --git a/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java b/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java index 6c68710c6d8..c20737998fe 100644 --- a/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java +++ b/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java @@ -35,12 +35,12 @@ import java.io.InputStreamReader; import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.Collections; public final class Grok { @@ -184,6 +184,7 @@ public final class Grok { String namedPatternRef = groupMatch(NAME_GROUP, region, grokPattern); String subName = groupMatch(SUBNAME_GROUP, region, grokPattern); // TODO(tal): Support definitions + @SuppressWarnings("unused") String definition = groupMatch(DEFINITION_GROUP, region, grokPattern); String patternName = groupMatch(PATTERN_GROUP, region, grokPattern); diff --git a/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java b/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java index d30cf3d6fa2..f9d2055de1b 100644 --- a/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java +++ b/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java @@ -262,8 +262,6 @@ public class GrokTests extends ESTestCase { } public void testBooleanCaptures() { - Map bank = new HashMap<>(); - String pattern = "%{WORD:name}=%{WORD:status:boolean}"; Grok g = new Grok(basePatterns, pattern); diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/FlushOperationTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/FlushOperationTests.java index 0f3078715fd..4f2a320ad58 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/FlushOperationTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/FlushOperationTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.nio; import org.elasticsearch.test.ESTestCase; import org.junit.Before; -import org.mockito.ArgumentCaptor; import java.io.IOException; import java.nio.ByteBuffer; @@ -61,8 +60,6 @@ public class FlushOperationTests extends ESTestCase { ByteBuffer[] buffers = {ByteBuffer.allocate(10), ByteBuffer.allocate(15), ByteBuffer.allocate(3)}; FlushOperation writeOp = new FlushOperation(buffers, listener); - ArgumentCaptor buffersCaptor = ArgumentCaptor.forClass(ByteBuffer[].class); -
writeOp.incrementIndex(5); assertFalse(writeOp.isFullyFlushed()); ByteBuffer[] byteBuffers = writeOp.getBuffersToWrite(); diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java index eae5e48a557..51a4f86a0d3 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java @@ -25,6 +25,8 @@ import java.io.Flushable; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.math.BigDecimal; +import java.math.BigInteger; import java.nio.file.Path; import java.time.ZonedDateTime; import java.util.Arrays; @@ -103,7 +105,8 @@ public final class XContentBuilder implements Closeable, Flushable { writers.put(ZonedDateTime.class, (b, v) -> b.value(v.toString())); writers.put(Calendar.class, XContentBuilder::timeValue); writers.put(GregorianCalendar.class, XContentBuilder::timeValue); - + writers.put(BigInteger.class, (b, v) -> b.value((BigInteger) v)); + writers.put(BigDecimal.class, (b, v) -> b.value((BigDecimal) v)); Map, HumanReadableTransformer> humanReadableTransformer = new HashMap<>(); Map, Function> dateTransformers = new HashMap<>(); @@ -546,6 +549,81 @@ public final class XContentBuilder implements Closeable, Flushable { return this; } + //////////////////////////////////////////////////////////////////////////// + // BigInteger + ////////////////////////////////// + + public XContentBuilder field(String name, BigInteger value) throws IOException { + if (value == null) { + return nullField(name); + } + ensureNameNotNull(name); + generator.writeNumberField(name, value); + return this; + } + + public XContentBuilder array(String name, BigInteger[] values) throws IOException { + return field(name).values(values); + } + + private XContentBuilder values(BigInteger[] values) throws IOException { + if (values == null) { + return nullValue(); + } + startArray(); + for (BigInteger b : values) { + value(b); + } + endArray(); + return this; + } + + public XContentBuilder value(BigInteger value) throws IOException { + if (value == null) { + return nullValue(); + } + generator.writeNumber(value); + return this; + } + + + //////////////////////////////////////////////////////////////////////////// + // BigDecimal + ////////////////////////////////// + + public XContentBuilder field(String name, BigDecimal value) throws IOException { + if (value == null) { + return nullField(name); + } + ensureNameNotNull(name); + generator.writeNumberField(name, value); + return this; + } + + public XContentBuilder array(String name, BigDecimal[] values) throws IOException { + return field(name).values(values); + } + + private XContentBuilder values(BigDecimal[] values) throws IOException { + if (values == null) { + return nullValue(); + } + startArray(); + for (BigDecimal b : values) { + value(b); + } + endArray(); + return this; + } + + public XContentBuilder value(BigDecimal value) throws IOException { + if (value == null) { + return nullValue(); + } + generator.writeNumber(value); + return this; + } + //////////////////////////////////////////////////////////////////////////// // String ////////////////////////////////// diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentGenerator.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentGenerator.java index 
142c1e399c7..48a82d91655 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentGenerator.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentGenerator.java @@ -23,6 +23,8 @@ import java.io.Closeable; import java.io.Flushable; import java.io.IOException; import java.io.InputStream; +import java.math.BigDecimal; +import java.math.BigInteger; public interface XContentGenerator extends Closeable, Flushable { @@ -70,6 +72,14 @@ public interface XContentGenerator extends Closeable, Flushable { void writeNumber(short value) throws IOException; + void writeNumber(BigInteger value) throws IOException; + + void writeNumberField(String name, BigInteger value) throws IOException; + + void writeNumber(BigDecimal value) throws IOException; + + void writeNumberField(String name, BigDecimal value) throws IOException; + void writeStringField(String name, String value) throws IOException; void writeString(String value) throws IOException; diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java index 6f09174a573..97d25653ad6 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java @@ -42,6 +42,8 @@ import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.math.BigDecimal; +import java.math.BigInteger; import java.util.Objects; import java.util.Set; @@ -226,6 +228,19 @@ public class JsonXContentGenerator implements XContentGenerator { generator.writeNumberField(name, value); } + @Override + public void writeNumberField(String name, BigInteger value) throws IOException { + // as jackson's JsonGenerator doesn't have this method for BigInteger + // we have to implement it ourselves + generator.writeFieldName(name); + generator.writeNumber(value); + } + + @Override + public void writeNumberField(String name, BigDecimal value) throws IOException { + generator.writeNumberField(name, value); + } + @Override public void writeNumber(int value) throws IOException { generator.writeNumber(value); @@ -246,6 +261,16 @@ public class JsonXContentGenerator implements XContentGenerator { generator.writeNumber(value); } + @Override + public void writeNumber(BigInteger value) throws IOException { + generator.writeNumber(value); + } + + @Override + public void writeNumber(BigDecimal value) throws IOException { + generator.writeNumber(value); + } + @Override public void writeStringField(String name, String value) throws IOException { generator.writeStringField(name, value); diff --git a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java index 6aa0a321adf..42d53bf4985 100644 --- a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java @@ -222,6 +222,7 @@ public class ObjectParserTests extends ESTestCase { public void testFailOnValueType() throws IOException { XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"numeric_value\" : false}"); class TestStruct { + @SuppressWarnings("unused") public String test; } ObjectParser objectParser = new 
ObjectParser<>("foo"); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index 175935258ad..59ecde8cf37 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -126,7 +126,6 @@ import org.elasticsearch.index.analysis.PreBuiltAnalyzerProviderFactory; import org.elasticsearch.index.analysis.PreConfiguredCharFilter; import org.elasticsearch.index.analysis.PreConfiguredTokenFilter; import org.elasticsearch.index.analysis.PreConfiguredTokenizer; -import org.elasticsearch.index.analysis.SoraniNormalizationFilterFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; @@ -276,6 +275,8 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri filters.put("sorani_normalization", SoraniNormalizationFilterFactory::new); filters.put("stemmer_override", requiresAnalysisSettings(StemmerOverrideTokenFilterFactory::new)); filters.put("stemmer", StemmerTokenFilterFactory::new); + filters.put("synonym", requiresAnalysisSettings(SynonymTokenFilterFactory::new)); + filters.put("synonym_graph", requiresAnalysisSettings(SynonymGraphTokenFilterFactory::new)); filters.put("trim", TrimTokenFilterFactory::new); filters.put("truncate", requiresAnalysisSettings(TruncateTokenFilterFactory::new)); filters.put("unique", UniqueTokenFilterFactory::new); diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ESSolrSynonymParser.java similarity index 98% rename from server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ESSolrSynonymParser.java index 006973dd9b6..256e05982c6 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ESSolrSynonymParser.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ESWordnetSynonymParser.java similarity index 98% rename from server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ESWordnetSynonymParser.java index ebcd84e39d7..1e09011af67 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ESWordnetSynonymParser.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java index df67f24cc7f..e89219da4d9 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java @@ -54,6 +54,7 @@ public class KeepWordFilterFactory extends AbstractTokenFilterFactory { private final CharArraySet keepWords; private static final String KEEP_WORDS_KEY = "keep_words"; private static final String KEEP_WORDS_PATH_KEY = KEEP_WORDS_KEY + "_path"; + @SuppressWarnings("unused") private static final String KEEP_WORDS_CASE_KEY = KEEP_WORDS_KEY + "_case"; // for javadoc // unsupported ancient option diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SoraniNormalizationFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SoraniNormalizationFilterFactory.java similarity index 90% rename from server/src/main/java/org/elasticsearch/index/analysis/SoraniNormalizationFilterFactory.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SoraniNormalizationFilterFactory.java index afe2f51ddd8..4cb2fa649fd 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SoraniNormalizationFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SoraniNormalizationFilterFactory.java @@ -16,13 +16,15 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ckb.SoraniNormalizationFilter; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; +import org.elasticsearch.index.analysis.MultiTermAwareComponent; /** * Factory for {@link SoraniNormalizationFilter} diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java index 67895e82e61..829d9746399 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java @@ -53,7 +53,6 @@ import org.apache.lucene.analysis.pt.PortugueseStemFilter; import org.apache.lucene.analysis.ru.RussianLightStemFilter; import org.apache.lucene.analysis.snowball.SnowballFilter; import org.apache.lucene.analysis.sv.SwedishLightStemFilter; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -94,8 +93,6 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { @Override public TokenStream create(TokenStream tokenStream) { - final Version indexVersion = indexSettings.getIndexVersionCreated(); - if ("arabic".equalsIgnoreCase(language)) { return new ArabicStemFilter(tokenStream); } else if ("armenian".equalsIgnoreCase(language)) { diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SynonymGraphTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SynonymGraphTokenFilterFactory.java similarity index 87% rename from server/src/main/java/org/elasticsearch/index/analysis/SynonymGraphTokenFilterFactory.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SynonymGraphTokenFilterFactory.java index 200e426fbd4..cccfdc8d7b7 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SynonymGraphTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SynonymGraphTokenFilterFactory.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; @@ -26,16 +26,18 @@ import org.apache.lucene.analysis.synonym.SynonymMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.CharFilterFactory; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.TokenizerFactory; -import java.io.IOException; import java.util.List; import java.util.function.Function; public class SynonymGraphTokenFilterFactory extends SynonymTokenFilterFactory { - public SynonymGraphTokenFilterFactory(IndexSettings indexSettings, Environment env, AnalysisRegistry analysisRegistry, - String name, Settings settings) throws IOException { - super(indexSettings, env, analysisRegistry, name, settings); + SynonymGraphTokenFilterFactory(IndexSettings indexSettings, Environment env, + String name, Settings settings) { + super(indexSettings, env, name, settings); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SynonymTokenFilterFactory.java similarity index 84% rename from server/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SynonymTokenFilterFactory.java index c18e8c94310..99810432cd8 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SynonymTokenFilterFactory.java @@ -17,7 +17,7 @@ * under the License. 
 */
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;

 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
@@ -26,8 +26,13 @@ import org.apache.lucene.analysis.synonym.SynonymMap;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
+import org.elasticsearch.index.analysis.Analysis;
+import org.elasticsearch.index.analysis.CharFilterFactory;
+import org.elasticsearch.index.analysis.CustomAnalyzer;
+import org.elasticsearch.index.analysis.TokenFilterFactory;
+import org.elasticsearch.index.analysis.TokenizerFactory;

-import java.io.IOException;
 import java.io.Reader;
 import java.io.StringReader;
 import java.util.List;
@@ -35,14 +40,14 @@ import java.util.function.Function;

 public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {

-    protected final String format;
-    protected final boolean expand;
-    protected final boolean lenient;
+    private final String format;
+    private final boolean expand;
+    private final boolean lenient;
     protected final Settings settings;
     protected final Environment environment;

-    public SynonymTokenFilterFactory(IndexSettings indexSettings, Environment env, AnalysisRegistry analysisRegistry,
-                                     String name, Settings settings) throws IOException {
+    SynonymTokenFilterFactory(IndexSettings indexSettings, Environment env,
+                              String name, Settings settings) {
         super(indexSettings, name, settings);
         this.settings = settings;
@@ -83,15 +88,15 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
         };
     }

-    protected Analyzer buildSynonymAnalyzer(TokenizerFactory tokenizer, List<CharFilterFactory> charFilters,
-                                            List<TokenFilterFactory> tokenFilters) {
+    Analyzer buildSynonymAnalyzer(TokenizerFactory tokenizer, List<CharFilterFactory> charFilters,
+                                  List<TokenFilterFactory> tokenFilters) {
         return new CustomAnalyzer("synonyms", tokenizer,
             charFilters.toArray(new CharFilterFactory[0]),
             tokenFilters.stream()
                 .map(TokenFilterFactory::getSynonymFilter)
                 .toArray(TokenFilterFactory[]::new));
     }

-    protected SynonymMap buildSynonyms(Analyzer analyzer, Reader rules) {
+    SynonymMap buildSynonyms(Analyzer analyzer, Reader rules) {
         try {
             SynonymMap.Builder parser;
             if ("wordnet".equalsIgnoreCase(format)) {
@@ -107,7 +112,7 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
         }
     }

-    protected Reader getRulesFromSettings(Environment env) {
+    Reader getRulesFromSettings(Environment env) {
         Reader rulesReader;
         if (settings.getAsList("synonyms", null) != null) {
             List<String> rulesList = Analysis.getWordList(env, settings, "synonyms");
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java
index 50843065878..f9fca66cc54 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java
@@ -24,8 +24,6 @@ import org.apache.lucene.analysis.en.PorterStemFilterFactory;
 import org.apache.lucene.analysis.miscellaneous.LimitTokenCountFilterFactory;
 import org.apache.lucene.analysis.reverse.ReverseStringFilterFactory;
 import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory;
-import org.elasticsearch.index.analysis.SoraniNormalizationFilterFactory;
-import
org.elasticsearch.index.analysis.SynonymTokenFilterFactory; import org.elasticsearch.indices.analysis.AnalysisFactoryTestCase; import java.util.List; @@ -107,6 +105,7 @@ public class CommonAnalysisFactoryTests extends AnalysisFactoryTestCase { filters.put("stemmeroverride", StemmerOverrideTokenFilterFactory.class); filters.put("kstem", KStemTokenFilterFactory.class); filters.put("synonym", SynonymTokenFilterFactory.class); + filters.put("synonymgraph", SynonymGraphTokenFilterFactory.class); filters.put("dictionarycompoundword", DictionaryCompoundWordTokenFilterFactory.class); filters.put("hyphenationcompoundword", HyphenationCompoundWordTokenFilterFactory.class); filters.put("reversestring", ReverseTokenFilterFactory.class); diff --git a/server/src/test/java/org/elasticsearch/index/analysis/ESSolrSynonymParserTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ESSolrSynonymParserTests.java similarity index 98% rename from server/src/test/java/org/elasticsearch/index/analysis/ESSolrSynonymParserTests.java rename to modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ESSolrSynonymParserTests.java index 31aa1a9be25..e6ed9b03855 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/ESSolrSynonymParserTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ESSolrSynonymParserTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.StopFilter; diff --git a/server/src/test/java/org/elasticsearch/index/analysis/ESWordnetSynonymParserTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ESWordnetSynonymParserTests.java similarity index 98% rename from server/src/test/java/org/elasticsearch/index/analysis/ESWordnetSynonymParserTests.java rename to modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ESWordnetSynonymParserTests.java index 6d0fd8944d4..18eaaedb5d0 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/ESWordnetSynonymParserTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ESWordnetSynonymParserTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.StopFilter; diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java index 96e8043570d..db39a27d5c1 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java @@ -21,9 +21,12 @@ package org.elasticsearch.analysis.common; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.query.Operator; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.test.ESIntegTestCase; @@ -31,11 +34,18 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import static org.elasticsearch.client.Requests.searchRequest; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchPhrasePrefixQuery; import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.search.builder.SearchSourceBuilder.highlight; +import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHighlight; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.startsWith; @@ -153,4 +163,165 @@ public class HighlighterWithAnalyzersTests extends ESIntegTestCase { + "http://www.facebook.com http://elasticsearch.org " + "http://xing.com http://cnn.com http://quora.com")); } + + public void testSynonyms() throws IOException { + Settings.Builder builder = Settings.builder() + .put(indexSettings()) + .put("index.analysis.analyzer.synonym.tokenizer", "standard") + .putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase") + .put("index.analysis.filter.synonym.type", "synonym") + .putList("index.analysis.filter.synonym.synonyms", "fast,quick"); + + assertAcked(prepareCreate("test").setSettings(builder.build()) + .addMapping("type1", "field1", + "type=text,term_vector=with_positions_offsets,search_analyzer=synonym," + + "analyzer=standard,index_options=offsets")); + ensureGreen(); + + client().prepareIndex("test", "type1", "0").setSource( + "field1", "The quick brown fox jumps over the lazy dog").get(); + refresh(); + for (String highlighterType : new String[] {"plain", "fvh", "unified"}) { + logger.info("--> highlighting (type=" + highlighterType + ") and searching on field1"); + SearchSourceBuilder source = searchSource() + 
.query(matchQuery("field1", "quick brown fox").operator(Operator.AND)) + .highlighter( + highlight() + .field("field1") + .order("score") + .preTags("") + .postTags("") + .highlighterType(highlighterType)); + SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + assertHighlight(searchResponse, 0, "field1", 0, 1, + equalTo("The quick brown fox jumps over the lazy dog")); + + source = searchSource() + .query(matchQuery("field1", "fast brown fox").operator(Operator.AND)) + .highlighter(highlight().field("field1").order("score").preTags("").postTags("")); + searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + assertHighlight(searchResponse, 0, "field1", 0, 1, + equalTo("The quick brown fox jumps over the lazy dog")); + } + } + + public void testPhrasePrefix() throws IOException { + Settings.Builder builder = Settings.builder() + .put(indexSettings()) + .put("index.analysis.analyzer.synonym.tokenizer", "standard") + .putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase") + .put("index.analysis.filter.synonym.type", "synonym") + .putList("index.analysis.filter.synonym.synonyms", "quick => fast"); + + assertAcked(prepareCreate("first_test_index").setSettings(builder.build()).addMapping("type1", type1TermVectorMapping())); + + ensureGreen(); + + client().prepareIndex("first_test_index", "type1", "0").setSource( + "field0", "The quick brown fox jumps over the lazy dog", + "field1", "The quick brown fox jumps over the lazy dog").get(); + client().prepareIndex("first_test_index", "type1", "1").setSource("field1", + "The quick browse button is a fancy thing, right bro?").get(); + refresh(); + logger.info("--> highlighting and searching on field0"); + + SearchSourceBuilder source = searchSource() + .query(matchPhrasePrefixQuery("field0", "bro")) + .highlighter(highlight().field("field0").order("score").preTags("").postTags("")); + SearchResponse searchResponse = client().search(searchRequest("first_test_index").source(source)).actionGet(); + + assertHighlight(searchResponse, 0, "field0", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); + + source = searchSource() + .query(matchPhrasePrefixQuery("field0", "quick bro")) + .highlighter(highlight().field("field0").order("score").preTags("").postTags("")); + + searchResponse = client().search(searchRequest("first_test_index").source(source)).actionGet(); + assertHighlight(searchResponse, 0, "field0", 0, 1, + equalTo("The quick brown fox jumps over the lazy dog")); + + logger.info("--> highlighting and searching on field1"); + source = searchSource() + .query(boolQuery() + .should(matchPhrasePrefixQuery("field1", "test")) + .should(matchPhrasePrefixQuery("field1", "bro")) + ) + .highlighter(highlight().field("field1").order("score").preTags("").postTags("")); + + searchResponse = client().search(searchRequest("first_test_index").source(source)).actionGet(); + assertThat(searchResponse.getHits().totalHits, equalTo(2L)); + for (int i = 0; i < 2; i++) { + assertHighlight(searchResponse, i, "field1", 0, 1, anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog"))); + } + + source = searchSource() + .query(matchPhrasePrefixQuery("field1", "quick bro")) + .highlighter(highlight().field("field1").order("score").preTags("").postTags("")); + + searchResponse = client().search(searchRequest("first_test_index").source(source)).actionGet(); + + assertHighlight(searchResponse, 0, 
"field1", 0, 1, anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog"))); + assertHighlight(searchResponse, 1, "field1", 0, 1, anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog"))); + + assertAcked(prepareCreate("second_test_index").setSettings(builder.build()).addMapping("doc", + "field4", "type=text,term_vector=with_positions_offsets,analyzer=synonym", + "field3", "type=text,analyzer=synonym")); + // with synonyms + client().prepareIndex("second_test_index", "doc", "0").setSource( + "type", "type2", + "field4", "The quick brown fox jumps over the lazy dog", + "field3", "The quick brown fox jumps over the lazy dog").get(); + client().prepareIndex("second_test_index", "doc", "1").setSource( + "type", "type2", + "field4", "The quick browse button is a fancy thing, right bro?").get(); + client().prepareIndex("second_test_index", "doc", "2").setSource( + "type", "type2", + "field4", "a quick fast blue car").get(); + refresh(); + + source = searchSource().postFilter(termQuery("type", "type2")).query(matchPhrasePrefixQuery("field3", "fast bro")) + .highlighter(highlight().field("field3").order("score").preTags("").postTags("")); + + searchResponse = client().search(searchRequest("second_test_index").source(source)).actionGet(); + + assertHighlight(searchResponse, 0, "field3", 0, 1, + equalTo("The quick brown fox jumps over the lazy dog")); + + logger.info("--> highlighting and searching on field4"); + source = searchSource().postFilter(termQuery("type", "type2")).query(matchPhrasePrefixQuery("field4", "the fast bro")) + .highlighter(highlight().field("field4").order("score").preTags("").postTags("")); + searchResponse = client().search(searchRequest("second_test_index").source(source)).actionGet(); + + assertHighlight(searchResponse, 0, "field4", 0, 1, anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog"))); + assertHighlight(searchResponse, 1, "field4", 0, 1, anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog"))); + + logger.info("--> highlighting and searching on field4"); + source = searchSource().postFilter(termQuery("type", "type2")) + .query(matchPhrasePrefixQuery("field4", "a fast quick blue ca")) + .highlighter(highlight().field("field4").order("score").preTags("").postTags("")); + searchResponse = client().search(searchRequest("second_test_index").source(source)).actionGet(); + + assertHighlight(searchResponse, 0, "field4", 0, 1, + anyOf(equalTo("a quick fast blue car"), + equalTo("a quick fast blue car"))); + } + + public static XContentBuilder type1TermVectorMapping() throws IOException { + return XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties") + .startObject("field1").field("type", "text").field("term_vector", "with_positions_offsets").endObject() + .startObject("field2").field("type", "text").field("term_vector", "with_positions_offsets").endObject() + .endObject() + .endObject().endObject(); + } } diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml index 150fa39dcb9..4106237f2cc 100644 --- 
a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml @@ -223,6 +223,68 @@ - match: { tokens.0.token: Foo } - match: { tokens.1.token: Bar! } +--- +"synonym": + - do: + indices.create: + index: test + body: + settings: + analysis: + filter: + my_synonym: + type: synonym + synonyms: ["car,auto"] + + - do: + indices.analyze: + index: test + body: + text: what car magazine + tokenizer: whitespace + filter: [ my_synonym ] + - length: { tokens: 4 } + - match: { tokens.0.token: what } + - match: { tokens.0.position: 0 } + - match: { tokens.1.token: car } + - match: { tokens.1.position: 1 } + - match: { tokens.2.token: auto } + - match: { tokens.2.position: 1 } + - match: { tokens.3.token: magazine } + - match: { tokens.3.position: 2 } + +--- +"synonym_graph": + - do: + indices.create: + index: test + body: + settings: + analysis: + filter: + my_graph_synonym: + type: synonym_graph + synonyms: [ "guinea pig,cavy" ] + + - do: + indices.analyze: + index: test + body: + text: my guinea pig snores + tokenizer: whitespace + filter: [ my_graph_synonym ] + - length: { tokens: 5 } + - match: { tokens.0.token: my } + - match: { tokens.1.token: cavy } + - match: { tokens.1.position: 1 } + - match: { tokens.1.positionLength: 2 } + - match: { tokens.2.token: guinea } + - match: { tokens.2.position: 1 } + - match: { tokens.3.token: pig } + - match: { tokens.3.position: 2 } + - match: { tokens.4.token: snores } + - match: { tokens.4.position: 3 } + --- "synonym_graph and flatten_graph": - do: diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/indices.analyze/10_synonyms.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/indices.analyze/10_synonyms.yml index 774d30b0b04..840d836b13c 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/indices.analyze/10_synonyms.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/indices.analyze/10_synonyms.yml @@ -1,3 +1,4 @@ +--- "Synonym filter with char_filter": # Tests analyze with synonym and char_filter. This is in the analysis-common module # because there are no char filters in core. @@ -30,3 +31,49 @@ - match: { tokens.2.token: the } - match: { tokens.3.token: elasticsearch } - match: { tokens.4.token: man! 
} + +--- +"Non-standard position length": + - do: + indices.create: + index: test + body: + settings: + index: + analysis: + filter: + syns: + type: synonym + synonyms: [ "wtf,what the fudge" ] + analyzer: + custom_syns: + tokenizer: standard + filter: [ lowercase, syns ] + + - do: + indices.analyze: + index: test + body: + analyzer: custom_syns + text: "say what the fudge dude" + + - length: { tokens: 6 } + - match: { tokens.0.token: say } + - match: { tokens.0.position: 0 } + - match: { tokens.0.positionLength: null } + - match: { tokens.1.token: what } + - match: { tokens.1.position: 1 } + - match: { tokens.1.positionLength: null } + - match: { tokens.2.token: wtf } + - match: { tokens.2.position: 1 } + - match: { tokens.2.positionLength: 3 } + - match: { tokens.3.token: the } + - match: { tokens.3.position: 2 } + - match: { tokens.3.positionLength: null } + - match: { tokens.4.token: fudge } + - match: { tokens.4.position: 3 } + - match: { tokens.4.positionLength: null } + - match: { tokens.5.token: dude } + - match: { tokens.5.position: 4 } + - match: { tokens.5.positionLength: null } + diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/indices/validate_query/10_synonyms.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/indices/validate_query/10_synonyms.yml new file mode 100644 index 00000000000..a0ef4463f21 --- /dev/null +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/indices/validate_query/10_synonyms.yml @@ -0,0 +1,82 @@ +--- +"validate query with synonyms": + - do: + indices.create: + index: test + body: + settings: + index: + analysis: + filter: + syns: + type: synonym + synonyms: [ "one,two" ] + analyzer: + syns: + tokenizer: standard + filter: [ syns ] + mappings: + test: + properties: + field: + type: text + analyzer: syns + + - do: + indices.validate_query: + index: test + explain: true + body: + query: + match_phrase_prefix: + field: + query: foo + + - is_true: valid + - length: { explanations: 1 } + - match: { explanations.0.explanation: "/field:\"foo\\*\"/" } + + - do: + indices.validate_query: + index: test + explain: true + body: + query: + match_phrase_prefix: + field: + query: foo bar + + - is_true: valid + - length: { explanations: 1 } + - match: { explanations.0.explanation: "field:\"foo bar*\"" } + + - do: + indices.validate_query: + index: test + explain: true + body: + query: + match_phrase_prefix: + field: + query: one bar + + - is_true: valid + - length: { explanations: 1 } + - match: { explanations.0.explanation: "field:\"(one two) bar*\"" } + + - do: + indices.validate_query: + index: test + explain: true + body: + query: + match_phrase_prefix: + field: + query: foo one + + - is_true: valid + - length: { explanations: 1 } + - match: { explanations.0.explanation: "field:\"foo (one* two*)\"" } + + + diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/search.query/50_queries_with_synonyms.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/search.query/50_queries_with_synonyms.yml new file mode 100644 index 00000000000..c7a8122337e --- /dev/null +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/search.query/50_queries_with_synonyms.yml @@ -0,0 +1,307 @@ +--- +"Test common terms query with stacked tokens": + - do: + indices.create: + index: test + body: + settings: + analysis: + filter: + syns: + type: synonym + synonyms: [ "quick,fast" ] + analyzer: + syns: + tokenizer: standard + filter: [ "syns" ] + mappings: + test: + properties: + field1: + 
type: text + analyzer: syns + field2: + type: text + analyzer: syns + + - do: + index: + index: test + type: test + id: 3 + body: + field1: quick lazy huge brown pidgin + field2: the quick lazy huge brown fox jumps over the tree + + - do: + index: + index: test + type: test + id: 1 + body: + field1: the quick brown fox + + - do: + index: + index: test + type: test + id: 2 + body: + field1: the quick lazy huge brown fox jumps over the tree + refresh: true + + - do: + search: + body: + query: + common: + field1: + query: the fast brown + cutoff_frequency: 3 + low_freq_operator: or + - match: { hits.total: 3 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "3" } + + - do: + search: + body: + query: + common: + field1: + query: the fast brown + cutoff_frequency: 3 + low_freq_operator: and + - match: { hits.total: 2 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + + - do: + search: + body: + query: + common: + field1: + query: the fast brown + cutoff_frequency: 3 + - match: { hits.total: 3 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "3" } + + - do: + search: + body: + query: + common: + field1: + query: the fast huge fox + minimum_should_match: + low_freq: 3 + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "2" } + + - do: + search: + body: + query: + common: + field1: + query: the fast lazy fox brown + cutoff_frequency: 1 + minimum_should_match: + high_freq: 5 + - match: { hits.total: 2 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.1._id: "1" } + + - do: + search: + body: + query: + common: + field1: + query: the fast lazy fox brown + cutoff_frequency: 1 + minimum_should_match: + high_freq: 6 + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "2" } + + - do: + search: + body: + query: + common: + field1: + query: the fast lazy fox brown + cutoff_frequency: 1 + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "2" } + + - do: + search: + body: + query: + common: + field1: + query: the quick brown + cutoff_frequency: 3 + - match: { hits.total: 3 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "3" } + + - do: + search: + body: + query: + match: + field1: + query: the fast brown + cutoff_frequency: 3 + operator: and + - match: { hits.total: 2 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + + - do: + search: + body: + query: + match: + field1: + query: the fast brown + cutoff_frequency: 3 + operator: or + - match: { hits.total: 3 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "3" } + + - do: + search: + body: + query: + match: + field1: + query: the fast brown + cutoff_frequency: 3 + minimum_should_match: 3 + - match: { hits.total: 2 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + + - do: + search: + body: + query: + multi_match: + query: the fast brown + fields: [ "field1", "field2" ] + cutoff_frequency: 3 + operator: and + - match: { hits.total: 3 } + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "1" } + - match: { hits.hits.2._id: "2" } + +--- +"Test match query with synonyms - see #3881 for extensive description of the issue": + - do: + indices.create: + index: test + body: + settings: + analysis: + filter: + synonym: + type: synonym + synonyms: [ "quick,fast" ] + analyzer: + index: + type: custom + tokenizer: standard + filter: 
lowercase + search: + type: custom + tokenizer: standard + filter: [ lowercase, synonym ] + mappings: + test: + properties: + text: + type: text + analyzer: index + search_analyzer: search + + - do: + index: + index: test + type: test + id: 1 + body: + text: quick brown fox + refresh: true + + - do: + search: + body: + query: + match: + text: + query: quick + operator: and + - match: { hits.total: 1 } + + - do: + search: + body: + query: + match: + text: + query: quick brown + operator: and + - match: { hits.total: 1 } + + - do: + search: + body: + query: + match: + text: + query: fast + operator: and + - match: { hits.total: 1 } + + - do: + index: + index: test + type: test + id: 2 + body: + text: fast brown fox + refresh: true + + - do: + search: + body: + query: + match: + text: + query: quick + operator: and + - match: { hits.total: 2 } + + - do: + search: + body: + query: + match: + text: + query: quick brown + operator: and + - match: { hits.total: 2 } + + diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/search.query/60_synonym_graph.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/search.query/60_synonym_graph.yml new file mode 100644 index 00000000000..6b4c482efca --- /dev/null +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/search.query/60_synonym_graph.yml @@ -0,0 +1,207 @@ +setup: + - do: + indices.create: + index: test + body: + settings: + index: + number_of_shards: 1 # keep scoring stable + analysis: + filter: + syns: + type: synonym + synonyms: [ "wtf, what the fudge", "foo, bar baz" ] + graph_syns: + type: synonym_graph + synonyms: [ "wtf, what the fudge", "foo, bar baz" ] + analyzer: + lower_syns: + type: custom + tokenizer: standard + filter: [ lowercase, syns ] + lower_graph_syns: + type: custom + tokenizer: standard + filter: [ lowercase, graph_syns ] + mappings: + test: + properties: + field: + type: text + + - do: + index: + index: test + type: test + id: 1 + body: + text: say wtf happened foo + - do: + index: + index: test + type: test + id: 2 + body: + text: bar baz what the fudge man + + - do: + index: + index: test + type: test + id: 3 + body: + text: wtf + + - do: + index: + index: test + type: test + id: 4 + body: + text: what is the name for fudge + + - do: + index: + index: test + type: test + id: 5 + body: + text: bar two three + + - do: + index: + index: test + type: test + id: 6 + body: + text: bar baz two three + refresh: true + +--- +"simple multiterm phrase": + - do: + search: + body: + query: + match_phrase: + text: + query: foo two three + analyzer: lower_syns + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "5" } # incorrect match because we're not using graph synonyms + + - do: + search: + body: + query: + match_phrase: + text: + query: foo two three + analyzer: lower_graph_syns + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "6" } # correct match because we're using graph synonyms + +--- +"simple multiterm and": + - do: + search: + body: + query: + match: + text: + query: say what the fudge + analyzer: lower_syns + operator: and + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "1" } # non-graph synonyms coincidentally give us the correct answer here + + - do: + search: + body: + query: + match: + text: + query: say what the fudge + analyzer: lower_graph_syns + operator: and + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "1" } + +--- +"minimum should match": + - do: + search: + body: + query: + match: + text: + query: three what the fudge foo + 
operator: or + analyzer: lower_graph_syns + auto_generate_synonyms_phrase_query: false + - match: { hits.total: 6 } + + - do: + search: + body: + query: + match: + text: + query: three what the fudge foo + operator: or + analyzer: lower_graph_syns + minimum_should_match: 80% + - match: { hits.total: 3 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.1._id: "6" } + - match: { hits.hits.2._id: "1" } + +--- +"multiterm synonyms phrase": + - do: + search: + body: + query: + match: + text: + query: wtf + operator: and + analyzer: lower_graph_syns + - match: { hits.total: 3 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.1._id: "3" } + - match: { hits.hits.2._id: "1" } + +--- +"phrase prefix": + - do: + index: + index: test + type: test + id: 7 + body: + text: "WTFD!" + + - do: + index: + index: test + type: test + id: 8 + body: + text: "Weird Al's WHAT THE FUDGESICLE" + refresh: true + + - do: + search: + body: + query: + match_phrase_prefix: + text: + query: wtf + analyzer: lower_graph_syns + - match: { hits.total: 5 } + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "7" } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.3._id: "8" } + - match: { hits.hits.4._id: "2" } diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/search.suggest/30_synonyms.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/search.suggest/30_synonyms.yml new file mode 100644 index 00000000000..85bc348fa41 --- /dev/null +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/search.suggest/30_synonyms.yml @@ -0,0 +1,44 @@ +--- +"suggestions with synonyms": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + suggest_analyzer_synonyms: + type: custom + tokenizer: standard + filter: [ lowercase, my_synonyms ] + filter: + my_synonyms: + type: synonym + synonyms: [ "foo,renamed"] + mappings: + test: + properties: + field: + type: completion + analyzer: suggest_analyzer_synonyms + + - do: + index: + index: test + type: test + id: 1 + body: + field: + input: [ "Foo Fighters" ] + refresh: true + + - do: + search: + index: test + body: + suggest: + text: r + test: + completion: + field: field + - match: {suggest.test.0.options.0.text: Foo Fighters} diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java index d9dba2cc100..41e96253f28 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java @@ -31,6 +31,8 @@ import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.grok.Grok; import org.elasticsearch.grok.ThreadWatchdog; +import org.elasticsearch.ingest.DropProcessor; +import org.elasticsearch.ingest.PipelineProcessor; import org.elasticsearch.ingest.Processor; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.IngestPlugin; diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java index 72bc337e9c9..10fcf5fe602 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java +++ 
b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java @@ -19,10 +19,6 @@ package org.elasticsearch.ingest.common; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; @@ -33,6 +29,10 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; import org.elasticsearch.test.ESTestCase; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.core.Is.is; @@ -52,7 +52,8 @@ public class ScriptProcessorTests extends ESTestCase { ctx.put("bytes_total", randomBytesTotal); return null; } - ) + ), + Collections.emptyMap() ) ), new HashMap<>(ScriptModule.CORE_CONTEXTS) diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/210_pipeline_processor.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/210_pipeline_processor.yml index 355ba2d4210..c7c5df1e06f 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/210_pipeline_processor.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/210_pipeline_processor.yml @@ -110,4 +110,4 @@ teardown: pipeline: "outer" body: {} - match: { error.root_cause.0.type: "exception" } -- match: { error.root_cause.0.reason: "java.lang.IllegalArgumentException: java.lang.IllegalStateException: Recursive invocation of pipeline [inner] detected." } +- match: { error.root_cause.0.reason: "java.lang.IllegalArgumentException: java.lang.IllegalStateException: Cycle detected for pipeline: inner" } diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml new file mode 100644 index 00000000000..3be038aca24 --- /dev/null +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml @@ -0,0 +1,59 @@ +--- +teardown: +- do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + +--- +"Test Drop Processor": +- do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description" : "pipeline with drop", + "processors" : [ + { + "drop" : { + "if": "ctx.foo == 'bar'" + } + } + ] + } +- match: { acknowledged: true } + +- do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: { + foo: "bar" + } + +- do: + index: + index: test + type: test + id: 2 + pipeline: "my_pipeline" + body: { + foo: "blub" + } + +- do: + catch: missing + get: + index: test + type: test + id: 1 +- match: { found: false } + +- do: + get: + index: test + type: test + id: 2 +- match: { _source.foo: "blub" } diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yml index 776a8af0c24..46c4fb0a69e 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yml @@ -605,3 +605,150 @@ teardown: - length: { docs.0.processor_results.1: 2 } - match: { docs.0.processor_results.1.tag: "rename-1" } - match: { docs.0.processor_results.1.doc._source.new_status: 200 } + +--- +"Test verbose simulate with Pipeline Processor with Circular Pipelines": 
+- do: + ingest.put_pipeline: + id: "outer" + body: > + { + "description" : "outer pipeline", + "processors" : [ + { + "pipeline" : { + "pipeline": "inner" + } + } + ] + } +- match: { acknowledged: true } + +- do: + ingest.put_pipeline: + id: "inner" + body: > + { + "description" : "inner pipeline", + "processors" : [ + { + "pipeline" : { + "pipeline": "outer" + } + } + ] + } +- match: { acknowledged: true } + +- do: + catch: /illegal_state_exception/ + ingest.simulate: + verbose: true + body: > + { + "pipeline": { + "processors" : [ + { + "pipeline" : { + "pipeline": "outer" + } + } + ] + } + , + "docs": [ + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "field1": "123.42 400 " + } + } + ] + } +- match: { error.root_cause.0.type: "illegal_state_exception" } +- match: { error.root_cause.0.reason: "Cycle detected for pipeline: inner" } + +--- +"Test verbose simulate with Pipeline Processor with Multiple Pipelines": +- do: + ingest.put_pipeline: + id: "pipeline1" + body: > + { + "processors": [ + { + "set": { + "field": "pipeline1", + "value": true + } + }, + { + "pipeline": { + "pipeline": "pipeline2" + } + } + ] + } +- match: { acknowledged: true } + +- do: + ingest.put_pipeline: + id: "pipeline2" + body: > + { + "processors": [ + { + "set": { + "field": "pipeline2", + "value": true + } + } + ] + } +- match: { acknowledged: true } + +- do: + ingest.simulate: + verbose: true + body: > + { + "pipeline": { + "processors": [ + { + "set": { + "field": "pipeline0", + "value": true + } + }, + { + "pipeline": { + "pipeline": "pipeline1" + } + } + ] + }, + "docs": [ + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "field1": "123.42 400 " + } + } + ] + } +- length: { docs: 1 } +- length: { docs.0.processor_results: 3 } +- match: { docs.0.processor_results.0.doc._source.pipeline0: true } +- is_false: docs.0.processor_results.0.doc._source.pipeline1 +- is_false: docs.0.processor_results.0.doc._source.pipeline2 +- match: { docs.0.processor_results.1.doc._source.pipeline0: true } +- match: { docs.0.processor_results.1.doc._source.pipeline1: true } +- is_false: docs.0.processor_results.1.doc._source.pipeline2 +- match: { docs.0.processor_results.2.doc._source.pipeline0: true } +- match: { docs.0.processor_results.2.doc._source.pipeline1: true } +- match: { docs.0.processor_results.2.doc._source.pipeline2: true } + diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionExecutableScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionExecutableScript.java deleted file mode 100644 index f9cdae40457..00000000000 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionExecutableScript.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.script.expression;
-
-import org.apache.lucene.expressions.Expression;
-import org.elasticsearch.script.ExecutableScript;
-import org.elasticsearch.script.GeneralScriptException;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * A bridge to evaluate an {@link Expression} against a map of variables in the context
- * of an {@link ExecutableScript}.
- */
-public class ExpressionExecutableScript implements ExecutableScript {
-    public final Expression expression;
-    public final Map<String, ReplaceableConstDoubleValues> functionValuesMap;
-    public final ReplaceableConstDoubleValues[] functionValuesArray;
-
-    public ExpressionExecutableScript(Expression expression, Map<String, Object> vars) {
-        this.expression = expression;
-        int functionValuesLength = expression.variables.length;
-
-        if (vars.size() != functionValuesLength) {
-            throw new GeneralScriptException("Error using " + expression + ". " +
-                    "The number of variables in an executable expression script [" +
-                    functionValuesLength + "] must match the number of variables in the variable map" +
-                    " [" + vars.size() + "].");
-        }
-
-        functionValuesArray = new ReplaceableConstDoubleValues[functionValuesLength];
-        functionValuesMap = new HashMap<>();
-
-        for (int functionValuesIndex = 0; functionValuesIndex < functionValuesLength; ++functionValuesIndex) {
-            String variableName = expression.variables[functionValuesIndex];
-            functionValuesArray[functionValuesIndex] = new ReplaceableConstDoubleValues();
-            functionValuesMap.put(variableName, functionValuesArray[functionValuesIndex]);
-        }
-
-        for (String varsName : vars.keySet()) {
-            setNextVar(varsName, vars.get(varsName));
-        }
-    }
-
-    @Override
-    public void setNextVar(String name, Object value) {
-        if (functionValuesMap.containsKey(name)) {
-            if (value instanceof Number) {
-                double doubleValue = ((Number)value).doubleValue();
-                functionValuesMap.get(name).setValue(doubleValue);
-            } else {
-                throw new GeneralScriptException("Error using " + expression + ". " +
-                        "Executable expressions scripts can only process numbers." +
-                        " The variable [" + name + "] is not a number.");
-            }
-        } else {
-            throw new GeneralScriptException("Error using " + expression + ". 
" + - "The variable [" + name + "] does not exist in the executable expressions script."); - } - } - - @Override - public Object run() { - try { - return expression.evaluate(functionValuesArray); - } catch (Exception exception) { - throw new GeneralScriptException("Error evaluating " + expression, exception); - } - } -} diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java index 55f8deb0592..9d305a5f2d9 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java @@ -41,7 +41,6 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.script.BucketAggregationScript; import org.elasticsearch.script.BucketAggregationSelectorScript; import org.elasticsearch.script.ClassPermission; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.FilterScript; import org.elasticsearch.script.ScoreScript; import org.elasticsearch.script.ScriptContext; @@ -112,9 +111,6 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE if (context.instanceClazz.equals(SearchScript.class)) { SearchScript.Factory factory = (p, lookup) -> newSearchScript(expr, lookup, p); return context.factoryClazz.cast(factory); - } else if (context.instanceClazz.equals(ExecutableScript.class)) { - ExecutableScript.Factory factory = (p) -> new ExpressionExecutableScript(expr, p); - return context.factoryClazz.cast(factory); } else if (context.instanceClazz.equals(BucketAggregationScript.class)) { return context.factoryClazz.cast(newBucketAggregationScriptFactory(expr)); } else if (context.instanceClazz.equals(BucketAggregationSelectorScript.class)) { diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java index 6d7ab1d2595..11310710769 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java @@ -19,8 +19,6 @@ package org.elasticsearch.script.expression; -import org.apache.lucene.expressions.Expression; -import org.apache.lucene.expressions.js.JavascriptCompiler; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -33,7 +31,6 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.GeneralScriptException; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchHits; @@ -504,68 +501,6 @@ public class MoreExpressionTests extends ESIntegTestCase { message.contains("text variable"), equalTo(true)); } - // series of unit test for using expressions as executable scripts - public void testExecutableScripts() throws Exception { - assumeTrue("test creates classes directly, cannot run with security manager", 
System.getSecurityManager() == null); - Map vars = new HashMap<>(); - vars.put("a", 2.5); - vars.put("b", 3); - vars.put("xyz", -1); - - Expression expr = JavascriptCompiler.compile("a+b+xyz"); - - ExpressionExecutableScript ees = new ExpressionExecutableScript(expr, vars); - assertEquals((Double) ees.run(), 4.5, 0.001); - - ees.setNextVar("b", -2.5); - assertEquals((Double) ees.run(), -1, 0.001); - - ees.setNextVar("a", -2.5); - ees.setNextVar("b", -2.5); - ees.setNextVar("xyz", -2.5); - assertEquals((Double) ees.run(), -7.5, 0.001); - - String message; - - try { - vars = new HashMap<>(); - vars.put("a", 1); - ees = new ExpressionExecutableScript(expr, vars); - ees.run(); - fail("An incorrect number of variables were allowed to be used in an expression."); - } catch (GeneralScriptException se) { - message = se.getMessage(); - assertThat(message + " should have contained number of variables", message.contains("number of variables"), equalTo(true)); - } - - try { - vars = new HashMap<>(); - vars.put("a", 1); - vars.put("b", 3); - vars.put("c", -1); - ees = new ExpressionExecutableScript(expr, vars); - ees.run(); - fail("A variable was allowed to be set that does not exist in the expression."); - } catch (GeneralScriptException se) { - message = se.getMessage(); - assertThat(message + " should have contained does not exist in", message.contains("does not exist in"), equalTo(true)); - } - - try { - vars = new HashMap<>(); - vars.put("a", 1); - vars.put("b", 3); - vars.put("xyz", "hello"); - ees = new ExpressionExecutableScript(expr, vars); - ees.run(); - fail("A non-number was allowed to be use in the expression."); - } catch (GeneralScriptException se) { - message = se.getMessage(); - assertThat(message + " should have contained process numbers", message.contains("process numbers"), equalTo(true)); - } - - } - // test to make sure expressions are not allowed to be used as update scripts public void testInvalidUpdateScript() throws Exception { try { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java index 97dddbdfe52..e6ed475a7be 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java @@ -28,11 +28,14 @@ import org.elasticsearch.painless.spi.Whitelist; import org.objectweb.asm.util.Printer; import java.lang.reflect.Constructor; +import java.lang.reflect.Method; import java.net.MalformedURLException; import java.net.URL; import java.security.CodeSource; import java.security.SecureClassLoader; import java.security.cert.Certificate; +import java.util.Collections; +import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; @@ -89,16 +92,11 @@ final class Compiler { */ @Override public Class findClass(String name) throws ClassNotFoundException { - if (scriptClass.getName().equals(name)) { - return scriptClass; + Class found = additionalClasses.get(name); + if (found != null) { + return found; } - if (factoryClass != null && factoryClass.getName().equals(name)) { - return factoryClass; - } - if (statefulFactoryClass != null && statefulFactoryClass.getName().equals(name)) { - return statefulFactoryClass; - } - Class found = painlessLookup.canonicalTypeNameToType(name.replace('$', '.')); + found = painlessLookup.canonicalTypeNameToType(name.replace('$', '.')); return found != null ? 
found : super.findClass(name); } @@ -155,21 +153,16 @@ final class Compiler { */ private final Class scriptClass; - /** - * The class/interface to create the {@code scriptClass} instance. - */ - private final Class factoryClass; - - /** - * An optional class/interface to create the {@code factoryClass} instance. - */ - private final Class statefulFactoryClass; - /** * The whitelist the script will use. */ private final PainlessLookup painlessLookup; + /** + * Classes that do not exist in the lookup, but are needed by the script factories. + */ + private final Map> additionalClasses; + /** * Standard constructor. * @param scriptClass The class/interface the script will implement. @@ -179,9 +172,36 @@ final class Compiler { */ Compiler(Class scriptClass, Class factoryClass, Class statefulFactoryClass, PainlessLookup painlessLookup) { this.scriptClass = scriptClass; - this.factoryClass = factoryClass; - this.statefulFactoryClass = statefulFactoryClass; this.painlessLookup = painlessLookup; + Map> additionalClasses = new HashMap<>(); + additionalClasses.put(scriptClass.getName(), scriptClass); + addFactoryMethod(additionalClasses, factoryClass, "newInstance"); + addFactoryMethod(additionalClasses, statefulFactoryClass, "newFactory"); + addFactoryMethod(additionalClasses, statefulFactoryClass, "newInstance"); + this.additionalClasses = Collections.unmodifiableMap(additionalClasses); + } + + private static void addFactoryMethod(Map> additionalClasses, Class factoryClass, String methodName) { + if (factoryClass == null) { + return; + } + + Method factoryMethod = null; + for (Method method : factoryClass.getMethods()) { + if (methodName.equals(method.getName())) { + factoryMethod = method; + break; + } + } + if (factoryMethod == null) { + return; + } + + additionalClasses.put(factoryClass.getName(), factoryClass); + for (int i = 0; i < factoryMethod.getParameterTypes().length; ++i) { + Class parameterClazz = factoryMethod.getParameterTypes()[i]; + additionalClasses.put(parameterClazz.getName(), parameterClazz); + } } /** diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java index e07c016ddd0..d819f53bf0b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java @@ -295,7 +295,6 @@ public final class Locals { public final Class clazz; public final boolean readonly; private final int slot; - private boolean used; public Variable(Location location, String name, Class clazz, int slot, boolean readonly) { this.location = location; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java index dca638b3ddd..a39453838f3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java @@ -347,17 +347,39 @@ public final class MethodWriter extends GeneratorAdapter { } switch (operation) { - case MUL: math(GeneratorAdapter.MUL, getType(clazz)); break; - case DIV: math(GeneratorAdapter.DIV, getType(clazz)); break; - case REM: math(GeneratorAdapter.REM, getType(clazz)); break; - case ADD: math(GeneratorAdapter.ADD, getType(clazz)); break; - case SUB: math(GeneratorAdapter.SUB, getType(clazz)); break; - case LSH: math(GeneratorAdapter.SHL, getType(clazz)); break; 
- case USH: math(GeneratorAdapter.USHR, getType(clazz)); break; - case RSH: math(GeneratorAdapter.SHR, getType(clazz)); break; - case BWAND: math(GeneratorAdapter.AND, getType(clazz)); break; - case XOR: math(GeneratorAdapter.XOR, getType(clazz)); break; - case BWOR: math(GeneratorAdapter.OR, getType(clazz)); break; + case MUL: + math(GeneratorAdapter.MUL, getType(clazz)); + break; + case DIV: + math(GeneratorAdapter.DIV, getType(clazz)); + break; + case REM: + math(GeneratorAdapter.REM, getType(clazz)); + break; + case ADD: + math(GeneratorAdapter.ADD, getType(clazz)); + break; + case SUB: + math(GeneratorAdapter.SUB, getType(clazz)); + break; + case LSH: + math(GeneratorAdapter.SHL, getType(clazz)); + break; + case USH: + math(GeneratorAdapter.USHR, getType(clazz)); + break; + case RSH: + math(GeneratorAdapter.SHR, getType(clazz)); + break; + case BWAND: + math(GeneratorAdapter.AND, getType(clazz)); + break; + case XOR: + math(GeneratorAdapter.XOR, getType(clazz)); + break; + case BWOR: + math(GeneratorAdapter.OR, getType(clazz)); + break; default: throw location.createError(new IllegalStateException("Illegal tree structure.")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java index 3a2a6d1452d..5ed305751c8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.painless.Compiler.Loader; import org.elasticsearch.painless.lookup.PainlessLookupBuilder; import org.elasticsearch.painless.spi.Whitelist; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptException; @@ -38,6 +37,7 @@ import org.objectweb.asm.commons.GeneratorAdapter; import java.lang.invoke.MethodType; import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.security.AccessControlContext; import java.security.AccessController; @@ -101,7 +101,7 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr for (Map.Entry, List> entry : contexts.entrySet()) { ScriptContext context = entry.getKey(); - if (context.instanceClazz.equals(SearchScript.class) || context.instanceClazz.equals(ExecutableScript.class)) { + if (context.instanceClazz.equals(SearchScript.class)) { contextsToCompilers.put(context, new Compiler(GenericElasticsearchScript.class, null, null, PainlessLookupBuilder.buildFromWhitelists(entry.getValue()))); } else { @@ -127,26 +127,33 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr Compiler compiler = contextsToCompilers.get(context); if (context.instanceClazz.equals(SearchScript.class)) { - GenericElasticsearchScript painlessScript = - (GenericElasticsearchScript)compile(compiler, scriptName, scriptSource, params); + Constructor constructor = compile(compiler, scriptName, scriptSource, params); + boolean needsScore; + + try { + GenericElasticsearchScript newInstance = (GenericElasticsearchScript)constructor.newInstance(); + needsScore = newInstance.needs_score(); + } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { + throw new 
IllegalArgumentException("internal error"); + } SearchScript.Factory factory = (p, lookup) -> new SearchScript.LeafFactory() { @Override public SearchScript newInstance(final LeafReaderContext context) { - return new ScriptImpl(painlessScript, p, lookup, context); + try { + // a new instance is required for the class bindings model to work correctly + GenericElasticsearchScript newInstance = (GenericElasticsearchScript)constructor.newInstance(); + return new ScriptImpl(newInstance, p, lookup, context); + } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { + throw new IllegalArgumentException("internal error"); + } } @Override public boolean needs_score() { - return painlessScript.needs_score(); + return needsScore; } }; return context.factoryClazz.cast(factory); - } else if (context.instanceClazz.equals(ExecutableScript.class)) { - GenericElasticsearchScript painlessScript = - (GenericElasticsearchScript)compile(compiler, scriptName, scriptSource, params); - - ExecutableScript.Factory factory = (p) -> new ScriptImpl(painlessScript, p, null, null); - return context.factoryClazz.cast(factory); } else { // Check we ourselves are not being called by unprivileged code. SpecialPermission.check(); @@ -367,7 +374,7 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr } } - Object compile(Compiler compiler, String scriptName, String source, Map params, Object... args) { + Constructor compile(Compiler compiler, String scriptName, String source, Map params) { final CompilerSettings compilerSettings = buildCompilerSettings(params); // Check we ourselves are not being called by unprivileged code. @@ -383,14 +390,14 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr try { // Drop all permissions to actually compile the code itself. - return AccessController.doPrivileged(new PrivilegedAction() { + return AccessController.doPrivileged(new PrivilegedAction>() { @Override - public Object run() { + public Constructor run() { String name = scriptName == null ? source : scriptName; Constructor constructor = compiler.compile(loader, new MainMethodReserved(), name, source, compilerSettings); try { - return constructor.newInstance(args); + return constructor; } catch (Exception exception) { // Catch everything to let the user know this is something caused internally. throw new IllegalStateException( "An internal error occurred attempting to define the script [" + name + "].", exception); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptImpl.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptImpl.java index 067bf38cb36..2f31694ff3c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptImpl.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptImpl.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless; import org.apache.lucene.index.LeafReaderContext; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; @@ -31,7 +30,7 @@ import java.util.function.DoubleSupplier; import java.util.function.Function; /** - * ScriptImpl can be used as either an {@link ExecutableScript} or a {@link SearchScript} + * ScriptImpl can be used as a {@link SearchScript} * to run a previously compiled Painless script. 
*/ final class ScriptImpl extends SearchScript { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java index f87f8a134b8..98968465d34 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java @@ -19,10 +19,15 @@ package org.elasticsearch.painless.lookup; +import java.util.Objects; + public class PainlessCast { /** Create a standard cast with no boxing/unboxing. */ public static PainlessCast originalTypetoTargetType(Class originalType, Class targetType, boolean explicitCast) { + Objects.requireNonNull(originalType); + Objects.requireNonNull(targetType); + return new PainlessCast(originalType, targetType, explicitCast, null, null, null, null); } @@ -30,6 +35,10 @@ public class PainlessCast { public static PainlessCast unboxOriginalType( Class originalType, Class targetType, boolean explicitCast, Class unboxOriginalType) { + Objects.requireNonNull(originalType); + Objects.requireNonNull(targetType); + Objects.requireNonNull(unboxOriginalType); + return new PainlessCast(originalType, targetType, explicitCast, unboxOriginalType, null, null, null); } @@ -37,6 +46,10 @@ public class PainlessCast { public static PainlessCast unboxTargetType( Class originalType, Class targetType, boolean explicitCast, Class unboxTargetType) { + Objects.requireNonNull(originalType); + Objects.requireNonNull(targetType); + Objects.requireNonNull(unboxTargetType); + return new PainlessCast(originalType, targetType, explicitCast, null, unboxTargetType, null, null); } @@ -44,6 +57,10 @@ public class PainlessCast { public static PainlessCast boxOriginalType( Class originalType, Class targetType, boolean explicitCast, Class boxOriginalType) { + Objects.requireNonNull(originalType); + Objects.requireNonNull(targetType); + Objects.requireNonNull(boxOriginalType); + return new PainlessCast(originalType, targetType, explicitCast, null, null, boxOriginalType, null); } @@ -51,6 +68,10 @@ public class PainlessCast { public static PainlessCast boxTargetType( Class originalType, Class targetType, boolean explicitCast, Class boxTargetType) { + Objects.requireNonNull(originalType); + Objects.requireNonNull(targetType); + Objects.requireNonNull(boxTargetType); + return new PainlessCast(originalType, targetType, explicitCast, null, null, null, boxTargetType); } @@ -73,4 +94,30 @@ public class PainlessCast { this.boxOriginalType = boxOriginalType; this.boxTargetType = boxTargetType; } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessCast that = (PainlessCast)object; + + return explicitCast == that.explicitCast && + Objects.equals(originalType, that.originalType) && + Objects.equals(targetType, that.targetType) && + Objects.equals(unboxOriginalType, that.unboxOriginalType) && + Objects.equals(unboxTargetType, that.unboxTargetType) && + Objects.equals(boxOriginalType, that.boxOriginalType) && + Objects.equals(boxTargetType, that.boxTargetType); + } + + @Override + public int hashCode() { + return Objects.hash(originalType, targetType, explicitCast, unboxOriginalType, unboxTargetType, boxOriginalType, boxTargetType); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java 
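The engine change above is the heart of this patch: `compile` now returns the generated `Constructor<?>`, and the leaf factory instantiates a fresh script object per call instead of sharing one compiled instance, so per-instance state such as class bindings is never shared across leaves. A self-contained sketch of that per-call instantiation, with a hypothetical `ExampleScript` standing in for the generated script class:

-------------------------------------
import java.lang.reflect.Constructor;

final class PerInstanceFactory {

    // Hypothetical script class with observable per-instance state.
    public static class ExampleScript {
        int counter = 0;
        public int execute() { return ++counter; }
    }

    private final Constructor<? extends ExampleScript> constructor;

    PerInstanceFactory(Class<? extends ExampleScript> clazz) throws NoSuchMethodException {
        // Compilation would normally produce this constructor; here we reflect it.
        this.constructor = clazz.getConstructor();
    }

    ExampleScript newInstance() {
        try {
            // A fresh object per call, as in the engine's LeafFactory.newInstance.
            return constructor.newInstance();
        } catch (ReflectiveOperationException e) {
            throw new IllegalArgumentException("internal error", e);
        }
    }

    public static void main(String[] args) throws Exception {
        PerInstanceFactory factory = new PerInstanceFactory(ExampleScript.class);
        System.out.println(factory.newInstance().execute()); // 1
        System.out.println(factory.newInstance().execute()); // 1 again: clean state
    }
}
-------------------------------------

The counter makes the point observable: every `newInstance()` starts from clean state, which a single shared instance would not.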
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java
index f5d6c97bb2f..786b4c6a3b9 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java
@@ -22,6 +22,7 @@ package org.elasticsearch.painless.lookup;
 import java.lang.invoke.MethodHandle;
 import java.util.Collections;
 import java.util.Map;
+import java.util.Objects;
 
 public final class PainlessClass {
 
@@ -57,4 +58,29 @@ public final class PainlessClass {
 
         this.functionalInterfaceMethod = functionalInterfaceMethod;
     }
+
+    @Override
+    public boolean equals(Object object) {
+        if (this == object) {
+            return true;
+        }
+
+        if (object == null || getClass() != object.getClass()) {
+            return false;
+        }
+
+        PainlessClass that = (PainlessClass)object;
+
+        return Objects.equals(constructors, that.constructors) &&
+                Objects.equals(staticMethods, that.staticMethods) &&
+                Objects.equals(methods, that.methods) &&
+                Objects.equals(staticFields, that.staticFields) &&
+                Objects.equals(fields, that.fields) &&
+                Objects.equals(functionalInterfaceMethod, that.functionalInterfaceMethod);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(constructors, staticMethods, methods, staticFields, fields, functionalInterfaceMethod);
+    }
 }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBinding.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBinding.java
index 3418b2d8244..0f28830b3d4 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBinding.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBinding.java
@@ -22,6 +22,7 @@ package org.elasticsearch.painless.lookup;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.Method;
 import java.util.List;
+import java.util.Objects;
 
 public class PainlessClassBinding {
 
@@ -38,4 +39,28 @@ public class PainlessClassBinding {
         this.returnType = returnType;
         this.typeParameters = typeParameters;
     }
+
+    @Override
+    public boolean equals(Object object) {
+        if (this == object) {
+            return true;
+        }
+
+        if (object == null || getClass() != object.getClass()) {
+            return false;
+        }
+
+        PainlessClassBinding that = (PainlessClassBinding)object;
+
+        return Objects.equals(javaConstructor, that.javaConstructor) &&
+                Objects.equals(javaMethod, that.javaMethod) &&
+                Objects.equals(returnType, that.returnType) &&
+                Objects.equals(typeParameters, that.typeParameters);
+    }
+
+    @Override
+    public int hashCode() {
+
+        return Objects.hash(javaConstructor, javaMethod, returnType, typeParameters);
+    }
 }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java
index 92100d1bda0..fbf9e45bf16 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java
@@ -22,6 +22,7 @@ package org.elasticsearch.painless.lookup;
 import java.lang.invoke.MethodHandle;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Objects;
 
 final class PainlessClassBuilder {
 
@@ -57,4 +58,29 @@ final class PainlessClassBuilder {
         return new PainlessClass(constructors, staticMethods, methods, staticFields, fields,
                 getterMethodHandles, setterMethodHandles, functionalInterfaceMethod);
     }
+
+    @Override
+    public boolean equals(Object object) {
+        if (this == object) {
+            return true;
+        }
+
+        if (object == null || getClass() != object.getClass()) {
+            return false;
+        }
+
+        PainlessClassBuilder that = (PainlessClassBuilder)object;
+
+        return Objects.equals(constructors, that.constructors) &&
+                Objects.equals(staticMethods, that.staticMethods) &&
+                Objects.equals(methods, that.methods) &&
+                Objects.equals(staticFields, that.staticFields) &&
+                Objects.equals(fields, that.fields) &&
+                Objects.equals(functionalInterfaceMethod, that.functionalInterfaceMethod);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(constructors, staticMethods, methods, staticFields, fields, functionalInterfaceMethod);
+    }
 }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java
index a3dc6c8122b..0f890e88b73 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java
@@ -23,6 +23,7 @@ import java.lang.invoke.MethodHandle;
 import java.lang.invoke.MethodType;
 import java.lang.reflect.Constructor;
 import java.util.List;
+import java.util.Objects;
 
 public class PainlessConstructor {
 
@@ -37,4 +38,26 @@ public class PainlessConstructor {
         this.methodHandle = methodHandle;
         this.methodType = methodType;
     }
+
+    @Override
+    public boolean equals(Object object) {
+        if (this == object) {
+            return true;
+        }
+
+        if (object == null || getClass() != object.getClass()) {
+            return false;
+        }
+
+        PainlessConstructor that = (PainlessConstructor)object;
+
+        return Objects.equals(javaConstructor, that.javaConstructor) &&
+                Objects.equals(typeParameters, that.typeParameters) &&
+                Objects.equals(methodType, that.methodType);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(javaConstructor, typeParameters, methodType);
+    }
 }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java
index 9567e97331c..72a57159b44 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java
@@ -21,6 +21,7 @@ package org.elasticsearch.painless.lookup;
 
 import java.lang.invoke.MethodHandle;
 import java.lang.reflect.Field;
+import java.util.Objects;
 
 public final class PainlessField {
 
@@ -37,4 +38,25 @@ public final class PainlessField {
         this.getterMethodHandle = getterMethodHandle;
         this.setterMethodHandle = setterMethodHandle;
     }
+
+    @Override
+    public boolean equals(Object object) {
+        if (this == object) {
+            return true;
+        }
+
+        if (object == null || getClass() != object.getClass()) {
+            return false;
+        }
+
+        PainlessField that = (PainlessField)object;
+
+        return Objects.equals(javaField, that.javaField) &&
+                Objects.equals(typeParameter, that.typeParameter);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(javaField, typeParameter);
+    }
 }
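The `equals`/`hashCode` pairs added to these lookup value types are not cosmetic: they let the objects serve as their own cache keys, replacing the dedicated `*CacheKey` classes that the `PainlessLookupBuilder` diff below removes. A small sketch of that interning idiom; all names here are hypothetical:

-------------------------------------
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

final class InterningCacheExample {

    // A value type with equals/hashCode, like the Painless* lookup classes.
    static final class Key {
        final String name;
        final int arity;
        Key(String name, int arity) { this.name = name; this.arity = arity; }
        @Override public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Key that = (Key) o;
            return arity == that.arity && Objects.equals(name, that.name);
        }
        @Override public int hashCode() { return Objects.hash(name, arity); }
    }

    private static final Map<Key, Key> CACHE = new HashMap<>();

    // The object is its own key: absent -> stored and returned, present -> canonical copy returned.
    static Key intern(Key key) {
        return CACHE.computeIfAbsent(key, k -> k);
    }

    public static void main(String[] args) {
        Key first = intern(new Key("execute", 2));
        Key second = intern(new Key("execute", 2));
        System.out.println(first == second); // true: equal objects collapse to one instance
    }
}
-------------------------------------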
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java
index ce451f3dca8..b3bc8580b38 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java
@@ -20,8 +20,8 @@
 package org.elasticsearch.painless.lookup;
 
 import org.elasticsearch.painless.spi.Whitelist;
-import org.elasticsearch.painless.spi.WhitelistClassBinding;
 import org.elasticsearch.painless.spi.WhitelistClass;
+import org.elasticsearch.painless.spi.WhitelistClassBinding;
 import org.elasticsearch.painless.spi.WhitelistConstructor;
 import org.elasticsearch.painless.spi.WhitelistField;
 import org.elasticsearch.painless.spi.WhitelistMethod;
@@ -34,7 +34,6 @@ import java.lang.reflect.Field;
 import java.lang.reflect.Method;
 import java.lang.reflect.Modifier;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -51,155 +50,10 @@ import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typesToCan
 
 public final class PainlessLookupBuilder {
 
-    private static class PainlessConstructorCacheKey {
-
-        private final Class<?> targetClass;
-        private final List<Class<?>> typeParameters;
-
-        private PainlessConstructorCacheKey(Class<?> targetClass, List<Class<?>> typeParameters) {
-            this.targetClass = targetClass;
-            this.typeParameters = Collections.unmodifiableList(typeParameters);
-        }
-
-        @Override
-        public boolean equals(Object object) {
-            if (this == object) {
-                return true;
-            }
-
-            if (object == null || getClass() != object.getClass()) {
-                return false;
-            }
-
-            PainlessConstructorCacheKey that = (PainlessConstructorCacheKey)object;
-
-            return Objects.equals(targetClass, that.targetClass) &&
-                    Objects.equals(typeParameters, that.typeParameters);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(targetClass, typeParameters);
-        }
-    }
-
-    private static class PainlessMethodCacheKey {
-
-        private final Class<?> targetClass;
-        private final String methodName;
-        private final Class<?> returnType;
-        private final List<Class<?>> typeParameters;
-
-        private PainlessMethodCacheKey(Class<?> targetClass, String methodName, Class<?> returnType, List<Class<?>> typeParameters) {
-            this.targetClass = targetClass;
-            this.methodName = methodName;
-            this.returnType = returnType;
-            this.typeParameters = Collections.unmodifiableList(typeParameters);
-        }
-
-        @Override
-        public boolean equals(Object object) {
-            if (this == object) {
-                return true;
-            }
-
-            if (object == null || getClass() != object.getClass()) {
-                return false;
-            }
-
-            PainlessMethodCacheKey that = (PainlessMethodCacheKey)object;
-
-            return Objects.equals(targetClass, that.targetClass) &&
-                    Objects.equals(methodName, that.methodName) &&
-                    Objects.equals(returnType, that.returnType) &&
-                    Objects.equals(typeParameters, that.typeParameters);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(targetClass, methodName, returnType, typeParameters);
-        }
-    }
-
-    private static class PainlessFieldCacheKey {
-
-        private final Class<?> targetClass;
-        private final String fieldName;
-        private final Class<?> typeParameter;
-
-        private PainlessFieldCacheKey(Class<?> targetClass, String fieldName, Class<?> typeParameter) {
-            this.targetClass = targetClass;
-            this.fieldName = fieldName;
-            this.typeParameter = typeParameter;
-        }
-
-        @Override
-        public boolean equals(Object object) {
-            if (this == object) {
-                return true;
-            }
-
-            if (object == null || getClass() != object.getClass()) {
-                return false;
-            }
-
-            PainlessFieldCacheKey that = (PainlessFieldCacheKey) object;
-
-            return Objects.equals(targetClass, that.targetClass) &&
-                    Objects.equals(fieldName, that.fieldName) &&
-                    Objects.equals(typeParameter, that.typeParameter);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(targetClass, fieldName, typeParameter);
-        }
-    }
-
-    private static class PainlessClassBindingCacheKey {
-
-        private final Class<?> targetClass;
-        private final String methodName;
-        private final Class<?> methodReturnType;
-        private final List<Class<?>> methodTypeParameters;
-
-        private PainlessClassBindingCacheKey(Class<?> targetClass,
-                String methodName, Class<?> returnType, List<Class<?>> typeParameters) {
-
-            this.targetClass = targetClass;
-            this.methodName = methodName;
-            this.methodReturnType = returnType;
-            this.methodTypeParameters = Collections.unmodifiableList(typeParameters);
-        }
-
-        @Override
-        public boolean equals(Object object) {
-            if (this == object) {
-                return true;
-            }
-
-            if (object == null || getClass() != object.getClass()) {
-                return false;
-            }
-
-            PainlessClassBindingCacheKey that = (PainlessClassBindingCacheKey)object;
-
-            return Objects.equals(targetClass, that.targetClass) &&
-                    Objects.equals(methodName, that.methodName) &&
-                    Objects.equals(methodReturnType, that.methodReturnType) &&
-                    Objects.equals(methodTypeParameters, that.methodTypeParameters);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(targetClass, methodName, methodReturnType, methodTypeParameters);
-        }
-    }
-
-    private static final Map<PainlessConstructorCacheKey, PainlessConstructor> painlessConstructorCache = new HashMap<>();
-    private static final Map<PainlessMethodCacheKey, PainlessMethod> painlessMethodCache = new HashMap<>();
-    private static final Map<PainlessFieldCacheKey, PainlessField> painlessFieldCache = new HashMap<>();
-    private static final Map<PainlessClassBindingCacheKey, PainlessClassBinding> painlessClassBindingCache = new HashMap<>();
+    private static final Map<PainlessConstructor, PainlessConstructor> painlessConstructorCache = new HashMap<>();
+    private static final Map<PainlessMethod, PainlessMethod> painlessMethodCache = new HashMap<>();
+    private static final Map<PainlessField, PainlessField> painlessFieldCache = new HashMap<>();
+    private static final Map<PainlessClassBinding, PainlessClassBinding> painlessClassBindingCache = new HashMap<>();
 
     private static final Pattern CLASS_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$");
     private static final Pattern METHOD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$");
@@ -335,8 +189,7 @@ public final class PainlessLookupBuilder {
             throw new IllegalArgumentException("invalid class name [" + canonicalClassName + "]");
         }
 
-
-        Class<?> existingClass = canonicalClassNamesToClasses.get(typeToCanonicalTypeName(clazz));
+        Class<?> existingClass = canonicalClassNamesToClasses.get(canonicalClassName);
 
         if (existingClass != null && existingClass != clazz) {
             throw new IllegalArgumentException("class [" + canonicalClassName + "] " +
@@ -360,22 +213,22 @@ public final class PainlessLookupBuilder {
                 throw new IllegalArgumentException("must use no_import parameter on class [" + canonicalClassName + "] with no package");
             }
         } else {
-            Class<?> importedPainlessClass = canonicalClassNamesToClasses.get(importedCanonicalClassName);
+            Class<?> importedClass = canonicalClassNamesToClasses.get(importedCanonicalClassName);
 
-            if (importedPainlessClass == null) {
+            if (importedClass == null) {
                 if (importClassName) {
                     if (existingPainlessClassBuilder != null) {
                         throw new IllegalArgumentException(
-                                "inconsistent no_import parameters found for class [" + canonicalClassName + "]");
+                                "inconsistent no_import parameter found for class [" + canonicalClassName + "]");
                     }
 
                     canonicalClassNamesToClasses.put(importedCanonicalClassName, clazz);
                 }
-            } else if (importedPainlessClass != clazz) {
+            } else if (importedClass != clazz) {
                 throw new IllegalArgumentException("imported class [" + importedCanonicalClassName + "] cannot represent multiple " +
-                        "classes [" + canonicalClassName + "] and [" + typeToCanonicalTypeName(importedPainlessClass) + "]");
+                        "classes [" + canonicalClassName + "] and [" + typeToCanonicalTypeName(importedClass) + "]");
             } else if (importClassName == false) {
-                throw new IllegalArgumentException("inconsistent no_import parameters found for class [" + canonicalClassName + "]");
+                throw new IllegalArgumentException("inconsistent no_import parameter found for class [" + canonicalClassName + "]");
             }
         }
     }
@@ -440,36 +293,32 @@ public final class PainlessLookupBuilder {
         try {
             javaConstructor = targetClass.getConstructor(javaTypeParameters.toArray(new Class<?>[typeParametersSize]));
         } catch (NoSuchMethodException nsme) {
-            throw new IllegalArgumentException("constructor reflection object " +
-                    "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme);
+            throw new IllegalArgumentException("reflection object not found for constructor " +
+                    "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", nsme);
         }
 
+        MethodHandle methodHandle;
+
+        try {
+            methodHandle = MethodHandles.publicLookup().in(targetClass).unreflectConstructor(javaConstructor);
+        } catch (IllegalAccessException iae) {
+            throw new IllegalArgumentException("method handle not found for constructor " +
+                    "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae);
+        }
+
+        MethodType methodType = methodHandle.type();
+
         String painlessConstructorKey = buildPainlessConstructorKey(typeParametersSize);
-        PainlessConstructor painlessConstructor = painlessClassBuilder.constructors.get(painlessConstructorKey);
+        PainlessConstructor existingPainlessConstructor = painlessClassBuilder.constructors.get(painlessConstructorKey);
+        PainlessConstructor newPainlessConstructor = new PainlessConstructor(javaConstructor, typeParameters, methodHandle, methodType);
 
-        if (painlessConstructor == null) {
-            MethodHandle methodHandle;
-
-            try {
-                methodHandle = MethodHandles.publicLookup().in(targetClass).unreflectConstructor(javaConstructor);
-            } catch (IllegalAccessException iae) {
-                throw new IllegalArgumentException("constructor method handle " +
-                        "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae);
-            }
-
-            MethodType methodType = methodHandle.type();
-
-            painlessConstructor = painlessConstructorCache.computeIfAbsent(
-                    new PainlessConstructorCacheKey(targetClass, typeParameters),
-                    key -> new PainlessConstructor(javaConstructor, typeParameters, methodHandle, methodType)
-            );
-
-            painlessClassBuilder.constructors.put(painlessConstructorKey, painlessConstructor);
-        } else if (painlessConstructor.typeParameters.equals(typeParameters) == false){
-            throw new IllegalArgumentException("cannot have constructors " +
+        if (existingPainlessConstructor == null) {
+            newPainlessConstructor = painlessConstructorCache.computeIfAbsent(newPainlessConstructor, key -> key);
+            painlessClassBuilder.constructors.put(painlessConstructorKey, newPainlessConstructor);
+        } else if (newPainlessConstructor.equals(existingPainlessConstructor) == false) {
+            throw new IllegalArgumentException("cannot add constructors with the same arity but are not equivalent for constructors " +
                     "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] and " +
-                    "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(painlessConstructor.typeParameters) + "] " +
-                    "with the same arity and different type parameters");
+                    "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(existingPainlessConstructor.typeParameters) + "]");
         }
     }
throw new IllegalArgumentException("method handle not found for method " + + "[[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "]", iae); } } else { - PainlessMethod painlessMethod = painlessClassBuilder.methods.get(painlessMethodKey); - - if (painlessMethod == null) { - MethodHandle methodHandle; - - if (augmentedClass == null) { - try { - methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); - } catch (IllegalAccessException iae) { - throw new IllegalArgumentException("method handle [[" + targetClass.getCanonicalName() + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); - } - } else { - try { - methodHandle = MethodHandles.publicLookup().in(augmentedClass).unreflect(javaMethod); - } catch (IllegalAccessException iae) { - throw new IllegalArgumentException("method handle [[" + targetClass.getCanonicalName() + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found " + - "with augmented target class [" + typeToCanonicalTypeName(augmentedClass) + "]", iae); - } - } - - MethodType methodType = methodHandle.type(); - - painlessMethod = painlessMethodCache.computeIfAbsent( - new PainlessMethodCacheKey(targetClass, methodName, returnType, typeParameters), - key -> new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType)); - - painlessClassBuilder.methods.put(painlessMethodKey, painlessMethod); - } else if (painlessMethod.returnType == returnType && painlessMethod.typeParameters.equals(typeParameters) == false) { - throw new IllegalArgumentException("cannot have methods " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + - "[" + typeToCanonicalTypeName(returnType) + "], " + - typesToCanonicalTypeNames(typeParameters) + "] and " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + - "[" + typeToCanonicalTypeName(painlessMethod.returnType) + "], " + - typesToCanonicalTypeNames(painlessMethod.typeParameters) + "] " + - "with the same arity and different return type or type parameters"); + try { + methodHandle = MethodHandles.publicLookup().in(augmentedClass).unreflect(javaMethod); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("method handle not found for method " + + "[[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "]" + + "with augmented class [" + typeToCanonicalTypeName(augmentedClass) + "]", iae); } } + + MethodType methodType = methodHandle.type(); + + boolean isStatic = augmentedClass == null && Modifier.isStatic(javaMethod.getModifiers()); + String painlessMethodKey = buildPainlessMethodKey(methodName, typeParametersSize); + PainlessMethod existingPainlessMethod = isStatic ? 
+ painlessClassBuilder.staticMethods.get(painlessMethodKey) : + painlessClassBuilder.methods.get(painlessMethodKey); + PainlessMethod newPainlessMethod = + new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType); + + if (existingPainlessMethod == null) { + newPainlessMethod = painlessMethodCache.computeIfAbsent(newPainlessMethod, key -> key); + + if (isStatic) { + painlessClassBuilder.staticMethods.put(painlessMethodKey, newPainlessMethod); + } else { + painlessClassBuilder.methods.put(painlessMethodKey, newPainlessMethod); + } + } else if (newPainlessMethod.equals(existingPainlessMethod) == false) { + throw new IllegalArgumentException("cannot add methods with the same name and arity but are not equivalent for methods " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + + "[" + typeToCanonicalTypeName(returnType) + "], " + + typesToCanonicalTypeNames(typeParameters) + "] and " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + + "[" + typeToCanonicalTypeName(existingPainlessMethod.returnType) + "], " + + typesToCanonicalTypeNames(existingPainlessMethod.typeParameters) + "]"); + } } public void addPainlessField(String targetCanonicalClassName, String fieldName, String canonicalTypeNameParameter) { @@ -687,7 +511,8 @@ public final class PainlessLookupBuilder { Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); if (targetClass == null) { - throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found"); + throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for field " + + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + canonicalTypeNameParameter + "]]"); } Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); @@ -721,7 +546,8 @@ public final class PainlessLookupBuilder { PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass); if (painlessClassBuilder == null) { - throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found"); + throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for field " + + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + typeToCanonicalTypeName(typeParameter) + "]]"); } if (isValidType(typeParameter) == false) { @@ -735,7 +561,7 @@ public final class PainlessLookupBuilder { javaField = targetClass.getField(fieldName); } catch (NoSuchFieldException nsme) { throw new IllegalArgumentException( - "field reflection object [[" + targetCanonicalClassName + "], [" + fieldName + "] not found", nsme); + "reflection object not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]", nsme); } if (javaField.getType() != typeToJavaType(typeParameter)) { @@ -760,20 +586,18 @@ public final class PainlessLookupBuilder { throw new IllegalArgumentException("static field [[" + targetCanonicalClassName + "], [" + fieldName + "]] must be final"); } - PainlessField painlessField = painlessClassBuilder.staticFields.get(painlessFieldKey); + PainlessField existingPainlessField = painlessClassBuilder.staticFields.get(painlessFieldKey); + PainlessField newPainlessField = new PainlessField(javaField, typeParameter, methodHandleGetter, null); - if (painlessField == null) { - painlessField = painlessFieldCache.computeIfAbsent( - new PainlessFieldCacheKey(targetClass, fieldName, typeParameter), - key -> new PainlessField(javaField, typeParameter, 
methodHandleGetter, null)); - - painlessClassBuilder.staticFields.put(painlessFieldKey, painlessField); - } else if (painlessField.typeParameter != typeParameter) { - throw new IllegalArgumentException("cannot have static fields " + + if (existingPainlessField == null) { + newPainlessField = painlessFieldCache.computeIfAbsent(newPainlessField, key -> key); + painlessClassBuilder.staticFields.put(painlessFieldKey, newPainlessField); + } else if (newPainlessField.equals(existingPainlessField) == false) { + throw new IllegalArgumentException("cannot add fields with the same name but are not equivalent for fields " + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + typeToCanonicalTypeName(typeParameter) + "] and " + - "[[" + targetCanonicalClassName + "], [" + painlessField.javaField.getName() + "], " + - typeToCanonicalTypeName(painlessField.typeParameter) + "] " + + "[[" + targetCanonicalClassName + "], [" + existingPainlessField.javaField.getName() + "], " + + typeToCanonicalTypeName(existingPainlessField.typeParameter) + "] " + "with the same name and different type parameters"); } } else { @@ -786,35 +610,41 @@ public final class PainlessLookupBuilder { "setter method handle not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"); } - PainlessField painlessField = painlessClassBuilder.fields.get(painlessFieldKey); + PainlessField existingPainlessField = painlessClassBuilder.fields.get(painlessFieldKey); + PainlessField newPainlessField = new PainlessField(javaField, typeParameter, methodHandleGetter, methodHandleSetter); - if (painlessField == null) { - painlessField = painlessFieldCache.computeIfAbsent( - new PainlessFieldCacheKey(targetClass, painlessFieldKey, typeParameter), - key -> new PainlessField(javaField, typeParameter, methodHandleGetter, methodHandleSetter)); - - painlessClassBuilder.fields.put(fieldName, painlessField); - } else if (painlessField.typeParameter != typeParameter) { - throw new IllegalArgumentException("cannot have fields " + + if (existingPainlessField == null) { + newPainlessField = painlessFieldCache.computeIfAbsent(newPainlessField, key -> key); + painlessClassBuilder.fields.put(painlessFieldKey, newPainlessField); + } else if (newPainlessField.equals(existingPainlessField) == false) { + throw new IllegalArgumentException("cannot add fields with the same name but are not equivalent for fields " + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + typeToCanonicalTypeName(typeParameter) + "] and " + - "[[" + targetCanonicalClassName + "], [" + painlessField.javaField.getName() + "], " + - typeToCanonicalTypeName(painlessField.typeParameter) + "] " + + "[[" + targetCanonicalClassName + "], [" + existingPainlessField.javaField.getName() + "], " + + typeToCanonicalTypeName(existingPainlessField.typeParameter) + "] " + "with the same name and different type parameters"); } } } - public void addImportedPainlessMethod(ClassLoader classLoader, String targetCanonicalClassName, + public void addImportedPainlessMethod(ClassLoader classLoader, String targetJavaClassName, String methodName, String returnCanonicalTypeName, List canonicalTypeNameParameters) { Objects.requireNonNull(classLoader); - Objects.requireNonNull(targetCanonicalClassName); + Objects.requireNonNull(targetJavaClassName); Objects.requireNonNull(methodName); Objects.requireNonNull(returnCanonicalTypeName); Objects.requireNonNull(canonicalTypeNameParameters); - Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); + Class 
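Both the constructor and method paths above follow the same two-step shape: reflect the member, then convert it into an invokable handle via `MethodHandles.publicLookup()`, translating `IllegalAccessException` into `IllegalArgumentException`. A runnable sketch of that conversion, using `String.concat` purely for illustration:

-------------------------------------
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Method;

public final class UnreflectExample {
    public static void main(String[] args) throws Throwable {
        // Step 1: the reflection object, as targetClass.getMethod(...) produces above.
        Method method = String.class.getMethod("concat", String.class);

        // Step 2: the invokable handle, with the same error translation as the builder.
        MethodHandle handle;
        try {
            handle = MethodHandles.publicLookup().in(String.class).unreflect(method);
        } catch (IllegalAccessException iae) {
            throw new IllegalArgumentException("method handle not found for method [concat]", iae);
        }

        System.out.println(handle.invoke("foo", "bar")); // foobar
        System.out.println(handle.type());               // (String,String)String
    }
}
-------------------------------------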
@@ -913,35 +743,33 @@ public final class PainlessLookupBuilder {
             throw new IllegalArgumentException("imported method and class binding cannot have the same name [" + methodName + "]");
         }
 
-        PainlessMethod importedPainlessMethod = painlessMethodKeysToImportedPainlessMethods.get(painlessMethodKey);
+        MethodHandle methodHandle;
 
-        if (importedPainlessMethod == null) {
-            MethodHandle methodHandle;
+        try {
+            methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod);
+        } catch (IllegalAccessException iae) {
+            throw new IllegalArgumentException("imported method handle [[" + targetClass.getCanonicalName() + "], " +
+                    "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae);
+        }
 
-            try {
-                methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod);
-            } catch (IllegalAccessException iae) {
-                throw new IllegalArgumentException("imported method handle [[" + targetClass.getCanonicalName() + "], " +
-                        "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae);
-            }
+        MethodType methodType = methodHandle.type();
 
-            MethodType methodType = methodHandle.type();
+        PainlessMethod existingImportedPainlessMethod = painlessMethodKeysToImportedPainlessMethods.get(painlessMethodKey);
+        PainlessMethod newImportedPainlessMethod =
+                new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType);
 
-            importedPainlessMethod = painlessMethodCache.computeIfAbsent(
-                    new PainlessMethodCacheKey(targetClass, methodName, returnType, typeParameters),
-                    key -> new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType));
-
-            painlessMethodKeysToImportedPainlessMethods.put(painlessMethodKey, importedPainlessMethod);
-        } else if (importedPainlessMethod.returnType == returnType &&
-                importedPainlessMethod.typeParameters.equals(typeParameters) == false) {
-            throw new IllegalArgumentException("cannot have imported methods " +
+        if (existingImportedPainlessMethod == null) {
+            newImportedPainlessMethod = painlessMethodCache.computeIfAbsent(newImportedPainlessMethod, key -> key);
+            painlessMethodKeysToImportedPainlessMethods.put(painlessMethodKey, newImportedPainlessMethod);
+        } else if (newImportedPainlessMethod.equals(existingImportedPainlessMethod) == false) {
+            throw new IllegalArgumentException("cannot add imported methods with the same name and arity " +
+                    "but are not equivalent for methods " +
                     "[[" + targetCanonicalClassName + "], [" + methodName + "], " +
                     "[" + typeToCanonicalTypeName(returnType) + "], " +
                     typesToCanonicalTypeNames(typeParameters) + "] and " +
                     "[[" + targetCanonicalClassName + "], [" + methodName + "], " +
-                    "[" + typeToCanonicalTypeName(importedPainlessMethod.returnType) + "], " +
-                    typesToCanonicalTypeNames(importedPainlessMethod.typeParameters) + "] " +
-                    "with the same arity and different return type or type parameters");
+                    "[" + typeToCanonicalTypeName(existingImportedPainlessMethod.returnType) + "], " +
+                    typesToCanonicalTypeNames(existingImportedPainlessMethod.typeParameters) + "]");
         }
     }
@@ -987,7 +815,6 @@ public final class PainlessLookupBuilder {
     }
 
     public void addPainlessClassBinding(Class<?> targetClass, String methodName, Class<?> returnType, List<Class<?>> typeParameters) {
-
         Objects.requireNonNull(targetClass);
         Objects.requireNonNull(methodName);
         Objects.requireNonNull(returnType);
@@ -1100,31 +927,24 @@ public final class PainlessLookupBuilder {
             throw new IllegalArgumentException("class binding and imported method cannot have the same name [" + methodName + "]");
         }
 
-        PainlessClassBinding painlessClassBinding = painlessMethodKeysToPainlessClassBindings.get(painlessMethodKey);
+        PainlessClassBinding existingPainlessClassBinding = painlessMethodKeysToPainlessClassBindings.get(painlessMethodKey);
+        PainlessClassBinding newPainlessClassBinding =
+                new PainlessClassBinding(javaConstructor, javaMethod, returnType, typeParameters);
 
-        if (painlessClassBinding == null) {
-            Constructor<?> finalJavaConstructor = javaConstructor;
-            Method finalJavaMethod = javaMethod;
-
-            painlessClassBinding = painlessClassBindingCache.computeIfAbsent(
-                    new PainlessClassBindingCacheKey(targetClass, methodName, returnType, typeParameters),
-                    key -> new PainlessClassBinding(finalJavaConstructor, finalJavaMethod, returnType, typeParameters));
-
-            painlessMethodKeysToPainlessClassBindings.put(painlessMethodKey, painlessClassBinding);
-        } else if (painlessClassBinding.javaConstructor.equals(javaConstructor) == false ||
-                painlessClassBinding.javaMethod.equals(javaMethod) == false ||
-                painlessClassBinding.returnType != returnType ||
-                painlessClassBinding.typeParameters.equals(typeParameters) == false) {
-            throw new IllegalArgumentException("cannot have class bindings " +
+        if (existingPainlessClassBinding == null) {
+            newPainlessClassBinding = painlessClassBindingCache.computeIfAbsent(newPainlessClassBinding, key -> key);
+            painlessMethodKeysToPainlessClassBindings.put(painlessMethodKey, newPainlessClassBinding);
+        } else if (newPainlessClassBinding.equals(existingPainlessClassBinding) == false) {
+            throw new IllegalArgumentException("cannot add class bindings with the same name and arity " +
+                    "but are not equivalent for methods " +
                 "[[" + targetCanonicalClassName + "], " +
                 "[" + methodName + "], " +
                 "[" + typeToCanonicalTypeName(returnType) + "], " +
                 typesToCanonicalTypeNames(typeParameters) + "] and " +
                 "[[" + targetCanonicalClassName + "], " +
                 "[" + methodName + "], " +
-                    "[" + typeToCanonicalTypeName(painlessClassBinding.returnType) + "], " +
-                    typesToCanonicalTypeNames(painlessClassBinding.typeParameters) + "] and " +
-                    "with the same name and arity but different constructors or methods");
+                    "[" + typeToCanonicalTypeName(existingPainlessClassBinding.returnType) + "], " +
+                    typesToCanonicalTypeNames(existingPainlessClassBinding.typeParameters) + "]");
         }
     }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java
index 89462170ae5..ce10d7a1b89 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java
@@ -24,6 +24,7 @@ import java.lang.invoke.MethodType;
 import java.lang.reflect.Method;
 import java.util.Collections;
 import java.util.List;
+import java.util.Objects;
 
 public class PainlessMethod {
 
@@ -44,4 +45,28 @@ public class PainlessMethod {
         this.methodHandle = methodHandle;
         this.methodType = methodType;
     }
+
+    @Override
+    public boolean equals(Object object) {
+        if (this == object) {
+            return true;
+        }
+
+        if (object == null || getClass() != object.getClass()) {
+            return false;
+        }
+
+        PainlessMethod that = (PainlessMethod)object;
+
+        return Objects.equals(javaMethod, that.javaMethod) &&
+                Objects.equals(targetClass, that.targetClass) &&
+                Objects.equals(returnType, that.returnType) &&
+                Objects.equals(typeParameters, that.typeParameters) &&
+                Objects.equals(methodType, that.methodType);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(javaMethod, targetClass, returnType, typeParameters, methodType);
+    }
 }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java
index 8abd3c7185d..01946066af9 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java
@@ -417,18 +417,33 @@ public final class SSource extends AStatement {
         for (AStatement statement : statements) {
             statement.write(writer, globals);
         }
-
         if (!methodEscape) {
             switch (scriptClassInfo.getExecuteMethod().getReturnType().getSort()) {
-                case org.objectweb.asm.Type.VOID: break;
-                case org.objectweb.asm.Type.BOOLEAN: writer.push(false); break;
-                case org.objectweb.asm.Type.BYTE: writer.push(0); break;
-                case org.objectweb.asm.Type.SHORT: writer.push(0); break;
-                case org.objectweb.asm.Type.INT: writer.push(0); break;
-                case org.objectweb.asm.Type.LONG: writer.push(0L); break;
-                case org.objectweb.asm.Type.FLOAT: writer.push(0f); break;
-                case org.objectweb.asm.Type.DOUBLE: writer.push(0d); break;
-                default: writer.visitInsn(Opcodes.ACONST_NULL);
+                case org.objectweb.asm.Type.VOID:
+                    break;
+                case org.objectweb.asm.Type.BOOLEAN:
+                    writer.push(false);
+                    break;
+                case org.objectweb.asm.Type.BYTE:
+                    writer.push(0);
+                    break;
+                case org.objectweb.asm.Type.SHORT:
+                    writer.push(0);
+                    break;
+                case org.objectweb.asm.Type.INT:
+                    writer.push(0);
+                    break;
+                case org.objectweb.asm.Type.LONG:
+                    writer.push(0L);
+                    break;
+                case org.objectweb.asm.Type.FLOAT:
+                    writer.push(0f);
+                    break;
+                case org.objectweb.asm.Type.DOUBLE:
+                    writer.push(0d);
+                    break;
+                default:
+                    writer.visitInsn(Opcodes.ACONST_NULL);
             }
             writer.returnValue();
         }
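The `SSource` hunk above covers scripts that can fall off the end of `execute`: before `returnValue()`, the writer pushes the return type's default value, keyed off the ASM type sort. A plain-Java sketch of the same type-to-default mapping — a hypothetical helper for illustration, not the actual bytecode emission:

-------------------------------------
public final class DefaultReturnValues {

    // Mirrors the switch in SSource: false/0/0L/0f/0d for primitives,
    // nothing for void, and null (ACONST_NULL) for reference types.
    static Object defaultFor(Class<?> returnType) {
        if (returnType == void.class) return null; // nothing is pushed
        if (returnType == boolean.class) return false;
        if (returnType == byte.class || returnType == short.class || returnType == int.class) return 0;
        if (returnType == long.class) return 0L;
        if (returnType == float.class) return 0f;
        if (returnType == double.class) return 0d;
        return null;
    }

    public static void main(String[] args) {
        System.out.println(defaultFor(boolean.class)); // false
        System.out.println(defaultFor(long.class));    // 0
        System.out.println(defaultFor(String.class));  // null
    }
}
-------------------------------------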
((Gets)scriptEngine.compile(compiler, null, "testMap.put(testString, testInt); testMap", emptyMap(), "s", -1, null)).execute()); + assertEquals(1, ((Gets)scriptEngine.compile(compiler, null, "testInt", emptyMap()).newInstance("s", -1, null)).execute()); + assertEquals(Collections.emptyMap(), + ((Gets)scriptEngine.compile(compiler, null, "testMap", emptyMap()).newInstance("s", -1, null)).execute()); + assertEquals(Collections.singletonMap("1", "1"), ((Gets)scriptEngine.compile( + compiler, null, "testMap", emptyMap()).newInstance("s", -1, Collections.singletonMap("1", "1"))).execute()); + assertEquals("s", ((Gets)scriptEngine.compile(compiler, null, "testString", emptyMap()).newInstance("s", -1, null)).execute()); + assertEquals(map, ((Gets)scriptEngine.compile( + compiler, null, "testMap.put(testString, testInt); testMap", emptyMap()).newInstance("s", -1, null)).execute()); } public abstract static class NoArgs { public static final String[] PARAMETERS = new String[] {}; public abstract Object execute(); } - public void testNoArgs() { + public void testNoArgs() throws Exception { Compiler compiler = new Compiler(NoArgs.class, null, null, painlessLookup); - assertEquals(1, ((NoArgs)scriptEngine.compile(compiler, null, "1", emptyMap())).execute()); - assertEquals("foo", ((NoArgs)scriptEngine.compile(compiler, null, "'foo'", emptyMap())).execute()); + assertEquals(1, ((NoArgs)scriptEngine.compile(compiler, null, "1", emptyMap()).newInstance()).execute()); + assertEquals("foo", ((NoArgs)scriptEngine.compile(compiler, null, "'foo'", emptyMap()).newInstance()).execute()); Exception e = expectScriptThrows(IllegalArgumentException.class, () -> scriptEngine.compile(compiler, null, "doc", emptyMap())); @@ -110,12 +111,12 @@ public class BaseClassTests extends ScriptTestCase { public static final String[] PARAMETERS = new String[] {"arg"}; public abstract Object execute(Object arg); } - public void testOneArg() { + public void testOneArg() throws Exception { Compiler compiler = new Compiler(OneArg.class, null, null, painlessLookup); Object rando = randomInt(); - assertEquals(rando, ((OneArg)scriptEngine.compile(compiler, null, "arg", emptyMap())).execute(rando)); + assertEquals(rando, ((OneArg)scriptEngine.compile(compiler, null, "arg", emptyMap()).newInstance()).execute(rando)); rando = randomAlphaOfLength(5); - assertEquals(rando, ((OneArg)scriptEngine.compile(compiler, null, "arg", emptyMap())).execute(rando)); + assertEquals(rando, ((OneArg)scriptEngine.compile(compiler, null, "arg", emptyMap()).newInstance()).execute(rando)); Compiler noargs = new Compiler(NoArgs.class, null, null, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, () -> @@ -131,34 +132,38 @@ public class BaseClassTests extends ScriptTestCase { public static final String[] PARAMETERS = new String[] {"arg"}; public abstract Object execute(String[] arg); } - public void testArrayArg() { + public void testArrayArg() throws Exception { Compiler compiler = new Compiler(ArrayArg.class, null, null, painlessLookup); String rando = randomAlphaOfLength(5); - assertEquals(rando, ((ArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap())).execute(new String[] {rando, "foo"})); + assertEquals(rando, + ((ArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap()).newInstance()).execute(new String[] {rando, "foo"})); } public abstract static class PrimitiveArrayArg { public static final String[] PARAMETERS = new String[] {"arg"}; public abstract Object execute(int[] arg); } - public void 
testPrimitiveArrayArg() { + public void testPrimitiveArrayArg() throws Exception { Compiler compiler = new Compiler(PrimitiveArrayArg.class, null, null, painlessLookup); int rando = randomInt(); - assertEquals(rando, ((PrimitiveArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap())).execute(new int[] {rando, 10})); + assertEquals(rando, ((PrimitiveArrayArg)scriptEngine.compile( + compiler, null, "arg[0]", emptyMap()).newInstance()).execute(new int[] {rando, 10})); } public abstract static class DefArrayArg { public static final String[] PARAMETERS = new String[] {"arg"}; public abstract Object execute(Object[] arg); } - public void testDefArrayArg() { + public void testDefArrayArg()throws Exception { Compiler compiler = new Compiler(DefArrayArg.class, null, null, painlessLookup); Object rando = randomInt(); - assertEquals(rando, ((DefArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap())).execute(new Object[] {rando, 10})); + assertEquals(rando, + ((DefArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap()).newInstance()).execute(new Object[] {rando, 10})); rando = randomAlphaOfLength(5); - assertEquals(rando, ((DefArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap())).execute(new Object[] {rando, 10})); - assertEquals(5, - ((DefArrayArg)scriptEngine.compile(compiler, null, "arg[0].length()", emptyMap())).execute(new Object[] {rando, 10})); + assertEquals(rando, + ((DefArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap()).newInstance()).execute(new Object[] {rando, 10})); + assertEquals(5, ((DefArrayArg)scriptEngine.compile( + compiler, null, "arg[0].length()", emptyMap()).newInstance()).execute(new Object[] {rando, 10})); } public abstract static class ManyArgs { @@ -169,24 +174,24 @@ public class BaseClassTests extends ScriptTestCase { public abstract boolean needsC(); public abstract boolean needsD(); } - public void testManyArgs() { + public void testManyArgs() throws Exception { Compiler compiler = new Compiler(ManyArgs.class, null, null, painlessLookup); int rando = randomInt(); - assertEquals(rando, ((ManyArgs)scriptEngine.compile(compiler, null, "a", emptyMap())).execute(rando, 0, 0, 0)); - assertEquals(10, ((ManyArgs)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap())).execute(1, 2, 3, 4)); + assertEquals(rando, ((ManyArgs)scriptEngine.compile(compiler, null, "a", emptyMap()).newInstance()).execute(rando, 0, 0, 0)); + assertEquals(10, ((ManyArgs)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap()).newInstance()).execute(1, 2, 3, 4)); // While we're here we can verify that painless correctly finds used variables - ManyArgs script = (ManyArgs)scriptEngine.compile(compiler, null, "a", emptyMap()); + ManyArgs script = (ManyArgs)scriptEngine.compile(compiler, null, "a", emptyMap()).newInstance(); assertTrue(script.needsA()); assertFalse(script.needsB()); assertFalse(script.needsC()); assertFalse(script.needsD()); - script = (ManyArgs)scriptEngine.compile(compiler, null, "a + b + c", emptyMap()); + script = (ManyArgs)scriptEngine.compile(compiler, null, "a + b + c", emptyMap()).newInstance(); assertTrue(script.needsA()); assertTrue(script.needsB()); assertTrue(script.needsC()); assertFalse(script.needsD()); - script = (ManyArgs)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap()); + script = (ManyArgs)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap()).newInstance(); assertTrue(script.needsA()); assertTrue(script.needsB()); assertTrue(script.needsC()); @@ -197,10 
+202,11 @@ public class BaseClassTests extends ScriptTestCase { public static final String[] PARAMETERS = new String[] {"arg"}; public abstract Object execute(String... arg); } - public void testVararg() { + public void testVararg() throws Exception { Compiler compiler = new Compiler(VarargTest.class, null, null, painlessLookup); - assertEquals("foo bar baz", ((VarargTest)scriptEngine.compile(compiler, null, "String.join(' ', Arrays.asList(arg))", emptyMap())) - .execute("foo", "bar", "baz")); + assertEquals("foo bar baz", + ((VarargTest)scriptEngine.compile(compiler, null, "String.join(' ', Arrays.asList(arg))", emptyMap()).newInstance()) + .execute("foo", "bar", "baz")); } public abstract static class DefaultMethods { @@ -213,26 +219,29 @@ public class BaseClassTests extends ScriptTestCase { return execute(a, b, c, 1); } } - public void testDefaultMethods() { + public void testDefaultMethods() throws Exception { Compiler compiler = new Compiler(DefaultMethods.class, null, null, painlessLookup); int rando = randomInt(); - assertEquals(rando, ((DefaultMethods)scriptEngine.compile(compiler, null, "a", emptyMap())).execute(rando, 0, 0, 0)); - assertEquals(rando, ((DefaultMethods)scriptEngine.compile(compiler, null, "a", emptyMap())).executeWithASingleOne(rando, 0, 0)); - assertEquals(10, ((DefaultMethods)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap())).execute(1, 2, 3, 4)); - assertEquals(4, ((DefaultMethods)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap())).executeWithOne()); - assertEquals(7, ((DefaultMethods)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap())).executeWithASingleOne(1, 2, 3)); + assertEquals(rando, ((DefaultMethods)scriptEngine.compile(compiler, null, "a", emptyMap()).newInstance()).execute(rando, 0, 0, 0)); + assertEquals(rando, + ((DefaultMethods)scriptEngine.compile(compiler, null, "a", emptyMap()).newInstance()).executeWithASingleOne(rando, 0, 0)); + assertEquals(10, + ((DefaultMethods)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap()).newInstance()).execute(1, 2, 3, 4)); + assertEquals(4, ((DefaultMethods)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap()).newInstance()).executeWithOne()); + assertEquals(7, ((DefaultMethods)scriptEngine.compile( + compiler, null, "a + b + c + d", emptyMap()).newInstance()).executeWithASingleOne(1, 2, 3)); } public abstract static class ReturnsVoid { public static final String[] PARAMETERS = new String[] {"map"}; public abstract void execute(Map map); } - public void testReturnsVoid() { + public void testReturnsVoid() throws Exception { Compiler compiler = new Compiler(ReturnsVoid.class, null, null, painlessLookup); Map map = new HashMap<>(); - ((ReturnsVoid)scriptEngine.compile(compiler, null, "map.a = 'foo'", emptyMap())).execute(map); + ((ReturnsVoid)scriptEngine.compile(compiler, null, "map.a = 'foo'", emptyMap()).newInstance()).execute(map); assertEquals(singletonMap("a", "foo"), map); - ((ReturnsVoid)scriptEngine.compile(compiler, null, "map.remove('a')", emptyMap())).execute(map); + ((ReturnsVoid)scriptEngine.compile(compiler, null, "map.remove('a')", emptyMap()).newInstance()).execute(map); assertEquals(emptyMap(), map); String debug = Debugger.toString(ReturnsVoid.class, "int i = 0", new CompilerSettings()); @@ -246,19 +255,23 @@ public class BaseClassTests extends ScriptTestCase { public static final String[] PARAMETERS = new String[] {}; public abstract boolean execute(); } - public void testReturnsPrimitiveBoolean() { + public void 
testReturnsPrimitiveBoolean() throws Exception { Compiler compiler = new Compiler(ReturnsPrimitiveBoolean.class, null, null, painlessLookup); - assertEquals(true, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "true", emptyMap())).execute()); - assertEquals(false, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "false", emptyMap())).execute()); - assertEquals(true, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "Boolean.TRUE", emptyMap())).execute()); - assertEquals(false, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "Boolean.FALSE", emptyMap())).execute()); - - assertEquals(true, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = true; i", emptyMap())).execute()); + assertEquals(true, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "true", emptyMap()).newInstance()).execute()); + assertEquals(false, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "false", emptyMap()).newInstance()).execute()); assertEquals(true, - ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = Boolean.TRUE; i", emptyMap())).execute()); + ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "Boolean.TRUE", emptyMap()).newInstance()).execute()); + assertEquals(false, + ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "Boolean.FALSE", emptyMap()).newInstance()).execute()); - assertEquals(true, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "true || false", emptyMap())).execute()); + assertEquals(true, + ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = true; i", emptyMap()).newInstance()).execute()); + assertEquals(true, ((ReturnsPrimitiveBoolean)scriptEngine.compile( + compiler, null, "def i = Boolean.TRUE; i", emptyMap()).newInstance()).execute()); + + assertEquals(true, + ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "true || false", emptyMap()).newInstance()).execute()); String debug = Debugger.toString(ReturnsPrimitiveBoolean.class, "false", new CompilerSettings()); assertThat(debug, containsString("ICONST_0")); @@ -266,41 +279,44 @@ public class BaseClassTests extends ScriptTestCase { assertThat(debug, containsString("IRETURN")); Exception e = expectScriptThrows(ClassCastException.class, () -> - ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "1L", emptyMap())).execute()); + ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "1L", emptyMap()).newInstance()).execute()); assertEquals("Cannot cast from [long] to [boolean].", e.getMessage()); e = expectScriptThrows(ClassCastException.class, () -> - ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "1.1f", emptyMap())).execute()); + ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "1.1f", emptyMap()).newInstance()).execute()); assertEquals("Cannot cast from [float] to [boolean].", e.getMessage()); e = expectScriptThrows(ClassCastException.class, () -> - ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "1.1d", emptyMap())).execute()); + ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "1.1d", emptyMap()).newInstance()).execute()); assertEquals("Cannot cast from [double] to [boolean].", e.getMessage()); expectScriptThrows(ClassCastException.class, () -> - ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = 1L; i", emptyMap())).execute()); + ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = 1L; i", emptyMap()).newInstance()).execute()); 
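// As with the def-typed long just above, the float and double variants below assert only the
// exception type; the cast failure surfaces at runtime through def dispatch, and these
// assertions do not pin down the message the way the constant-expression cases above do.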
expectScriptThrows(ClassCastException.class, () -> - ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = 1.1f; i", emptyMap())).execute()); + ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = 1.1f; i", emptyMap()).newInstance()).execute()); expectScriptThrows(ClassCastException.class, () -> - ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = 1.1d; i", emptyMap())).execute()); + ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = 1.1d; i", emptyMap()).newInstance()).execute()); - assertEquals(false, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "int i = 0", emptyMap())).execute()); + assertEquals(false, + ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "int i = 0", emptyMap()).newInstance()).execute()); } public abstract static class ReturnsPrimitiveInt { public static final String[] PARAMETERS = new String[] {}; public abstract int execute(); } - public void testReturnsPrimitiveInt() { + public void testReturnsPrimitiveInt() throws Exception { Compiler compiler = new Compiler(ReturnsPrimitiveInt.class, null, null, painlessLookup); - assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1", emptyMap())).execute()); - assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "(int) 1L", emptyMap())).execute()); - assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "(int) 1.1d", emptyMap())).execute()); - assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "(int) 1.1f", emptyMap())).execute()); - assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "Integer.valueOf(1)", emptyMap())).execute()); + assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1", emptyMap()).newInstance()).execute()); + assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "(int) 1L", emptyMap()).newInstance()).execute()); + assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "(int) 1.1d", emptyMap()).newInstance()).execute()); + assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "(int) 1.1f", emptyMap()).newInstance()).execute()); + assertEquals(1, + ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "Integer.valueOf(1)", emptyMap()).newInstance()).execute()); - assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = 1; i", emptyMap())).execute()); - assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = Integer.valueOf(1); i", emptyMap())).execute()); + assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = 1; i", emptyMap()).newInstance()).execute()); + assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile( + compiler, null, "def i = Integer.valueOf(1); i", emptyMap()).newInstance()).execute()); - assertEquals(2, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1 + 1", emptyMap())).execute()); + assertEquals(2, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1 + 1", emptyMap()).newInstance()).execute()); String debug = Debugger.toString(ReturnsPrimitiveInt.class, "1", new CompilerSettings()); assertThat(debug, containsString("ICONST_1")); @@ -308,88 +324,99 @@ public class BaseClassTests extends ScriptTestCase { assertThat(debug, containsString("IRETURN")); Exception e = expectScriptThrows(ClassCastException.class, () -> - ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1L", 
emptyMap())).execute()); + ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1L", emptyMap()).newInstance()).execute()); assertEquals("Cannot cast from [long] to [int].", e.getMessage()); e = expectScriptThrows(ClassCastException.class, () -> - ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1.1f", emptyMap())).execute()); + ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1.1f", emptyMap()).newInstance()).execute()); assertEquals("Cannot cast from [float] to [int].", e.getMessage()); e = expectScriptThrows(ClassCastException.class, () -> - ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1.1d", emptyMap())).execute()); + ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1.1d", emptyMap()).newInstance()).execute()); assertEquals("Cannot cast from [double] to [int].", e.getMessage()); expectScriptThrows(ClassCastException.class, () -> - ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = 1L; i", emptyMap())).execute()); + ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = 1L; i", emptyMap()).newInstance()).execute()); expectScriptThrows(ClassCastException.class, () -> - ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = 1.1f; i", emptyMap())).execute()); + ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = 1.1f; i", emptyMap()).newInstance()).execute()); expectScriptThrows(ClassCastException.class, () -> - ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = 1.1d; i", emptyMap())).execute()); + ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = 1.1d; i", emptyMap()).newInstance()).execute()); - assertEquals(0, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "int i = 0", emptyMap())).execute()); + assertEquals(0, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "int i = 0", emptyMap()).newInstance()).execute()); } public abstract static class ReturnsPrimitiveFloat { public static final String[] PARAMETERS = new String[] {}; public abstract float execute(); } - public void testReturnsPrimitiveFloat() { + public void testReturnsPrimitiveFloat() throws Exception { Compiler compiler = new Compiler(ReturnsPrimitiveFloat.class, null, null, painlessLookup); - assertEquals(1.1f, ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "1.1f", emptyMap())).execute(), 0); - assertEquals(1.1f, ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "(float) 1.1d", emptyMap())).execute(), 0); - assertEquals(1.1f, ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "def d = 1.1f; d", emptyMap())).execute(), 0); + assertEquals(1.1f, ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "1.1f", emptyMap()).newInstance()).execute(), 0); assertEquals(1.1f, - ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "def d = Float.valueOf(1.1f); d", emptyMap())).execute(), 0); + ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "(float) 1.1d", emptyMap()).newInstance()).execute(), 0); + assertEquals(1.1f, + ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "def d = 1.1f; d", emptyMap()).newInstance()).execute(), 0); + assertEquals(1.1f, ((ReturnsPrimitiveFloat)scriptEngine.compile( + compiler, null, "def d = Float.valueOf(1.1f); d", emptyMap()).newInstance()).execute(), 0); - assertEquals(1.1f + 6.7f, ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "1.1f + 6.7f", emptyMap())).execute(), 0); + assertEquals(1.1f + 6.7f, + ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, 
null, "1.1f + 6.7f", emptyMap()).newInstance()).execute(), 0); Exception e = expectScriptThrows(ClassCastException.class, () -> - ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "1.1d", emptyMap())).execute()); + ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "1.1d", emptyMap()).newInstance()).execute()); assertEquals("Cannot cast from [double] to [float].", e.getMessage()); e = expectScriptThrows(ClassCastException.class, () -> - ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "def d = 1.1d; d", emptyMap())).execute()); - e = expectScriptThrows(ClassCastException.class, () -> - ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "def d = Double.valueOf(1.1); d", emptyMap())).execute()); + ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "def d = 1.1d; d", emptyMap()).newInstance()).execute()); + e = expectScriptThrows(ClassCastException.class, () -> ((ReturnsPrimitiveFloat)scriptEngine.compile( + compiler, null, "def d = Double.valueOf(1.1); d", emptyMap()).newInstance()).execute()); String debug = Debugger.toString(ReturnsPrimitiveFloat.class, "1f", new CompilerSettings()); assertThat(debug, containsString("FCONST_1")); // The important thing here is that we have the bytecode for returning a float instead of an object assertThat(debug, containsString("FRETURN")); - assertEquals(0.0f, ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "int i = 0", emptyMap())).execute(), 0); + assertEquals(0.0f, + ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "int i = 0", emptyMap()).newInstance()).execute(), 0); } public abstract static class ReturnsPrimitiveDouble { public static final String[] PARAMETERS = new String[] {}; public abstract double execute(); } - public void testReturnsPrimitiveDouble() { + public void testReturnsPrimitiveDouble() throws Exception { Compiler compiler = new Compiler(ReturnsPrimitiveDouble.class, null, null, painlessLookup); - assertEquals(1.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1", emptyMap())).execute(), 0); - assertEquals(1.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1L", emptyMap())).execute(), 0); - assertEquals(1.1, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1.1d", emptyMap())).execute(), 0); - assertEquals((double) 1.1f, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1.1f", emptyMap())).execute(), 0); - assertEquals(1.1, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "Double.valueOf(1.1)", emptyMap())).execute(), 0); + assertEquals(1.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1", emptyMap()).newInstance()).execute(), 0); + assertEquals(1.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1L", emptyMap()).newInstance()).execute(), 0); + assertEquals(1.1, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1.1d", emptyMap()).newInstance()).execute(), 0); assertEquals((double) 1.1f, - ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "Float.valueOf(1.1f)", emptyMap())).execute(), 0); + ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1.1f", emptyMap()).newInstance()).execute(), 0); + assertEquals(1.1, ((ReturnsPrimitiveDouble)scriptEngine.compile( + compiler, null, "Double.valueOf(1.1)", emptyMap()).newInstance()).execute(), 0); + assertEquals((double) 1.1f, ((ReturnsPrimitiveDouble)scriptEngine.compile( + compiler, null, "Float.valueOf(1.1f)", emptyMap()).newInstance()).execute(), 0); - assertEquals(1.0, 
((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = 1; d", emptyMap())).execute(), 0); - assertEquals(1.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = 1L; d", emptyMap())).execute(), 0); - assertEquals(1.1, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = 1.1d; d", emptyMap())).execute(), 0); - assertEquals((double) 1.1f, - ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = 1.1f; d", emptyMap())).execute(), 0); + assertEquals(1.0, + ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = 1; d", emptyMap()).newInstance()).execute(), 0); + assertEquals(1.0, + ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = 1L; d", emptyMap()).newInstance()).execute(), 0); assertEquals(1.1, - ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = Double.valueOf(1.1); d", emptyMap())).execute(), 0); + ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = 1.1d; d", emptyMap()).newInstance()).execute(), 0); assertEquals((double) 1.1f, - ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = Float.valueOf(1.1f); d", emptyMap())).execute(), 0); + ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = 1.1f; d", emptyMap()).newInstance()).execute(), 0); + assertEquals(1.1, ((ReturnsPrimitiveDouble)scriptEngine.compile( + compiler, null, "def d = Double.valueOf(1.1); d", emptyMap()).newInstance()).execute(), 0); + assertEquals((double) 1.1f, ((ReturnsPrimitiveDouble)scriptEngine.compile( + compiler, null, "def d = Float.valueOf(1.1f); d", emptyMap()).newInstance()).execute(), 0); - assertEquals(1.1 + 6.7, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1.1 + 6.7", emptyMap())).execute(), 0); + assertEquals(1.1 + 6.7, + ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1.1 + 6.7", emptyMap()).newInstance()).execute(), 0); String debug = Debugger.toString(ReturnsPrimitiveDouble.class, "1", new CompilerSettings()); assertThat(debug, containsString("DCONST_1")); // The important thing here is that we have the bytecode for returning a double instead of an object assertThat(debug, containsString("DRETURN")); - assertEquals(0.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "int i = 0", emptyMap())).execute(), 0); + assertEquals(0.0, + ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "int i = 0", emptyMap()).newInstance()).execute(), 0); } public abstract static class NoArgumentsConstant { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java index 9863db0b21e..c5cc723ca84 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java @@ -67,7 +67,8 @@ public class BasicAPITests extends ScriptTestCase { ctx.put("_source", _source); params.put("ctx", ctx); - assertEquals("testvalue", exec("ctx._source['load'].5 = ctx._source['load'].remove('load5')", params, true)); + assertEquals("testvalue", exec("params.ctx._source['load'].5 = params.ctx._source['load'].remove('load5')", + params, true)); } /** Test loads and stores with a list */
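The BaseClassTests hunks above all make the same mechanical change: Compiler#compile no longer hands back a ready-to-use script object but something that must be instantiated via newInstance(), which is also why every test method now declares `throws Exception`. The call shape is consistent with a reflective Constructor, though the diff only shows the call sites. A minimal, self-contained sketch of that pattern (all names below are illustrative, not taken from the Elasticsearch sources):

-------------------------------------
import java.lang.reflect.Constructor;

public class FactoryPatternSketch {

    public abstract static class ScriptBase {
        public abstract int execute(int[] arg);
    }

    public static class FirstElement extends ScriptBase {
        @Override
        public int execute(int[] arg) {
            return arg[0];
        }
    }

    // Stand-in for scriptEngine.compile(...): hand back the constructor, not
    // an instance, so each caller materializes its own script object.
    static Constructor<FirstElement> compile() throws Exception {
        return FirstElement.class.getConstructor();
    }

    public static void main(String[] args) throws Exception {
        // Mirrors the test idiom: compile(...).newInstance(), then execute().
        ScriptBase script = compile().newInstance();
        System.out.println(script.execute(new int[] {42, 10})); // prints 42
    }
}
-------------------------------------

The BasicAPITests hunk directly above is the companion change on the script side: with the old executable-script plumbing gone, test scripts reach their variables through `params` rather than as bare top-level names.

diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java index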
0f5844c6599..6f632b5df48 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java @@ -262,19 +262,19 @@ public class BasicStatementTests extends ScriptTestCase { "for (int i = 0; i < array.length; i++) { sum += array[i] } return sum", Collections.emptyMap(), Collections.singletonMap(CompilerSettings.MAX_LOOP_COUNTER, "0"), - null, true + true )); assertEquals(6L, exec("long sum = 0; long[] array = new long[] { 1, 2, 3 };" + "int i = 0; while (i < array.length) { sum += array[i++] } return sum", Collections.emptyMap(), Collections.singletonMap(CompilerSettings.MAX_LOOP_COUNTER, "0"), - null, true + true )); assertEquals(6L, exec("long sum = 0; long[] array = new long[] { 1, 2, 3 };" + "int i = 0; do { sum += array[i++] } while (i < array.length); return sum", Collections.emptyMap(), Collections.singletonMap(CompilerSettings.MAX_LOOP_COUNTER, "0"), - null, true + true )); } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java index 4bcc557d3dc..167deb3a20b 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java @@ -19,46 +19,51 @@ package org.elasticsearch.painless; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.painless.spi.Whitelist; +import org.elasticsearch.script.ScriptContext; import java.util.Collections; -import java.util.HashMap; +import java.util.List; import java.util.Map; public class BindingsTests extends ScriptTestCase { + public abstract static class BindingsTestScript { + public static final String[] PARAMETERS = { "test", "bound" }; + public abstract int execute(int test, int bound); + public interface Factory { + BindingsTestScript newInstance(); + } + public static final ScriptContext CONTEXT = new ScriptContext<>("bindings_test", Factory.class); + } + + @Override + protected Map, List> scriptContexts() { + Map, List> contexts = super.scriptContexts(); + contexts.put(BindingsTestScript.CONTEXT, Whitelist.BASE_WHITELISTS); + return contexts; + } + public void testBasicBinding() { assertEquals(15, exec("testAddWithState(4, 5, 6, 0.0)")); } public void testRepeatedBinding() { - String script = "testAddWithState(4, 5, params.test, 0.0)"; - Map params = new HashMap<>(); - ExecutableScript.Factory factory = scriptEngine.compile(null, script, ExecutableScript.CONTEXT, Collections.emptyMap()); - ExecutableScript executableScript = factory.newInstance(params); + String script = "testAddWithState(4, 5, test, 0.0)"; + BindingsTestScript.Factory factory = scriptEngine.compile(null, script, BindingsTestScript.CONTEXT, Collections.emptyMap()); + BindingsTestScript executableScript = factory.newInstance(); - executableScript.setNextVar("test", 5); - assertEquals(14, executableScript.run()); - - executableScript.setNextVar("test", 4); - assertEquals(13, executableScript.run()); - - executableScript.setNextVar("test", 7); - assertEquals(16, executableScript.run()); + assertEquals(14, executableScript.execute(5, 0)); + assertEquals(13, executableScript.execute(4, 0)); + assertEquals(16, executableScript.execute(7, 0)); } public void testBoundBinding() { - String script = "testAddWithState(4, params.bound, params.test, 0.0)"; - Map params = new HashMap<>(); - 
ExecutableScript.Factory factory = scriptEngine.compile(null, script, ExecutableScript.CONTEXT, Collections.emptyMap()); - ExecutableScript executableScript = factory.newInstance(params); + String script = "testAddWithState(4, bound, test, 0.0)"; + BindingsTestScript.Factory factory = scriptEngine.compile(null, script, BindingsTestScript.CONTEXT, Collections.emptyMap()); + BindingsTestScript executableScript = factory.newInstance(); - executableScript.setNextVar("test", 5); - executableScript.setNextVar("bound", 1); - assertEquals(10, executableScript.run()); - - executableScript.setNextVar("test", 4); - executableScript.setNextVar("bound", 2); - assertEquals(9, executableScript.run()); + assertEquals(10, executableScript.execute(5, 1)); + assertEquals(9, executableScript.execute(4, 2)); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java index 50a377b8818..86f2af32d16 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.painless.spi.Whitelist; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.SearchLookup; @@ -45,7 +44,6 @@ public class NeedsScoreTests extends ESSingleNodeTestCase { Map, List> contexts = new HashMap<>(); contexts.put(SearchScript.CONTEXT, Whitelist.BASE_WHITELISTS); - contexts.put(ExecutableScript.CONTEXT, Whitelist.BASE_WHITELISTS); PainlessScriptEngine service = new PainlessScriptEngine(Settings.EMPTY, contexts); QueryShardContext shardContext = index.newQueryShardContext(0, null, () -> 0, null); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ReservedWordTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ReservedWordTests.java deleted file mode 100644 index 08b78b1c708..00000000000 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ReservedWordTests.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless; - -import java.util.Collections; -import java.util.HashMap; - -/** Tests for special reserved words such as _score */ -public class ReservedWordTests extends ScriptTestCase { - - /** check that we can't declare a variable of _score, its really reserved! 
*/ - public void testScoreVar() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("int _score = 5; return _score;"); - }); - assertTrue(expected.getMessage().contains("Variable [_score] is already defined")); - } - - /** check that we can't write to _score, its read-only! */ - public void testScoreStore() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("_score = 5; return _score;"); - }); - assertTrue(expected.getMessage().contains("Variable [_score] is read-only")); - } - - /** check that we can't declare a variable of doc, its really reserved! */ - public void testDocVar() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("int doc = 5; return doc;"); - }); - assertTrue(expected.getMessage().contains("Variable [doc] is already defined")); - } - - /** check that we can't write to doc, its read-only! */ - public void testDocStore() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("doc = 5; return doc;"); - }); - assertTrue(expected.getMessage().contains("Variable [doc] is read-only")); - } - - /** check that we can't declare a variable of ctx, its really reserved! */ - public void testCtxVar() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("int ctx = 5; return ctx;"); - }); - assertTrue(expected.getMessage().contains("Variable [ctx] is already defined")); - } - - /** check that we can't write to ctx, its read-only! */ - public void testCtxStore() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("ctx = 5; return ctx;"); - }); - assertTrue(expected.getMessage().contains("Variable [ctx] is read-only")); - } - - /** check that we can modify its contents though */ - public void testCtxStoreMap() { - assertEquals(5, exec("ctx.foo = 5; return ctx.foo;", Collections.singletonMap("ctx", new HashMap()), true)); - } - - /** check that we can't declare a variable of _value, its really reserved! */ - public void testAggregationValueVar() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("int _value = 5; return _value;"); - }); - assertTrue(expected.getMessage().contains("Variable [_value] is already defined")); - } - - /** check that we can't write to _value, its read-only! */ - public void testAggregationValueStore() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("_value = 5; return _value;"); - }); - assertTrue(expected.getMessage().contains("Variable [_value] is read-only")); - } -} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScoreTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScoreTests.java deleted file mode 100644 index 3d19dedd3b0..00000000000 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScoreTests.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless; - -import org.apache.lucene.search.Scorable; - -import java.util.Collections; - -public class ScoreTests extends ScriptTestCase { - - /** Most of a dummy scorer impl that requires overriding just score(). */ - abstract class MockScorer extends Scorable { - @Override - public int docID() { - return 0; - } - } - - public void testScoreWorks() { - assertEquals(2.5, exec("_score", Collections.emptyMap(), Collections.emptyMap(), - new MockScorer() { - @Override - public float score() { - return 2.5f; - } - }, - true)); - } - - public void testScoreNotUsed() { - assertEquals(3.5, exec("3.5", Collections.emptyMap(), Collections.emptyMap(), - new MockScorer() { - @Override - public float score() { - throw new AssertionError("score() should not be called"); - } - }, - true)); - } - - public void testScoreCached() { - assertEquals(9.0, exec("_score + _score", Collections.emptyMap(), Collections.emptyMap(), - new MockScorer() { - private boolean used = false; - @Override - public float score() { - if (used == false) { - return 4.5f; - } - throw new AssertionError("score() should not be called twice"); - } - }, - true)); - } -} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java index 37b1ff68ec5..6594225842e 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java @@ -19,10 +19,7 @@ package org.elasticsearch.painless; -import org.elasticsearch.script.ExecutableScript; - import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -73,37 +70,4 @@ public class ScriptEngineTests extends ScriptTestCase { assertEquals("value1", exec("return params.l.3.prop1;", vars, true)); } - - public void testChangingVarsCrossExecution1() { - Map vars = new HashMap<>(); - Map ctx = new HashMap<>(); - vars.put("ctx", ctx); - - ExecutableScript.Factory factory = - scriptEngine.compile(null, "return ctx.value;", ExecutableScript.CONTEXT, Collections.emptyMap()); - ExecutableScript script = factory.newInstance(vars); - - ctx.put("value", 1); - Object o = script.run(); - assertEquals(1, ((Number) o).intValue()); - - ctx.put("value", 2); - o = script.run(); - assertEquals(2, ((Number) o).intValue()); - } - - public void testChangingVarsCrossExecution2() { - Map vars = new HashMap<>(); - ExecutableScript.Factory factory = - scriptEngine.compile(null, "return params['value'];", ExecutableScript.CONTEXT, Collections.emptyMap()); - ExecutableScript script = factory.newInstance(vars); - - script.setNextVar("value", 1); - Object value = script.run(); - assertEquals(1, ((Number)value).intValue()); - - script.setNextVar("value", 2); - value = script.run(); - assertEquals(2, ((Number)value).intValue()); - } }
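The two testChangingVarsCrossExecution methods deleted above captured ExecutableScript's old contract: bind a mutable vars map at newInstance(vars), poke values in with setNextVar(...), then call run() repeatedly. The replacement style, visible in the BindingsTests hunk earlier in this patch, declares parameters up front and passes them to a typed execute(...) call, so every invocation is self-contained and no state is shared between runs. A rough sketch of the two calling conventions (hypothetical interfaces, not the real ones):

-------------------------------------
import java.util.Map;

// The removed style: shared mutable state between run() calls.
interface StatefulStyle {
    void setNextVar(String name, Object value);
    Object run();
}

// The style the tests migrate to: inputs travel with each call.
interface StatelessStyle {
    Object execute(Map<String, Object> params);
}

public class CallingConventionSketch {
    public static void main(String[] args) {
        StatelessStyle script = params -> params.get("value");
        // No setNextVar between calls; each execute carries its own inputs.
        System.out.println(script.execute(Map.of("value", 1))); // 1
        System.out.println(script.execute(Map.of("value", 2))); // 2
    }
}
-------------------------------------

diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java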
b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java index 577b120fc90..e69a1ad5dcf 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java @@ -20,24 +20,23 @@ package org.elasticsearch.painless; import junit.framework.AssertionFailedError; -import org.apache.lucene.search.Scorable; -import org.elasticsearch.common.lucene.ScorerAware; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.painless.antlr.Walker; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupBuilder; import org.elasticsearch.painless.spi.Whitelist; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.SearchScript; import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.elasticsearch.painless.PainlessExecuteAction.PainlessTestScript; import static org.elasticsearch.painless.node.SSource.MainMethodReserved; import static org.hamcrest.Matchers.hasSize; @@ -69,7 +68,7 @@ public abstract class ScriptTestCase extends ESTestCase { protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); contexts.put(SearchScript.CONTEXT, Whitelist.BASE_WHITELISTS); - contexts.put(ExecutableScript.CONTEXT, Whitelist.BASE_WHITELISTS); + contexts.put(PainlessTestScript.CONTEXT, Whitelist.BASE_WHITELISTS); return contexts; } @@ -87,11 +86,11 @@ public abstract class ScriptTestCase extends ESTestCase { public Object exec(String script, Map vars, boolean picky) { Map compilerSettings = new HashMap<>(); compilerSettings.put(CompilerSettings.INITIAL_CALL_SITE_DEPTH, random().nextBoolean() ? "0" : "10"); - return exec(script, vars, compilerSettings, null, picky); + return exec(script, vars, compilerSettings, picky); } /** Compiles and returns the result of {@code script} with access to {@code vars} and compile-time parameters */ - public Object exec(String script, Map vars, Map compileParams, Scorable scorer, boolean picky) { + public Object exec(String script, Map vars, Map compileParams, boolean picky) { // test for ambiguity errors before running the actual script if picky is true if (picky) { ScriptClassInfo scriptClassInfo = new ScriptClassInfo(PAINLESS_LOOKUP, GenericElasticsearchScript.class); @@ -99,15 +98,12 @@ public abstract class ScriptTestCase extends ESTestCase { pickySettings.setPicky(true); pickySettings.setRegexesEnabled(CompilerSettings.REGEX_ENABLED.get(scriptEngineSettings())); Walker.buildPainlessTree( - scriptClassInfo, new MainMethodReserved(), getTestName(), script, pickySettings, PAINLESS_LOOKUP, null); + scriptClassInfo, new MainMethodReserved(), getTestName(), script, pickySettings, PAINLESS_LOOKUP, null); } // test actual script execution - ExecutableScript.Factory factory = scriptEngine.compile(null, script, ExecutableScript.CONTEXT, compileParams); - ExecutableScript executableScript = factory.newInstance(vars); - if (scorer != null) { - ((ScorerAware)executableScript).setScorer(scorer); - } - return executableScript.run(); + PainlessTestScript.Factory factory = scriptEngine.compile(null, script, PainlessTestScript.CONTEXT, compileParams); + PainlessTestScript testScript = factory.newInstance(vars == null ? 
Collections.emptyMap() : vars); + return testScript.execute(); } /** diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java index 55cce62b819..c73d6c2071a 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java @@ -233,7 +233,7 @@ public class StringTests extends ScriptTestCase { ctx.put("_id", "somerandomid"); params.put("ctx", ctx); - assertEquals("somerandomid.somerandomid", exec("ctx._id += '.' + ctx._id", params, false)); + assertEquals("somerandomid.somerandomid", exec("params.ctx._id += '.' + params.ctx._id", params, false)); assertEquals("somerandomid.somerandomid", exec("String x = 'somerandomid'; x += '.' + x")); assertEquals("somerandomid.somerandomid", exec("def x = 'somerandomid'; x += '.' + x")); } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java index 79d2fe0c53d..32d74d0837c 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java @@ -130,7 +130,7 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { public void testBogusParameter() { IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> { - exec("return 5;", null, Collections.singletonMap("bogusParameterKey", "bogusParameterValue"), null, true); + exec("return 5;", null, Collections.singletonMap("bogusParameterKey", "bogusParameterValue"), true); }); assertTrue(expected.getMessage().contains("Unrecognized compile-time parameter")); } @@ -253,7 +253,7 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { public void testRCurlyNotDelim() { IllegalArgumentException e = expectScriptThrows(IllegalArgumentException.class, () -> { // We don't want PICKY here so we get the normal error message - exec("def i = 1} return 1", emptyMap(), emptyMap(), null, false); + exec("def i = 1} return 1", emptyMap(), emptyMap(), false); }); assertEquals("unexpected token ['}'] was expecting one of [{, ';'}].", e.getMessage()); } @@ -285,7 +285,7 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { public void testCanNotOverrideRegexEnabled() { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> exec("", null, singletonMap(CompilerSettings.REGEX_ENABLED.getKey(), "true"), null, false)); + () -> exec("", null, singletonMap(CompilerSettings.REGEX_ENABLED.getKey(), "true"), false)); assertEquals("[painless.regex.enabled] can only be set on node startup.", e.getMessage()); } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java index f7f3b89773b..61f00647f3c 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java @@ -91,7 +91,7 @@ public class ChildrenIT extends ParentChildTestCase { String category = categories[j] = uniqueCategories[catIndex++ % uniqueCategories.length]; Control control = categoryToControl.get(category); if (control == null) { - 
categoryToControl.put(category, control = new Control()); } control.articleIds.add(id); } @@ -457,14 +457,8 @@ public class ChildrenIT extends ParentChildTestCase { } private static final class Control { - - final String category; final Set articleIds = new HashSet<>(); final Set commentIds = new HashSet<>(); final Map> commenterToCommentId = new HashMap<>(); - - private Control(String category) { - this.category = category; - } } } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java index 33c40c2739c..ebebfa01b67 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.queries.BlendedTermQuery; import org.apache.lucene.queries.CommonTermsQuery; import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; @@ -38,7 +39,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.spans.SpanFirstQuery; import org.apache.lucene.search.spans.SpanNearQuery; import org.apache.lucene.search.spans.SpanNotQuery; @@ -489,43 +489,51 @@ final class QueryAnalyzer { return subResult; } } - int msm = 0; - boolean verified = true; - boolean matchAllDocs = true; - boolean hasDuplicateTerms = false; - Set extractions = new HashSet<>(); - Set seenRangeFields = new HashSet<>(); - for (Result result : conjunctions) { - // In case that there are duplicate query extractions we need to be careful with incrementing msm, - // because that could lead to valid matches not becoming candidate matches: - // query: (field:val1 AND field:val2) AND (field:val2 AND field:val3) - // doc: field: val1 val2 val3 - // So lets be protective and decrease the msm: - int resultMsm = result.minimumShouldMatch; - for (QueryExtraction queryExtraction : result.extractions) { - if (queryExtraction.range != null) { - // In case of range queries each extraction does not simply increment the minimum_should_match - // for that percolator query like for a term based extraction, so that can lead to more false - // positives for percolator queries with range queries than term based queries. - // The is because the way number fields are extracted from the document to be percolated. - // Per field a single range is extracted and if a percolator query has two or more range queries - // on the same field, then the minimum should match can be higher than clauses in the CoveringQuery. - // Therefore right now the minimum should match is incremented once per number field when processing - // the percolator query at index time.
- if (seenRangeFields.add(queryExtraction.range.fieldName)) { - resultMsm = 1; - } else { - resultMsm = 0; - } - } + int msm = 0; + boolean verified = true; + boolean matchAllDocs = true; + boolean hasDuplicateTerms = false; + Set extractions = new HashSet<>(); + Set seenRangeFields = new HashSet<>(); + for (Result result : conjunctions) { + // In case that there are duplicate query extractions we need to be careful with + // incrementing msm, + // because that could lead to valid matches not becoming candidate matches: + // query: (field:val1 AND field:val2) AND (field:val2 AND field:val3) + // doc: field: val1 val2 val3 + // So lets be protective and decrease the msm: + int resultMsm = result.minimumShouldMatch; + for (QueryExtraction queryExtraction : result.extractions) { + if (queryExtraction.range != null) { + // In case of range queries each extraction does not simply increment the + // minimum_should_match + // for that percolator query like for a term based extraction, so that can lead + // to more false + // positives for percolator queries with range queries than term based queries. + // The is because the way number fields are extracted from the document to be + // percolated. + // Per field a single range is extracted and if a percolator query has two or + // more range queries + // on the same field, then the minimum should match can be higher than clauses + // in the CoveringQuery. + // Therefore right now the minimum should match is incremented once per number + // field when processing + // the percolator query at index time. + if (seenRangeFields.add(queryExtraction.range.fieldName)) { + resultMsm = 1; + } else { + resultMsm = 0; + } + } - if (extractions.contains(queryExtraction)) { + if (extractions.contains(queryExtraction)) { - resultMsm = 0; - verified = false; - break; - } - } - msm += resultMsm; + resultMsm = 0; + verified = false; + break; + } + } + msm += resultMsm; if (result.verified == false // If some inner extractions are optional, the result can't be verified diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGain.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGain.java index a6a6830a99c..1494743959b 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGain.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGain.java @@ -33,7 +33,7 @@ import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Objects; -import java.util.Optional; +import java.util.OptionalInt; import java.util.stream.Collectors; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; @@ -119,8 +119,8 @@ public class DiscountedCumulativeGain implements EvaluationMetric { @Override - public Optional forcedSearchSize() { - return Optional.of(k); + public OptionalInt forcedSearchSize() { + return OptionalInt.of(k); } @Override @@ -130,9 +130,13 @@ public class DiscountedCumulativeGain implements EvaluationMetric { List ratingsInSearchHits = new ArrayList<>(ratedHits.size()); int unratedResults = 0; for (RatedSearchHit hit : ratedHits) { - // unknownDocRating might be null, in which case unrated docs will be ignored in the dcg calculation. 
- // we still need to add them as a placeholder so the rank of the subsequent ratings is correct - ratingsInSearchHits.add(hit.getRating().orElse(unknownDocRating)); + if (hit.getRating().isPresent()) { + ratingsInSearchHits.add(hit.getRating().getAsInt()); + } else { + // unknownDocRating might be null, in which case unrated docs will be ignored in the dcg calculation. + // we still need to add them as a placeholder so the rank of the subsequent ratings is correct + ratingsInSearchHits.add(unknownDocRating); + } if (hit.getRating().isPresent() == false) { unratedResults++; } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvaluationMetric.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvaluationMetric.java index d1e89890477..e111af6bd7b 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvaluationMetric.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvaluationMetric.java @@ -29,7 +29,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; -import java.util.Optional; +import java.util.OptionalInt; import java.util.stream.Collectors; /** @@ -64,9 +64,9 @@ public interface EvaluationMetric extends ToXContentObject, NamedWriteable { DocumentKey key = new DocumentKey(hit.getIndex(), hit.getId()); RatedDocument ratedDoc = ratedDocumentMap.get(key); if (ratedDoc != null) { - ratedSearchHits.add(new RatedSearchHit(hit, Optional.of(ratedDoc.getRating()))); + ratedSearchHits.add(new RatedSearchHit(hit, OptionalInt.of(ratedDoc.getRating()))); } else { - ratedSearchHits.add(new RatedSearchHit(hit, Optional.empty())); + ratedSearchHits.add(new RatedSearchHit(hit, OptionalInt.empty())); } } return ratedSearchHits; @@ -93,7 +93,7 @@ public interface EvaluationMetric extends ToXContentObject, NamedWriteable { * this method. The default implementation returns an empty optional. * @return the number of search hits this metrics requests */ - default Optional forcedSearchSize() { - return Optional.empty(); + default OptionalInt forcedSearchSize() { + return OptionalInt.empty(); } } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRank.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRank.java index 39e1266504d..2c4fa58a799 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRank.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRank.java @@ -32,7 +32,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Objects; -import java.util.Optional; +import java.util.OptionalInt; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; @@ -126,8 +126,8 @@ public class ExpectedReciprocalRank implements EvaluationMetric { @Override - public Optional forcedSearchSize() { - return Optional.of(k); + public OptionalInt forcedSearchSize() { + return OptionalInt.of(k); } @Override @@ -139,9 +139,13 @@ public class ExpectedReciprocalRank implements EvaluationMetric { List ratingsInSearchHits = new ArrayList<>(ratedHits.size()); int unratedResults = 0; for (RatedSearchHit hit : ratedHits) { - // unknownDocRating might be null, in which case unrated will be ignored in the calculation. 
- // we still need to add them as a placeholder so the rank of the subsequent ratings is correct - ratingsInSearchHits.add(hit.getRating().orElse(unknownDocRating)); + if (hit.getRating().isPresent()) { + ratingsInSearchHits.add(hit.getRating().getAsInt()); + } else { + // unknownDocRating might be null, in which case unrated docs will be ignored in the calculation. + // we still need to add them as a placeholder so the rank of the subsequent ratings is correct + ratingsInSearchHits.add(unknownDocRating); + } if (hit.getRating().isPresent() == false) { unratedResults++; } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/MeanReciprocalRank.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/MeanReciprocalRank.java index 5781f13dafe..8b0ed42acb5 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/MeanReciprocalRank.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/MeanReciprocalRank.java @@ -30,7 +30,7 @@ import org.elasticsearch.search.SearchHit; import java.io.IOException; import java.util.List; import java.util.Objects; -import java.util.Optional; +import java.util.OptionalInt; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; @@ -90,8 +90,8 @@ public class MeanReciprocalRank implements EvaluationMetric { } @Override - public Optional forcedSearchSize() { - return Optional.of(k); + public OptionalInt forcedSearchSize() { + return OptionalInt.of(k); } @Override @@ -115,9 +115,9 @@ public class MeanReciprocalRank implements EvaluationMetric { int firstRelevant = -1; int rank = 1; for (RatedSearchHit hit : ratedHits) { - Optional rating = hit.getRating(); + OptionalInt rating = hit.getRating(); if (rating.isPresent()) { - if (rating.get() >= this.relevantRatingThreshhold) { + if (rating.getAsInt() >= this.relevantRatingThreshhold) { firstRelevant = rank; break; } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/PrecisionAtK.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/PrecisionAtK.java index 136158ea5cb..bb5a579ead6 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/PrecisionAtK.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/PrecisionAtK.java @@ -30,7 +30,7 @@ import org.elasticsearch.search.SearchHit; import java.io.IOException; import java.util.List; import java.util.Objects; -import java.util.Optional; +import java.util.OptionalInt; import javax.naming.directory.SearchResult; @@ -144,8 +144,8 @@ public class PrecisionAtK implements EvaluationMetric { } @Override - public Optional forcedSearchSize() { - return Optional.of(k); + public OptionalInt forcedSearchSize() { + return OptionalInt.of(k); } public static PrecisionAtK fromXContent(XContentParser parser) { @@ -164,9 +164,9 @@ public class PrecisionAtK implements EvaluationMetric { int falsePositives = 0; List ratedSearchHits = joinHitsWithRatings(hits, ratedDocs); for (RatedSearchHit hit : ratedSearchHits) { - Optional rating = hit.getRating(); + OptionalInt rating = hit.getRating(); if (rating.isPresent()) { - if (rating.get() >= this.relevantRatingThreshhold) { + if (rating.getAsInt() >= this.relevantRatingThreshhold) { truePositives++; } else { falsePositives++;
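Throughout the rank-eval hunks above and below, Optional&lt;Integer&gt; becomes OptionalInt: ratings are primitive ints, so the specialized type avoids boxing, and get() calls become getAsInt(). One consequence shows up in the DCG and ERR hunks: OptionalInt.orElse(int) takes a primitive and can never yield null, so the old getRating().orElse(unknownDocRating) (where unknownDocRating may legitimately be null) has to become an explicit isPresent() branch. A small standalone sketch of that difference:

-------------------------------------
import java.util.OptionalInt;

public class OptionalIntSketch {
    public static void main(String[] args) {
        OptionalInt rated = OptionalInt.of(3);
        OptionalInt unrated = OptionalInt.empty();

        // getAsInt() replaces Optional<Integer>.get(); no boxing involved.
        System.out.println(rated.getAsInt()); // 3

        // orElse(int) cannot return null, so a nullable placeholder (like
        // unknownDocRating in the hunks above) needs an explicit branch.
        Integer unknownDocRating = null;
        Integer rating;
        if (unrated.isPresent()) {
            rating = unrated.getAsInt();
        } else {
            rating = unknownDocRating; // may stay null on purpose
        }
        System.out.println(rating); // null
    }
}
-------------------------------------

diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedSearchHit.java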
b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedSearchHit.java index 9d8f4cc33d6..4b76d837b95 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedSearchHit.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedSearchHit.java @@ -33,7 +33,7 @@ import org.elasticsearch.search.SearchHit; import java.io.IOException; import java.util.Objects; -import java.util.Optional; +import java.util.OptionalInt; /** * Combines a {@link SearchHit} with a document rating. @@ -41,16 +41,16 @@ import java.util.Optional; public class RatedSearchHit implements Writeable, ToXContentObject { private final SearchHit searchHit; - private final Optional rating; + private final OptionalInt rating; - public RatedSearchHit(SearchHit searchHit, Optional rating) { + public RatedSearchHit(SearchHit searchHit, OptionalInt rating) { this.searchHit = searchHit; this.rating = rating; } RatedSearchHit(StreamInput in) throws IOException { this(SearchHit.readSearchHit(in), - in.readBoolean() == true ? Optional.of(in.readVInt()) : Optional.empty()); + in.readBoolean() == true ? OptionalInt.of(in.readVInt()) : OptionalInt.empty()); } @Override @@ -58,7 +58,7 @@ public class RatedSearchHit implements Writeable, ToXContentObject { searchHit.writeTo(out); out.writeBoolean(rating.isPresent()); if (rating.isPresent()) { - out.writeVInt(rating.get()); + out.writeVInt(rating.getAsInt()); } } @@ -66,7 +66,7 @@ public class RatedSearchHit implements Writeable, ToXContentObject { return this.searchHit; } - public Optional getRating() { + public OptionalInt getRating() { return this.rating; } @@ -75,22 +75,21 @@ public class RatedSearchHit implements Writeable, ToXContentObject { throws IOException { builder.startObject(); builder.field("hit", (ToXContent) searchHit); - builder.field("rating", rating.orElse(null)); + builder.field("rating", rating.isPresent() ? rating.getAsInt() : null); builder.endObject(); return builder; } private static final ParseField HIT_FIELD = new ParseField("hit"); private static final ParseField RATING_FIELD = new ParseField("rating"); - @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("rated_hit", true, - a -> new RatedSearchHit((SearchHit) a[0], (Optional) a[1])); + a -> new RatedSearchHit((SearchHit) a[0], (OptionalInt) a[1])); static { PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> SearchHit.fromXContent(p), HIT_FIELD); PARSER.declareField(ConstructingObjectParser.constructorArg(), - (p) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? Optional.empty() : Optional.of(p.intValue()), RATING_FIELD, - ValueType.INT_OR_NULL); + (p) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
OptionalInt.empty() : OptionalInt.of(p.intValue()), + RATING_FIELD, ValueType.INT_OR_NULL); } public static RatedSearchHit parse(XContentParser parser) throws IOException { diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java index 80d3d674aed..2e9e57d2c73 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java @@ -119,7 +119,7 @@ public class TransportRankEvalAction extends HandledTransportAction randomIntBetween(0, 10)), - original.getIgnoreUnlabeled(), original.forcedSearchSize().get()); + original.getIgnoreUnlabeled(), original.forcedSearchSize().getAsInt()); break; case 2: pAtK = new PrecisionAtK(original.getRelevantRatingThreshold(), - original.getIgnoreUnlabeled(), original.forcedSearchSize().get() + 1); + original.getIgnoreUnlabeled(), original.forcedSearchSize().getAsInt() + 1); break; default: throw new IllegalStateException("The test should only allow three parameters mutated"); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java index 7d594c852da..cdad280fd9a 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java @@ -128,7 +128,7 @@ public class RankEvalRequestIT extends ESIntegTestCase { if (id.equals("1") || id.equals("6")) { assertFalse(hit.getRating().isPresent()); } else { - assertEquals(RELEVANT_RATING_1, hit.getRating().get().intValue()); + assertEquals(RELEVANT_RATING_1, hit.getRating().getAsInt()); } } } @@ -139,7 +139,7 @@ public class RankEvalRequestIT extends ESIntegTestCase { for (RatedSearchHit hit : hitsAndRatings) { String id = hit.getSearchHit().getId(); if (id.equals("1")) { - assertEquals(RELEVANT_RATING_1, hit.getRating().get().intValue()); + assertEquals(RELEVANT_RATING_1, hit.getRating().getAsInt()); } else { assertFalse(hit.getRating().isPresent()); } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java index 673808f8369..3223ec0266d 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java @@ -50,7 +50,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Optional; +import java.util.OptionalInt; import java.util.function.Predicate; import static java.util.Collections.singleton; @@ -182,6 +182,6 @@ public class RankEvalResponseTests extends ESTestCase { SearchHit hit = new SearchHit(docId, docId + "", new Text(""), Collections.emptyMap()); hit.shard(new SearchShardTarget("testnode", new Index(index, "uuid"), 0, null)); hit.score(1.0f); - return new RatedSearchHit(hit, rating != null ? Optional.of(rating) : Optional.empty()); + return new RatedSearchHit(hit, rating != null ? 
OptionalInt.of(rating) : OptionalInt.empty()); } } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java index 622c49a9886..1f45392b7f2 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java @@ -30,7 +30,7 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.Collections; -import java.util.Optional; +import java.util.OptionalInt; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; @@ -38,8 +38,8 @@ import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; public class RatedSearchHitTests extends ESTestCase { public static RatedSearchHit randomRatedSearchHit() { - Optional rating = randomBoolean() ? Optional.empty() - : Optional.of(randomIntBetween(0, 5)); + OptionalInt rating = randomBoolean() ? OptionalInt.empty() + : OptionalInt.of(randomIntBetween(0, 5)); SearchHit searchHit = new SearchHit(randomIntBetween(0, 10), randomAlphaOfLength(10), new Text(randomAlphaOfLength(10)), Collections.emptyMap()); RatedSearchHit ratedSearchHit = new RatedSearchHit(searchHit, rating); @@ -47,11 +47,11 @@ public class RatedSearchHitTests extends ESTestCase { } private static RatedSearchHit mutateTestItem(RatedSearchHit original) { - Optional rating = original.getRating(); + OptionalInt rating = original.getRating(); SearchHit hit = original.getSearchHit(); switch (randomIntBetween(0, 1)) { case 0: - rating = rating.isPresent() ? Optional.of(rating.get() + 1) : Optional.of(randomInt(5)); + rating = rating.isPresent() ? 
OptionalInt.of(rating.getAsInt() + 1) : OptionalInt.of(randomInt(5)); break; case 1: hit = new SearchHit(hit.docId(), hit.getId() + randomAlphaOfLength(10), diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java index 731a27aa72c..e55dab1c38f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -36,7 +36,6 @@ import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -104,7 +103,6 @@ public abstract class AbstractAsyncBulkByScrollAction listener; private final Retry bulkRetry; private final ScrollableHitSource scrollSource; - private final Settings settings; /** * This BiFunction is used to apply various changes depending of the Reindex action and the search hit, @@ -113,15 +111,9 @@ public abstract class AbstractAsyncBulkByScrollAction, ScrollableHitSource.Hit, RequestWrapper> scriptApplier; - public AbstractAsyncBulkByScrollAction(BulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, - ThreadPool threadPool, Request mainRequest, ScriptService scriptService, - ClusterState clusterState, ActionListener listener) { - this(task, logger, client, threadPool, mainRequest, scriptService, clusterState, listener, client.settings()); - } - public AbstractAsyncBulkByScrollAction(BulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, ThreadPool threadPool, Request mainRequest, ScriptService scriptService, ClusterState clusterState, - ActionListener listener, Settings settings) { + ActionListener listener) { this.task = task; if (!task.isWorker()) { @@ -131,7 +123,6 @@ public abstract class AbstractAsyncBulkByScrollAction params; - private UpdateScript executable; - private Map context; - public ScriptApplier(WorkerBulkByScrollTaskState taskWorker, ScriptService scriptService, Script script, @@ -765,16 +753,8 @@ public abstract class AbstractAsyncBulkByScrollAction(); - } else { - context.clear(); - } + Map context = new HashMap<>(); context.put(IndexFieldMapper.NAME, doc.getIndex()); context.put(TypeFieldMapper.NAME, doc.getType()); context.put(IdFieldMapper.NAME, doc.getId()); @@ -787,7 +767,9 @@ public abstract class AbstractAsyncBulkByScrollAction createdThreads = emptyList(); - AsyncIndexBySearchAction(BulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, - ThreadPool threadPool, ReindexRequest request, ScriptService scriptService, ClusterState clusterState, - ActionListener listener) { - this(task, logger, client, threadPool, request, scriptService, clusterState, listener, client.settings()); - } - AsyncIndexBySearchAction(BulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, ThreadPool threadPool, ReindexRequest request, ScriptService scriptService, ClusterState clusterState, - ActionListener listener, Settings settings) { - super(task, logger, client, threadPool, request, scriptService, clusterState, listener, settings); + ActionListener 
listener) { + super(task, logger, client, threadPool, request, scriptService, clusterState, listener); } @Override diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java index 00d14822ba0..13eb113b6b4 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java @@ -82,16 +82,10 @@ public class TransportUpdateByQueryAction extends HandledTransportAction { - AsyncIndexBySearchAction(BulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, - ThreadPool threadPool, UpdateByQueryRequest request, ScriptService scriptService, ClusterState clusterState, - ActionListener listener) { - this(task, logger, client, threadPool, request, scriptService, clusterState, listener, client.settings()); - } - AsyncIndexBySearchAction(BulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, ThreadPool threadPool, UpdateByQueryRequest request, ScriptService scriptService, ClusterState clusterState, - ActionListener listener, Settings settings) { - super(task, logger, client, threadPool, request, scriptService, clusterState, listener, settings); + ActionListener listener) { + super(task, logger, client, threadPool, request, scriptService, clusterState, listener); } @Override diff --git a/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java b/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java index 93c785e754a..b5960592508 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java +++ b/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java @@ -45,6 +45,7 @@ import java.util.Collections; public class ReindexDocumentationIT extends ESIntegTestCase { + @SuppressWarnings("unused") public void reindex() { Client client = client(); // tag::reindex1 @@ -55,6 +56,7 @@ public class ReindexDocumentationIT extends ESIntegTestCase { // end::reindex1 } + @SuppressWarnings("unused") public void updateByQuery() { Client client = client(); { @@ -165,6 +167,7 @@ public class ReindexDocumentationIT extends ESIntegTestCase { } } + @SuppressWarnings("unused") public void deleteByQuery() { Client client = client(); // tag::delete-by-query-sync diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java index 94f375e9333..e838b89eb38 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java @@ -20,11 +20,10 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.index.reindex.AbstractAsyncBulkByScrollAction.OpType; -import org.elasticsearch.index.reindex.AbstractAsyncBulkByScrollAction.RequestWrapper; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.script.ExecutableScript; +import 
org.elasticsearch.index.reindex.AbstractAsyncBulkByScrollAction.OpType; +import org.elasticsearch.index.reindex.AbstractAsyncBulkByScrollAction.RequestWrapper; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.UpdateScript; import org.junit.Before; @@ -56,15 +55,12 @@ public abstract class AbstractAsyncBulkByScrollActionScriptTestCase< protected T applyScript(Consumer> scriptBody) { IndexRequest index = new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar")); ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "type", "id", 0); - UpdateScript updateScript = new UpdateScript(Collections.emptyMap()) { + UpdateScript.Factory factory = (params, ctx) -> new UpdateScript(Collections.emptyMap(), ctx) { @Override - public void execute(Map ctx) { + public void execute() { scriptBody.accept(ctx); } - }; - UpdateScript.Factory factory = params -> updateScript; - ExecutableScript simpleExecutableScript = new SimpleExecutableScript(scriptBody); - when(scriptService.compile(any(), eq(ExecutableScript.CONTEXT))).thenReturn(params -> simpleExecutableScript); + }; when(scriptService.compile(any(), eq(UpdateScript.CONTEXT))).thenReturn(factory); AbstractAsyncBulkByScrollAction action = action(scriptService, request().setScript(mockScript(""))); RequestWrapper result = action.buildScriptApplier().apply(AbstractAsyncBulkByScrollAction.wrap(index), doc); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index 6a5610de37a..5c12e85bb4c 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -672,7 +672,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { private class DummyAsyncBulkByScrollAction extends AbstractAsyncBulkByScrollAction { DummyAsyncBulkByScrollAction() { super(testTask, AsyncBulkByScrollActionTests.this.logger, new ParentTaskAssigningClient(client, localNode, testTask), - client.threadPool(), testRequest, null, null, listener, Settings.EMPTY); + client.threadPool(), testRequest, null, null, listener); } @Override diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java index ec34da777b5..2fe6dd91cd1 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.index.reindex.ScrollableHitSource.Hit; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.common.settings.Settings; /** * Index-by-search test for ttl, timestamp, and routing.
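The rank-eval changes above replace Optional<Integer> with OptionalInt in RatedSearchHit, keeping the rating unboxed while leaving the wire format untouched: a presence flag, then the value only when present. A minimal stand-alone sketch of that pattern, using plain java.io streams instead of Elasticsearch's StreamInput/StreamOutput and a fixed-width int instead of writeVInt:

--------------------------------------
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.OptionalInt;

final class OptionalIntWire {
    static void write(DataOutputStream out, OptionalInt rating) throws IOException {
        out.writeBoolean(rating.isPresent()); // presence flag, as in RatedSearchHit.writeTo
        if (rating.isPresent()) {
            out.writeInt(rating.getAsInt());  // primitive write, no Integer boxing
        }
    }

    static OptionalInt read(DataInputStream in) throws IOException {
        return in.readBoolean() ? OptionalInt.of(in.readInt()) : OptionalInt.empty();
    }
}
--------------------------------------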
@@ -78,7 +77,7 @@ public class ReindexMetadataTests extends AbstractAsyncBulkByScrollActionMetadat private class TestAction extends TransportReindexAction.AsyncIndexBySearchAction { TestAction() { super(ReindexMetadataTests.this.task, ReindexMetadataTests.this.logger, null, ReindexMetadataTests.this.threadPool, request(), - null, null, listener(), Settings.EMPTY); + null, null, listener()); } public ReindexRequest mainRequest() { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java index a90b60357c4..732bc9acdb6 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.ScriptService; import java.util.Map; @@ -104,7 +103,7 @@ public class ReindexScriptTests extends AbstractAsyncBulkByScrollActionScriptTes @Override protected TransportReindexAction.AsyncIndexBySearchAction action(ScriptService scriptService, ReindexRequest request) { - return new TransportReindexAction.AsyncIndexBySearchAction(task, logger, null, threadPool, request, scriptService, null, - listener(), Settings.EMPTY); + return new TransportReindexAction.AsyncIndexBySearchAction(task, logger, null, threadPool, request, scriptService, + null, listener()); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java index 9ea2a24bfb1..916c18e38f7 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java @@ -188,7 +188,7 @@ public class RetryTests extends ESIntegTestCase { } Retry retry = new Retry(BackoffPolicy.exponentialBackoff(), client().threadPool()); - BulkResponse initialBulkResponse = retry.withBackoff(client()::bulk, bulk.request(), client().settings()).actionGet(); + BulkResponse initialBulkResponse = retry.withBackoff(client()::bulk, bulk.request()).actionGet(); assertFalse(initialBulkResponse.buildFailureMessage(), initialBulkResponse.hasFailures()); client().admin().indices().prepareRefresh("source").get(); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java index d3f62af907d..3ce8884ff92 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.index.reindex.ScrollableHitSource.Hit; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.common.settings.Settings; public class UpdateByQueryMetadataTests extends AbstractAsyncBulkByScrollActionMetadataTestCase { @@ -44,8 +43,7 @@ public class UpdateByQueryMetadataTests private class TestAction extends TransportUpdateByQueryAction.AsyncIndexBySearchAction { TestAction() { super(UpdateByQueryMetadataTests.this.task, 
UpdateByQueryMetadataTests.this.logger, null, - UpdateByQueryMetadataTests.this.threadPool, request(), null, null, listener(), - Settings.EMPTY); + UpdateByQueryMetadataTests.this.threadPool, request(), null, null, listener()); } @Override diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java index 8c9744aa0dd..90b78b9e108 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.ScriptService; import java.util.Date; @@ -54,7 +53,7 @@ public class UpdateByQueryWithScriptTests @Override protected TransportUpdateByQueryAction.AsyncIndexBySearchAction action(ScriptService scriptService, UpdateByQueryRequest request) { - return new TransportUpdateByQueryAction.AsyncIndexBySearchAction(task, logger, null, threadPool, request, scriptService, null, - listener(), Settings.EMPTY); + return new TransportUpdateByQueryAction.AsyncIndexBySearchAction(task, logger, null, threadPool, request, scriptService, + null, listener()); } } diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle index e7c36ff506e..05e3b1807f0 100644 --- a/modules/transport-netty4/build.gradle +++ b/modules/transport-netty4/build.gradle @@ -34,13 +34,13 @@ compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-tr dependencies { // network stack - compile "io.netty:netty-buffer:4.1.28.Final" - compile "io.netty:netty-codec:4.1.28.Final" - compile "io.netty:netty-codec-http:4.1.28.Final" - compile "io.netty:netty-common:4.1.28.Final" - compile "io.netty:netty-handler:4.1.28.Final" - compile "io.netty:netty-resolver:4.1.28.Final" - compile "io.netty:netty-transport:4.1.28.Final" + compile "io.netty:netty-buffer:4.1.29.Final" + compile "io.netty:netty-codec:4.1.29.Final" + compile "io.netty:netty-codec-http:4.1.29.Final" + compile "io.netty:netty-common:4.1.29.Final" + compile "io.netty:netty-handler:4.1.29.Final" + compile "io.netty:netty-resolver:4.1.29.Final" + compile "io.netty:netty-transport:4.1.29.Final" } dependencyLicenses { diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.28.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.28.Final.jar.sha1 deleted file mode 100644 index f8a652d0dd1..00000000000 --- a/modules/transport-netty4/licenses/netty-buffer-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d6c2d13492778009d33f60e05ed90bcb535d1fd1 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..17798a82aa7 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-buffer-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +c3809f72e4b535b343b7dfa3c0c8210dad2fa5ea \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.28.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.28.Final.jar.sha1 deleted file mode 100644 index 70799bf1032..00000000000 --- a/modules/transport-netty4/licenses/netty-codec-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a38361d893900947524f8a9da980555950e73d6a \ No newline 
at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..f892420795b --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +1651bc2e279216773c234cafe402d68d2a5adc90 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.28.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.28.Final.jar.sha1 deleted file mode 100644 index e1d34ebf89b..00000000000 --- a/modules/transport-netty4/licenses/netty-codec-http-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -897100c1022c780b0a436b9349e507e8fa9800dc \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..aa97345bad1 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +454688b88cea27a4d407202d1fc79a6522345b5e \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.28.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.28.Final.jar.sha1 deleted file mode 100644 index bc951426832..00000000000 --- a/modules/transport-netty4/licenses/netty-common-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -df69ce8bb9b544a71e7bbee290253cf7c93e6bad \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..47140876e6a --- /dev/null +++ b/modules/transport-netty4/licenses/netty-common-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +a5d6a735ed07d8f197daa48db7f097cfc971ee5e \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.28.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.28.Final.jar.sha1 deleted file mode 100644 index 80dc8b8f6fe..00000000000 --- a/modules/transport-netty4/licenses/netty-handler-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a035784682da0126bc25f10713dac732b5082a6d \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..7c2d407f75e --- /dev/null +++ b/modules/transport-netty4/licenses/netty-handler-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +1acf1d94799296a2517533ec75ce7e155e9c4ea7 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.28.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.28.Final.jar.sha1 deleted file mode 100644 index afe004bd716..00000000000 --- a/modules/transport-netty4/licenses/netty-resolver-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f33557dcb31fa20da075ac05e4808115e32ef9b7 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..bac08f57079 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-resolver-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +bbec1dc913732e4773893c14d795b15d6c1e878e \ No newline at end of file diff --git 
a/modules/transport-netty4/licenses/netty-transport-4.1.28.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.28.Final.jar.sha1 deleted file mode 100644 index af19a16d6ed..00000000000 --- a/modules/transport-netty4/licenses/netty-transport-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d2ef28f49d726737f0ffe84bf66529b3bf6e0c0d \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..0ce64132afb --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +c190b90f70e2ae8a48c068afad709e8728fcaa39 \ No newline at end of file diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 5f2e8b8c871..a73b5057280 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -48,6 +48,7 @@ import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; @@ -68,6 +69,7 @@ import java.net.InetSocketAddress; import java.util.Arrays; import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS; @@ -241,11 +243,15 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { } else if (origin.equals(ANY_ORIGIN)) { builder = Netty4CorsConfigBuilder.forAnyOrigin(); } else { - Pattern p = RestUtils.checkCorsSettingForRegex(origin); - if (p == null) { - builder = Netty4CorsConfigBuilder.forOrigins(RestUtils.corsSettingAsArray(origin)); - } else { - builder = Netty4CorsConfigBuilder.forPattern(p); + try { + Pattern p = RestUtils.checkCorsSettingForRegex(origin); + if (p == null) { + builder = Netty4CorsConfigBuilder.forOrigins(RestUtils.corsSettingAsArray(origin)); + } else { + builder = Netty4CorsConfigBuilder.forPattern(p); + } + } catch (PatternSyntaxException e) { + throw new SettingsException("Bad regex in [" + SETTING_CORS_ALLOW_ORIGIN.getKey() + "]: [" + origin + "]", e); } } if (SETTING_CORS_ALLOW_CREDENTIALS.get(settings)) { diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java index 1c3c71d710d..63e38823acb 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java @@ -44,6 +44,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.network.NetworkService; import 
org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -75,6 +76,7 @@ import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import java.util.regex.PatternSyntaxException; import java.util.stream.Collectors; import static org.elasticsearch.common.Strings.collectionToDelimitedString; @@ -148,6 +150,17 @@ public class Netty4HttpServerTransportTests extends ESTestCase { assertFalse(corsConfig.isCredentialsAllowed()); } + public void testCorsConfigWithBadRegex() { + final Settings settings = Settings.builder() + .put(SETTING_CORS_ENABLED.getKey(), true) + .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), "/[*/") + .put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true) + .build(); + SettingsException e = expectThrows(SettingsException.class, () -> Netty4HttpServerTransport.buildCorsConfig(settings)); + assertThat(e.getMessage(), containsString("Bad regex in [http.cors.allow-origin]: [/[*/]")); + assertThat(e.getCause(), instanceOf(PatternSyntaxException.class)); + } + /** * Test that {@link Netty4HttpServerTransport} supports the "Expect: 100-continue" HTTP header * @throws InterruptedException if the client communication with the server is interrupted diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java index 9d6f016086c..4e63727024f 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java @@ -58,7 +58,7 @@ public class SimpleNetty4TransportTests extends AbstractSimpleTransportTestCase BigArrays.NON_RECYCLING_INSTANCE, namedWriteableRegistry, new NoneCircuitBreakerService()) { @Override - protected Version executeHandshake(DiscoveryNode node, TcpChannel channel, TimeValue timeout) throws IOException, + public Version executeHandshake(DiscoveryNode node, TcpChannel channel, TimeValue timeout) throws IOException, InterruptedException { if (doHandshake) { return super.executeHandshake(node, channel, timeout); diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java index 0235e6e8136..2c20d4b4784 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java @@ -23,7 +23,6 @@ import com.ibm.icu.text.Collator; import com.ibm.icu.text.RawCollationKey; import com.ibm.icu.text.RuleBasedCollator; import com.ibm.icu.util.ULocale; - import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexOptions; @@ -34,6 +33,7 @@ import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; +import 
org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; @@ -86,6 +86,7 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper { this.collator = ref.collator; } + @Override public CollationFieldType clone() { return new CollationFieldType(this); } @@ -158,18 +159,25 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper { @Override public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("[fuzzy] queries are not supported on [" + CONTENT_TYPE + "] fields."); } @Override public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, QueryShardContext context) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("[prefix] queries are not supported on [" + CONTENT_TYPE + "] fields."); + } + + @Override + public Query wildcardQuery(String value, + @Nullable MultiTermQuery.RewriteMethod method, + QueryShardContext context) { + throw new UnsupportedOperationException("[wildcard] queries are not supported on [" + CONTENT_TYPE + "] fields."); } @Override public Query regexpQuery(String value, int flags, int maxDeterminizedStates, MultiTermQuery.RewriteMethod method, QueryShardContext context) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("[regexp] queries are not supported on [" + CONTENT_TYPE + "] fields."); } public static DocValueFormat COLLATE_FORMAT = new DocValueFormat() { @@ -239,7 +247,6 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper { private boolean numeric = false; private String variableTop = null; private boolean hiraganaQuaternaryMode = false; - private String nullValue = Defaults.NULL_VALUE; public Builder(String name) { super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java index 71d8f25bf9f..a261e8b3b7e 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java @@ -121,6 +121,14 @@ public class CollationFieldTypeTests extends FieldTypeTestCase { () -> ft.prefixQuery("prefix", null, null)); } + public void testWildcardQuery() { + MappedFieldType ft = createDefaultFieldType(); + ft.setName("field"); + ft.setIndexOptions(IndexOptions.DOCS); + expectThrows(UnsupportedOperationException.class, + () -> ft.wildcardQuery("foo*", null, null)); + } + public void testRangeQuery() { MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java index aa08447fd20..8a8477b2294 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java @@ -509,7 +509,7 @@ import com.amazonaws.services.ec2.model.UpdateSecurityGroupRuleDescriptionsIngre import 
com.amazonaws.services.ec2.model.UpdateSecurityGroupRuleDescriptionsIngressResult; import com.amazonaws.services.ec2.waiters.AmazonEC2Waiters; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.ESLoggerFactory; +import org.apache.logging.log4j.LogManager; import java.util.ArrayList; import java.util.Collection; @@ -521,7 +521,7 @@ import java.util.regex.Pattern; public class AmazonEC2Mock implements AmazonEC2 { - private static final Logger logger = ESLoggerFactory.getLogger(AmazonEC2Mock.class.getName()); + private static final Logger logger = LogManager.getLogger(AmazonEC2Mock.class); public static final String PREFIX_PRIVATE_IP = "10.0.0."; public static final String PREFIX_PUBLIC_IP = "8.8.8."; diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java index dc7f8bf8596..cc9ae0c8daf 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java @@ -19,6 +19,8 @@ package org.elasticsearch.discovery.gce; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import com.google.api.client.auth.oauth2.Credential; import com.google.api.client.googleapis.testing.auth.oauth2.MockGoogleCredential; import com.google.api.client.http.HttpBackOffIOExceptionHandler; @@ -29,19 +31,14 @@ import com.google.api.client.http.HttpResponse; import com.google.api.client.http.HttpUnsuccessfulResponseHandler; import com.google.api.client.util.ExponentialBackOff; import com.google.api.client.util.Sleeper; -import org.apache.logging.log4j.Logger; import org.elasticsearch.cloud.gce.util.Access; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.unit.TimeValue; import java.io.IOException; import java.util.Objects; public class RetryHttpInitializerWrapper implements HttpRequestInitializer { - - private TimeValue maxWait; - - private static final Logger logger = ESLoggerFactory.getLogger(RetryHttpInitializerWrapper.class.getName()); + private static final Logger logger = LogManager.getLogger(RetryHttpInitializerWrapper.class); // Intercepts the request for filling in the "Authorization" // header field, as well as recovering from certain unsuccessful @@ -52,6 +49,8 @@ public class RetryHttpInitializerWrapper implements HttpRequestInitializer { // A sleeper; you can replace it with a mock in your test. 
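The logging cleanups in this section (AmazonEC2Mock, RetryHttpInitializerWrapper) all make the same substitution: the Elasticsearch-specific ESLoggerFactory goes away and the Logger comes straight from Log4j 2. The new idiom, sketched on a stand-in class:

--------------------------------------
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

class AnyComponent {
    // Passing the Class yields the same logger name as the old
    // getLogger(AnyComponent.class.getName()) string form.
    private static final Logger logger = LogManager.getLogger(AnyComponent.class);

    void start() {
        logger.info("started");
    }
}
--------------------------------------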
private final Sleeper sleeper; + private TimeValue maxWait; + public RetryHttpInitializerWrapper(Credential wrappedCredential) { this(wrappedCredential, Sleeper.DEFAULT, TimeValue.timeValueMillis(ExponentialBackOff.DEFAULT_MAX_ELAPSED_TIME_MILLIS)); } diff --git a/plugins/examples/script-expert-scoring/src/main/java/org/elasticsearch/example/expertscript/ExpertScriptPlugin.java b/plugins/examples/script-expert-scoring/src/main/java/org/elasticsearch/example/expertscript/ExpertScriptPlugin.java index cead97696a0..18289d321a5 100644 --- a/plugins/examples/script-expert-scoring/src/main/java/org/elasticsearch/example/expertscript/ExpertScriptPlugin.java +++ b/plugins/examples/script-expert-scoring/src/main/java/org/elasticsearch/example/expertscript/ExpertScriptPlugin.java @@ -31,8 +31,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.script.ScoreScript; +import org.elasticsearch.script.ScoreScript.LeafFactory; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.search.lookup.SearchLookup; /** * An example script plugin that adds a {@link ScriptEngine} implementing expert scoring. @@ -53,81 +55,106 @@ public class ExpertScriptPlugin extends Plugin implements ScriptPlugin { } @Override - public T compile(String scriptName, String scriptSource, ScriptContext context, Map params) { + public T compile(String scriptName, String scriptSource, + ScriptContext context, Map params) { if (context.equals(ScoreScript.CONTEXT) == false) { - throw new IllegalArgumentException(getType() + " scripts cannot be used for context [" + context.name + "]"); + throw new IllegalArgumentException(getType() + + " scripts cannot be used for context [" + + context.name + "]"); } // we use the script "source" as the script identifier if ("pure_df".equals(scriptSource)) { - ScoreScript.Factory factory = (p, lookup) -> new ScoreScript.LeafFactory() { - final String field; - final String term; - { - if (p.containsKey("field") == false) { - throw new IllegalArgumentException("Missing parameter [field]"); - } - if (p.containsKey("term") == false) { - throw new IllegalArgumentException("Missing parameter [term]"); - } - field = p.get("field").toString(); - term = p.get("term").toString(); - } - - @Override - public ScoreScript newInstance(LeafReaderContext context) throws IOException { - PostingsEnum postings = context.reader().postings(new Term(field, term)); - if (postings == null) { - // the field and/or term don't exist in this segment, so always return 0 - return new ScoreScript(p, lookup, context) { - @Override - public double execute() { - return 0.0d; - } - }; - } - return new ScoreScript(p, lookup, context) { - int currentDocid = -1; - @Override - public void setDocument(int docid) { - // advance has undefined behavior calling with a docid <= its current docid - if (postings.docID() < docid) { - try { - postings.advance(docid); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - currentDocid = docid; - } - @Override - public double execute() { - if (postings.docID() != currentDocid) { - // advance moved past the current doc, so this doc has no occurrences of the term - return 0.0d; - } - try { - return postings.freq(); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - }; - } - - @Override - public boolean needs_score() { - return false; - } - }; + ScoreScript.Factory factory = 
PureDfLeafFactory::new; return context.factoryClazz.cast(factory); } - throw new IllegalArgumentException("Unknown script name " + scriptSource); + throw new IllegalArgumentException("Unknown script name " + + scriptSource); } @Override public void close() { // optionally close resources } + + private static class PureDfLeafFactory implements LeafFactory { + private final Map params; + private final SearchLookup lookup; + private final String field; + private final String term; + + private PureDfLeafFactory( + Map params, SearchLookup lookup) { + if (params.containsKey("field") == false) { + throw new IllegalArgumentException( + "Missing parameter [field]"); + } + if (params.containsKey("term") == false) { + throw new IllegalArgumentException( + "Missing parameter [term]"); + } + this.params = params; + this.lookup = lookup; + field = params.get("field").toString(); + term = params.get("term").toString(); + } + + @Override + public boolean needs_score() { + return false; // Return true if the script needs the score + } + + @Override + public ScoreScript newInstance(LeafReaderContext context) + throws IOException { + PostingsEnum postings = context.reader().postings( + new Term(field, term)); + if (postings == null) { + /* + * the field and/or term don't exist in this segment, + * so always return 0 + */ + return new ScoreScript(params, lookup, context) { + @Override + public double execute() { + return 0.0d; + } + }; + } + return new ScoreScript(params, lookup, context) { + int currentDocid = -1; + @Override + public void setDocument(int docid) { + /* + * advance has undefined behavior calling with + * a docid <= its current docid + */ + if (postings.docID() < docid) { + try { + postings.advance(docid); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + currentDocid = docid; + } + @Override + public double execute() { + if (postings.docID() != currentDocid) { + /* + * advance moved past the current doc, so this doc + * has no occurrences of the term + */ + return 0.0d; + } + try { + return postings.freq(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + }; + } + } } // end::expert_engine } diff --git a/plugins/ingest-geoip/build.gradle b/plugins/ingest-geoip/build.gradle index 54facc5aad2..387bc58c574 100644 --- a/plugins/ingest-geoip/build.gradle +++ b/plugins/ingest-geoip/build.gradle @@ -30,7 +30,7 @@ dependencies { compile("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}") compile('com.maxmind.db:maxmind-db:1.2.2') - testCompile 'org.elasticsearch:geolite2-databases:20180303' + testCompile 'org.elasticsearch:geolite2-databases:20180911' } task copyDefaultGeoIp2DatabaseFiles(type: Copy) { diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 4da680f186e..4e09662ed4a 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -134,8 +134,8 @@ public class GeoIpProcessorTests extends ESTestCase { assertThat(geoData.get("city_name"), equalTo("Hollywood")); assertThat(geoData.get("timezone"), equalTo("America/New_York")); Map location = new HashMap<>(); - location.put("lat", 26.0252d); - location.put("lon", -80.296d); + location.put("lat", 25.9825d); + location.put("lon", -80.3434d); assertThat(geoData.get("location"), 
equalTo(location)); } @@ -197,7 +197,7 @@ public class GeoIpProcessorTests extends ESTestCase { } public void testAsn() throws Exception { - String ip = "82.170.213.79"; + String ip = "82.171.64.0"; InputStream database = getDatabaseFileInputStream("/GeoLite2-ASN.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, @@ -213,7 +213,7 @@ public class GeoIpProcessorTests extends ESTestCase { Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData.size(), equalTo(3)); assertThat(geoData.get("ip"), equalTo(ip)); - assertThat(geoData.get("asn"), equalTo(5615)); + assertThat(geoData.get("asn"), equalTo(1136)); assertThat(geoData.get("organization_name"), equalTo("KPN B.V.")); } diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml index 012ca717318..d5c1f8e5c48 100644 --- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml @@ -33,8 +33,8 @@ - length: { _source.geoip: 6 } - match: { _source.geoip.city_name: "Minneapolis" } - match: { _source.geoip.country_iso_code: "US" } - - match: { _source.geoip.location.lon: -93.2166 } - - match: { _source.geoip.location.lat: 44.9759 } + - match: { _source.geoip.location.lon: -93.2323 } + - match: { _source.geoip.location.lat: 44.9733 } - match: { _source.geoip.region_iso_code: "US-MN" } - match: { _source.geoip.region_name: "Minnesota" } - match: { _source.geoip.continent_name: "North America" } @@ -80,8 +80,8 @@ - match: { _source.geoip.city_name: "Minneapolis" } - match: { _source.geoip.country_iso_code: "US" } - match: { _source.geoip.ip: "128.101.101.101" } - - match: { _source.geoip.location.lon: -93.2166 } - - match: { _source.geoip.location.lat: 44.9759 } + - match: { _source.geoip.location.lon: -93.2323 } + - match: { _source.geoip.location.lat: 44.9733 } - match: { _source.geoip.timezone: "America/Chicago" } - match: { _source.geoip.country_name: "United States" } - match: { _source.geoip.region_iso_code: "US-MN" } @@ -193,8 +193,8 @@ - length: { _source.geoip: 6 } - match: { _source.geoip.city_name: "Minneapolis" } - match: { _source.geoip.country_iso_code: "US" } - - match: { _source.geoip.location.lon: -93.2166 } - - match: { _source.geoip.location.lat: 44.9759 } + - match: { _source.geoip.location.lon: -93.2323 } + - match: { _source.geoip.location.lat: 44.9733 } - match: { _source.geoip.region_iso_code: "US-MN" } - match: { _source.geoip.region_name: "Minnesota" } - match: { _source.geoip.continent_name: "North America" } @@ -224,15 +224,15 @@ type: test id: 1 pipeline: "my_pipeline" - body: {field1: "82.170.213.79"} + body: {field1: "82.171.64.0"} - do: get: index: test type: test id: 1 - - match: { _source.field1: "82.170.213.79" } + - match: { _source.field1: "82.171.64.0" } - length: { _source.geoip: 3 } - - match: { _source.geoip.ip: "82.170.213.79" } - - match: { _source.geoip.asn: 5615 } + - match: { _source.geoip.ip: "82.171.64.0" } + - match: { _source.geoip.asn: 1136 } - match: { _source.geoip.organization_name: "KPN B.V." 
} diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java index 8cc38d130ff..c9448dd88e7 100644 --- a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java +++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java @@ -295,7 +295,10 @@ public class AnnotatedTextFieldMapper extends FieldMapper { StringBuilder sb = new StringBuilder(); sb.append(textMinusMarkup); sb.append("\n"); - annotations.forEach(a -> {sb.append(a); sb.append("\n");}); + annotations.forEach(a -> { + sb.append(a); + sb.append("\n"); + }); return sb.toString(); } diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java index 8a51b9a494b..06f4b728c8c 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java @@ -82,20 +82,15 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase { @Before public void setup() { Settings settings = Settings.builder() - .put("index.analysis.filter.mySynonyms.type", "synonym") - .putList("index.analysis.filter.mySynonyms.synonyms", Collections.singletonList("car, auto")) - .put("index.analysis.analyzer.synonym.tokenizer", "standard") - .put("index.analysis.analyzer.synonym.filter", "mySynonyms") - // Stop filter remains in server as it is part of lucene-core .put("index.analysis.analyzer.my_stop_analyzer.tokenizer", "standard") .put("index.analysis.analyzer.my_stop_analyzer.filter", "stop") .build(); indexService = createIndex("test", settings); parser = indexService.mapperService().documentMapperParser(); - } - - - + } + + + @Override protected Collection> getPlugins() { List> classpathPlugins = new ArrayList<>(); @@ -107,16 +102,16 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase { protected String getFieldType() { return "annotated_text"; - } - + } + public void testAnnotationInjection() throws IOException { - + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", getFieldType()).endObject().endObject() .endObject().endObject()); DocumentMapper mapper = indexService.mapperService().merge("type", - new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); // Use example of typed and untyped annotations String annotatedText = "He paid [Stormy Daniels](Stephanie+Clifford&Payee) hush money"; @@ -140,12 +135,12 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase { try (Engine.Searcher searcher = shard.acquireSearcher("test")) { LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader(); TermsEnum terms = leaf.terms("field").iterator(); - + assertTrue(terms.seekExact(new BytesRef("stormy"))); PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS); assertEquals(0, postings.nextDoc()); - assertEquals(2, 
postings.nextPosition()); - + assertEquals(2, postings.nextPosition()); + assertTrue(terms.seekExact(new BytesRef("Stephanie Clifford"))); postings = terms.postings(null, PostingsEnum.POSITIONS); assertEquals(0, postings.nextDoc()); @@ -156,23 +151,23 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase { assertEquals(0, postings.nextDoc()); assertEquals(2, postings.nextPosition()); - + assertTrue(terms.seekExact(new BytesRef("hush"))); postings = terms.postings(null, PostingsEnum.POSITIONS); assertEquals(0, postings.nextDoc()); - assertEquals(4, postings.nextPosition()); - + assertEquals(4, postings.nextPosition()); + } - } - + } + public void testToleranceForBadAnnotationMarkup() throws IOException { - + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", getFieldType()).endObject().endObject() .endObject().endObject()); DocumentMapper mapper = indexService.mapperService().merge("type", - new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); String annotatedText = "foo [bar](MissingEndBracket baz"; SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference @@ -195,12 +190,12 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase { try (Engine.Searcher searcher = shard.acquireSearcher("test")) { LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader(); TermsEnum terms = leaf.terms("field").iterator(); - + assertTrue(terms.seekExact(new BytesRef("foo"))); PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS); assertEquals(0, postings.nextDoc()); - assertEquals(0, postings.nextPosition()); - + assertEquals(0, postings.nextPosition()); + assertTrue(terms.seekExact(new BytesRef("bar"))); postings = terms.postings(null, PostingsEnum.POSITIONS); assertEquals(0, postings.nextDoc()); @@ -209,18 +204,18 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase { assertFalse(terms.seekExact(new BytesRef("MissingEndBracket"))); // Bad markup means value is treated as plain text and fed through tokenisation assertTrue(terms.seekExact(new BytesRef("missingendbracket"))); - + } - } - + } + public void testAgainstTermVectorsAPI() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("tvfield").field("type", getFieldType()) .field("term_vector", "with_positions_offsets_payloads") .endObject().endObject() - .endObject().endObject()); - indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); - + .endObject().endObject()); + indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + int max = between(3, 10); BulkRequestBuilder bulk = client().prepareBulk(); @@ -231,13 +226,13 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase { bulk.get(); TermVectorsRequest request = new TermVectorsRequest("test", "type", "0").termStatistics(true); - + IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); assertThat(shard, notNullValue()); - TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request); - assertEquals(1, response.getFields().size()); + 
TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request); + assertEquals(1, response.getFields().size()); Terms terms = response.getFields().terms("tvfield"); TermsEnum iterator = terms.iterator(); @@ -245,14 +240,14 @@ Set foundTerms = new HashSet<>(); while ((term = iterator.next()) != null) { foundTerms.add(term.utf8ToString()); - } + } //Check we have both text and annotation tokens assertTrue(foundTerms.contains("brown")); assertTrue(foundTerms.contains("Color")); assertTrue(foundTerms.contains("fox")); - - } - + + } + // ===== Code below copied from TextFieldMapperTests ======== public void testDefaults() throws IOException { @@ -616,7 +611,7 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPositions(), equalTo(true)); assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPayloads(), equalTo(true)); } - + public void testNullConfigValuesFail() throws MapperParsingException, IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") @@ -677,5 +672,5 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase { } - + } diff --git a/plugins/mapper-annotated-text/src/test/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml b/plugins/mapper-annotated-text/src/test/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml index 64e0b863bf9..d55ee0ff15b 100644 --- a/plugins/mapper-annotated-text/src/test/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml +++ b/plugins/mapper-annotated-text/src/test/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml @@ -4,8 +4,8 @@ --- "annotated highlighter on annotated text": - skip: - version: " - 6.99.99" - reason: Annotated text type introduced in 7.0.0-alpha1 + version: " - 6.4.99" + reason: Annotated text type introduced in 6.5.0 - do: indices.create: diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index 05218caa065..7715c7086a6 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -19,9 +19,11 @@ package org.elasticsearch.repositories.s3; +import com.amazonaws.AmazonServiceException; import com.amazonaws.services.s3.model.CannedAccessControlList; import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion; +import com.amazonaws.services.s3.model.HeadBucketRequest; import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.S3ObjectSummary; import com.amazonaws.services.s3.model.StorageClass; @@ -66,14 +68,23 @@ class S3BlobStore extends AbstractComponent implements BlobStore { // Note: the method client.doesBucketExist() may return 'true' if the bucket exists // but we don't have access to it (ie, 403 Forbidden response code) - // Also, if invalid security credentials are used to execute this method, the - // client is not able to distinguish between bucket permission errors and - // invalid credential errors, and this method could return an incorrect result.
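The hunk below replaces the coarse doesBucketExist probe with headBucket, whose AmazonServiceException carries the HTTP status code and so lets a wrong-region bucket (301), a forbidden bucket (403), and a missing bucket (404) be reported distinctly. A minimal stand-alone sketch of the same probe against the AWS SDK v1 API (client wiring elided; the helper name ensureBucketExists is illustrative):

--------------------------------------
import com.amazonaws.AmazonServiceException;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.HeadBucketRequest;

final class BucketProbe {
    // Mirrors the status-code distinctions drawn in the new S3BlobStore code.
    static void ensureBucketExists(AmazonS3 client, String bucket) {
        try {
            client.headBucket(new HeadBucketRequest(bucket));
        } catch (AmazonServiceException e) {
            if (e.getStatusCode() == 301) {
                throw new IllegalArgumentException("bucket [" + bucket + "] is in a different region", e);
            } else if (e.getStatusCode() == 403) {
                throw new IllegalArgumentException("no permission to access bucket [" + bucket + "]", e);
            } else if (e.getStatusCode() == 404) {
                throw new IllegalArgumentException("bucket [" + bucket + "] does not exist", e);
            } else {
                throw new IllegalArgumentException("error checking bucket [" + bucket + "]", e);
            }
        }
    }
}
--------------------------------------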
try (AmazonS3Reference clientReference = clientReference()) { SocketAccess.doPrivilegedVoid(() -> { - if (clientReference.client().doesBucketExist(bucket) == false) { - throw new IllegalArgumentException("The bucket [" + bucket + "] does not exist. Please create it before " - + " creating an s3 snapshot repository backed by it."); + try { + clientReference.client().headBucket(new HeadBucketRequest(bucket)); + } catch (final AmazonServiceException e) { + if (e.getStatusCode() == 301) { + throw new IllegalArgumentException("the bucket [" + bucket + "] is in a different region than you configured", e); + } else if (e.getStatusCode() == 403) { + throw new IllegalArgumentException("you do not have permissions to access the bucket [" + bucket + "]", e); + } else if (e.getStatusCode() == 404) { + throw new IllegalArgumentException( + "the bucket [" + bucket + "] does not exist;" + + " please create it before creating an S3 snapshot repository backed by it", + e); + } else { + throw new IllegalArgumentException("error checking the existence of bucket [" + bucket + "]", e); + } } }); } @@ -158,7 +169,9 @@ class S3BlobStore extends AbstractComponent implements BlobStore { return cannedACL; } - public StorageClass getStorageClass() { return storageClass; } + public StorageClass getStorageClass() { + return storageClass; + } public static StorageClass initStorageClass(String storageClass) { if ((storageClass == null) || storageClass.equals("")) { diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java index b5fb01869ae..acb2b19a0f9 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java @@ -20,6 +20,7 @@ package org.elasticsearch.repositories.s3; import com.amazonaws.AmazonClientException; +import com.amazonaws.AmazonServiceException; import com.amazonaws.SdkClientException; import com.amazonaws.services.s3.AbstractAmazonS3; import com.amazonaws.services.s3.model.AmazonS3Exception; @@ -27,6 +28,8 @@ import com.amazonaws.services.s3.model.DeleteObjectRequest; import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.DeleteObjectsResult; import com.amazonaws.services.s3.model.GetObjectRequest; +import com.amazonaws.services.s3.model.HeadBucketRequest; +import com.amazonaws.services.s3.model.HeadBucketResult; import com.amazonaws.services.s3.model.ListObjectsRequest; import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.ObjectMetadata; @@ -73,8 +76,15 @@ class MockAmazonS3 extends AbstractAmazonS3 { } @Override - public boolean doesBucketExist(final String bucket) { - return this.bucket.equalsIgnoreCase(bucket); + public HeadBucketResult headBucket(final HeadBucketRequest headBucketRequest) throws SdkClientException, AmazonServiceException { + if (this.bucket.equalsIgnoreCase(headBucketRequest.getBucketName())) { + return new HeadBucketResult(); + } else { + final AmazonServiceException e = + new AmazonServiceException("bucket [" + headBucketRequest.getBucketName() + "] does not exist"); + e.setStatusCode(404); + throw e; + } } @Override diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java 
index fb6114a6cb2..1c3c47943a0 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -19,9 +19,13 @@ package org.elasticsearch.repositories.s3; +import com.amazonaws.AmazonClientException; +import com.amazonaws.AmazonServiceException; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.model.HeadBucketRequest; +import com.amazonaws.services.s3.model.HeadBucketResult; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.MockSecureSettings; @@ -57,9 +61,10 @@ public class RepositoryCredentialsTests extends ESTestCase { } @Override - public boolean doesBucketExist(String bucketName) { - return true; + public HeadBucketResult headBucket(HeadBucketRequest headBucketRequest) throws AmazonClientException, AmazonServiceException { + return new HeadBucketResult(); } + } static final class ProxyS3Service extends S3Service { diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java index dcc46661bef..b76af23402c 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java @@ -19,7 +19,11 @@ package org.elasticsearch.repositories.s3; +import com.amazonaws.AmazonServiceException; +import com.amazonaws.SdkClientException; import com.amazonaws.services.s3.AbstractAmazonS3; +import com.amazonaws.services.s3.model.HeadBucketRequest; +import com.amazonaws.services.s3.model.HeadBucketResult; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -42,8 +46,8 @@ public class S3RepositoryTests extends ESTestCase { private static class DummyS3Client extends AbstractAmazonS3 { @Override - public boolean doesBucketExist(String bucketName) { - return true; + public HeadBucketResult headBucket(final HeadBucketRequest request) throws SdkClientException, AmazonServiceException { + return new HeadBucketResult(); } @Override diff --git a/plugins/transport-nio/build.gradle b/plugins/transport-nio/build.gradle index cb8916b857c..d6d1793066a 100644 --- a/plugins/transport-nio/build.gradle +++ b/plugins/transport-nio/build.gradle @@ -29,13 +29,13 @@ dependencies { compile "org.elasticsearch:elasticsearch-nio:${version}" // network stack - compile "io.netty:netty-buffer:4.1.28.Final" - compile "io.netty:netty-codec:4.1.28.Final" - compile "io.netty:netty-codec-http:4.1.28.Final" - compile "io.netty:netty-common:4.1.28.Final" - compile "io.netty:netty-handler:4.1.28.Final" - compile "io.netty:netty-resolver:4.1.28.Final" - compile "io.netty:netty-transport:4.1.28.Final" + compile "io.netty:netty-buffer:4.1.29.Final" + compile "io.netty:netty-codec:4.1.29.Final" + compile "io.netty:netty-codec-http:4.1.29.Final" + compile "io.netty:netty-common:4.1.29.Final" + compile "io.netty:netty-handler:4.1.29.Final" + compile "io.netty:netty-resolver:4.1.29.Final" + compile "io.netty:netty-transport:4.1.29.Final" } dependencyLicenses { diff --git 
a/plugins/transport-nio/licenses/netty-buffer-4.1.28.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.28.Final.jar.sha1 deleted file mode 100644 index f8a652d0dd1..00000000000 --- a/plugins/transport-nio/licenses/netty-buffer-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d6c2d13492778009d33f60e05ed90bcb535d1fd1 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..17798a82aa7 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-buffer-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +c3809f72e4b535b343b7dfa3c0c8210dad2fa5ea \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.28.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.28.Final.jar.sha1 deleted file mode 100644 index 70799bf1032..00000000000 --- a/plugins/transport-nio/licenses/netty-codec-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a38361d893900947524f8a9da980555950e73d6a \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..f892420795b --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +1651bc2e279216773c234cafe402d68d2a5adc90 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.28.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.28.Final.jar.sha1 deleted file mode 100644 index e1d34ebf89b..00000000000 --- a/plugins/transport-nio/licenses/netty-codec-http-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -897100c1022c780b0a436b9349e507e8fa9800dc \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..aa97345bad1 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-http-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +454688b88cea27a4d407202d1fc79a6522345b5e \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.28.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.28.Final.jar.sha1 deleted file mode 100644 index bc951426832..00000000000 --- a/plugins/transport-nio/licenses/netty-common-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -df69ce8bb9b544a71e7bbee290253cf7c93e6bad \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..47140876e6a --- /dev/null +++ b/plugins/transport-nio/licenses/netty-common-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +a5d6a735ed07d8f197daa48db7f097cfc971ee5e \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.28.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.28.Final.jar.sha1 deleted file mode 100644 index 80dc8b8f6fe..00000000000 --- a/plugins/transport-nio/licenses/netty-handler-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a035784682da0126bc25f10713dac732b5082a6d \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..7c2d407f75e 
--- /dev/null +++ b/plugins/transport-nio/licenses/netty-handler-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +1acf1d94799296a2517533ec75ce7e155e9c4ea7 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.28.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.28.Final.jar.sha1 deleted file mode 100644 index afe004bd716..00000000000 --- a/plugins/transport-nio/licenses/netty-resolver-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f33557dcb31fa20da075ac05e4808115e32ef9b7 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..bac08f57079 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-resolver-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +bbec1dc913732e4773893c14d795b15d6c1e878e \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.28.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.28.Final.jar.sha1 deleted file mode 100644 index af19a16d6ed..00000000000 --- a/plugins/transport-nio/licenses/netty-transport-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d2ef28f49d726737f0ffe84bf66529b3bf6e0c0d \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..0ce64132afb --- /dev/null +++ b/plugins/transport-nio/licenses/netty-transport-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +c190b90f70e2ae8a48c068afad709e8728fcaa39 \ No newline at end of file diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java index 9c672c1caf1..a7f8768bb69 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.PageCacheRecycler; @@ -57,6 +58,7 @@ import java.nio.channels.SocketChannel; import java.util.Arrays; import java.util.function.Consumer; import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; import static org.elasticsearch.common.settings.Setting.intSetting; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; @@ -176,11 +178,15 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport { } else if (origin.equals(ANY_ORIGIN)) { builder = NioCorsConfigBuilder.forAnyOrigin(); } else { - Pattern p = RestUtils.checkCorsSettingForRegex(origin); - if (p == null) { - builder = NioCorsConfigBuilder.forOrigins(RestUtils.corsSettingAsArray(origin)); - } else { - builder = NioCorsConfigBuilder.forPattern(p); + try { + Pattern p = RestUtils.checkCorsSettingForRegex(origin); + if (p == null) { + builder = NioCorsConfigBuilder.forOrigins(RestUtils.corsSettingAsArray(origin)); + } else { + builder = NioCorsConfigBuilder.forPattern(p); + } 
+ } catch (PatternSyntaxException e) { + throw new SettingsException("Bad regex in [" + SETTING_CORS_ALLOW_ORIGIN.getKey() + "]: [" + origin + "]", e); } } if (SETTING_CORS_ALLOW_CREDENTIALS.get(settings)) { diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpClient.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpClient.java index 785b6ea5970..b49d5b51866 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpClient.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpClient.java @@ -32,9 +32,9 @@ import io.netty.handler.codec.http.HttpRequestEncoder; import io.netty.handler.codec.http.HttpResponse; import io.netty.handler.codec.http.HttpResponseDecoder; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -86,11 +86,11 @@ class NioHttpClient implements Closeable { return list; } + private static final Logger logger = LogManager.getLogger(NioHttpClient.class); + private final NioGroup nioGroup; - private final Logger logger; NioHttpClient() { - logger = Loggers.getLogger(NioHttpClient.class, Settings.EMPTY); try { nioGroup = new NioGroup(daemonThreadFactory(Settings.EMPTY, "nio-http-client"), 1, (s) -> new EventHandler(this::onException, s)); diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java index 8acec830f11..13b8e60336e 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.MockBigArrays; @@ -65,6 +66,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; +import java.util.regex.PatternSyntaxException; import java.util.stream.Collectors; import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS; @@ -139,6 +141,17 @@ public class NioHttpServerTransportTests extends ESTestCase { assertFalse(corsConfig.isCredentialsAllowed()); } + public void testCorsConfigWithBadRegex() { + final Settings settings = Settings.builder() + .put(SETTING_CORS_ENABLED.getKey(), true) + .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), "/[*/") + .put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true) + .build(); + SettingsException e = expectThrows(SettingsException.class, () -> NioHttpServerTransport.buildCorsConfig(settings)); + assertThat(e.getMessage(), containsString("Bad regex in [http.cors.allow-origin]: [/[*/]")); + assertThat(e.getCause(), instanceOf(PatternSyntaxException.class)); + } + /** * Test that {@link 
NioHttpServerTransport} supports the "Expect: 100-continue" HTTP header * @throws InterruptedException if the client communication with the server is interrupted diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java index baae00f81a3..8f6d78b481d 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java @@ -62,7 +62,7 @@ public class SimpleNioTransportTests extends AbstractSimpleTransportTestCase { new NoneCircuitBreakerService()) { @Override - protected Version executeHandshake(DiscoveryNode node, TcpChannel channel, TimeValue timeout) throws IOException, + public Version executeHandshake(DiscoveryNode node, TcpChannel channel, TimeValue timeout) throws IOException, InterruptedException { if (doHandshake) { return super.executeHandshake(node, channel, timeout); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java index f53c9d3b1f5..53e55faecf9 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java @@ -58,7 +58,7 @@ public class EvilLoggerConfigurationTests extends ESTestCase { } public void testResolveMultipleConfigs() throws Exception { - final Level level = ESLoggerFactory.getLogger("test").getLevel(); + final Level level = LogManager.getLogger("test").getLevel(); try { final Path configDir = getDataPath("config"); final Settings settings = Settings.builder() @@ -106,7 +106,7 @@ public class EvilLoggerConfigurationTests extends ESTestCase { LogConfigurator.configure(environment); final String loggerName = "test"; - final Logger logger = ESLoggerFactory.getLogger(loggerName); + final Logger logger = LogManager.getLogger(loggerName); assertThat(logger.getLevel().toString(), equalTo(level)); } @@ -122,7 +122,7 @@ public class EvilLoggerConfigurationTests extends ESTestCase { // args should overwrite whatever is in the config final String loggerName = "test_resolve_order"; - final Logger logger = ESLoggerFactory.getLogger(loggerName); + final Logger logger = LogManager.getLogger(loggerName); assertTrue(logger.isTraceEnabled()); } @@ -134,14 +134,14 @@ public class EvilLoggerConfigurationTests extends ESTestCase { final Environment environment = new Environment(settings, configDir); LogConfigurator.configure(environment); - assertThat(ESLoggerFactory.getLogger("x").getLevel(), equalTo(Level.TRACE)); - assertThat(ESLoggerFactory.getLogger("x.y").getLevel(), equalTo(Level.DEBUG)); + assertThat(LogManager.getLogger("x").getLevel(), equalTo(Level.TRACE)); + assertThat(LogManager.getLogger("x.y").getLevel(), equalTo(Level.DEBUG)); final Level level = randomFrom(Level.TRACE, Level.DEBUG, Level.INFO, Level.WARN, Level.ERROR); - Loggers.setLevel(ESLoggerFactory.getLogger("x"), level); + Loggers.setLevel(LogManager.getLogger("x"), level); - assertThat(ESLoggerFactory.getLogger("x").getLevel(), equalTo(level)); - assertThat(ESLoggerFactory.getLogger("x.y").getLevel(), equalTo(level)); + assertThat(LogManager.getLogger("x").getLevel(), equalTo(level)); + 
assertThat(LogManager.getLogger("x.y").getLevel(), equalTo(level)); } public void testMissingConfigFile() { diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java index 5013148fb74..bebdb320db4 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java @@ -82,7 +82,7 @@ public class EvilLoggerTests extends ESTestCase { public void testLocationInfoTest() throws IOException, UserException { setupLogging("location_info"); - final Logger testLogger = ESLoggerFactory.getLogger("test"); + final Logger testLogger = LogManager.getLogger("test"); testLogger.error("This is an error message"); testLogger.warn("This is a warning message"); @@ -108,7 +108,7 @@ public class EvilLoggerTests extends ESTestCase { public void testDeprecationLogger() throws IOException, UserException { setupLogging("deprecation"); - final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger("deprecation")); + final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger("deprecation")); final int deprecatedIterations = randomIntBetween(0, 256); for (int i = 0; i < deprecatedIterations; i++) { @@ -135,7 +135,7 @@ public class EvilLoggerTests extends ESTestCase { public void testConcurrentDeprecationLogger() throws IOException, UserException, BrokenBarrierException, InterruptedException { setupLogging("deprecation"); - final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger("deprecation")); + final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger("deprecation")); final int numberOfThreads = randomIntBetween(2, 4); final CyclicBarrier barrier = new CyclicBarrier(1 + numberOfThreads); @@ -214,7 +214,7 @@ public class EvilLoggerTests extends ESTestCase { public void testDeprecationLoggerMaybeLog() throws IOException, UserException { setupLogging("deprecation"); - final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger("deprecation")); + final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger("deprecation")); final int iterations = randomIntBetween(1, 16); @@ -286,12 +286,12 @@ public class EvilLoggerTests extends ESTestCase { public void testFindAppender() throws IOException, UserException { setupLogging("find_appender"); - final Logger hasConsoleAppender = ESLoggerFactory.getLogger("has_console_appender"); + final Logger hasConsoleAppender = LogManager.getLogger("has_console_appender"); final Appender testLoggerConsoleAppender = Loggers.findAppender(hasConsoleAppender, ConsoleAppender.class); assertNotNull(testLoggerConsoleAppender); assertThat(testLoggerConsoleAppender.getName(), equalTo("console")); - final Logger hasCountingNoOpAppender = ESLoggerFactory.getLogger("has_counting_no_op_appender"); + final Logger hasCountingNoOpAppender = LogManager.getLogger("has_counting_no_op_appender"); assertNull(Loggers.findAppender(hasCountingNoOpAppender, ConsoleAppender.class)); final Appender countingNoOpAppender = Loggers.findAppender(hasCountingNoOpAppender, CountingNoOpAppender.class); assertThat(countingNoOpAppender.getName(), equalTo("counting_no_op")); diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java 
b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java index 7bd5cc3a8d2..19c0549d9d2 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java @@ -111,7 +111,6 @@ public class RecoveryIT extends AbstractRollingTestCase { return future; } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/33616") public void testRecoveryWithConcurrentIndexing() throws Exception { final String index = "recovery_with_concurrent_indexing"; Response response = client().performRequest(new Request("GET", "_nodes")); @@ -149,12 +148,12 @@ public class RecoveryIT extends AbstractRollingTestCase { break; case UPGRADED: updateIndexSettings(index, Settings.builder().put(INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), (String)null)); - asyncIndexDocs(index, 60, 50).get(); + asyncIndexDocs(index, 60, 45).get(); ensureGreen(index); client().performRequest(new Request("POST", index + "/_refresh")); - assertCount(index, "_only_nodes:" + nodes.get(0), 110); - assertCount(index, "_only_nodes:" + nodes.get(1), 110); - assertCount(index, "_only_nodes:" + nodes.get(2), 110); + assertCount(index, "_only_nodes:" + nodes.get(0), 105); + assertCount(index, "_only_nodes:" + nodes.get(1), 105); + assertCount(index, "_only_nodes:" + nodes.get(2), 105); break; default: throw new IllegalStateException("unknown type " + CLUSTER_TYPE); @@ -166,7 +165,7 @@ public class RecoveryIT extends AbstractRollingTestCase { request.addParameter("preference", preference); final Response response = client().performRequest(request); final int actualCount = Integer.parseInt(ObjectPath.createFromResponse(response).evaluate("count").toString()); - assertThat(actualCount, equalTo(expectedCount)); + assertThat("preference [" + preference + "]", actualCount, equalTo(expectedCount)); } @@ -184,7 +183,6 @@ public class RecoveryIT extends AbstractRollingTestCase { } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/33616") public void testRelocationWithConcurrentIndexing() throws Exception { final String index = "relocation_with_concurrent_indexing"; switch (CLUSTER_TYPE) { @@ -227,7 +225,7 @@ public class RecoveryIT extends AbstractRollingTestCase { .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 2) .put("index.routing.allocation.include._id", (String)null) ); - asyncIndexDocs(index, 60, 50).get(); + asyncIndexDocs(index, 60, 45).get(); ensureGreen(index); client().performRequest(new Request("POST", index + "/_refresh")); Response response = client().performRequest(new Request("GET", "_nodes")); @@ -235,9 +233,9 @@ public class RecoveryIT extends AbstractRollingTestCase { final Map nodeMap = objectPath.evaluate("nodes"); List nodes = new ArrayList<>(nodeMap.keySet()); - assertCount(index, "_only_nodes:" + nodes.get(0), 110); - assertCount(index, "_only_nodes:" + nodes.get(1), 110); - assertCount(index, "_only_nodes:" + nodes.get(2), 110); + assertCount(index, "_only_nodes:" + nodes.get(0), 105); + assertCount(index, "_only_nodes:" + nodes.get(1), 105); + assertCount(index, "_only_nodes:" + nodes.get(2), 105); break; default: throw new IllegalStateException("unknown type " + CLUSTER_TYPE); diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java index 8f322719487..676608f3be8 100644 --- 
a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java @@ -20,11 +20,11 @@ package org.elasticsearch.smoketest; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -69,7 +69,7 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase { */ public static final String TESTS_CLUSTER = "tests.cluster"; - protected static final Logger logger = ESLoggerFactory.getLogger(ESSmokeClientTestCase.class.getName()); + protected static final Logger logger = LogManager.getLogger(ESSmokeClientTestCase.class); private static final AtomicInteger counter = new AtomicInteger(); private static Client client; diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java index 45629f286fc..cbee11ea41c 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java @@ -186,8 +186,7 @@ public class Archives { "elasticsearch-env", "elasticsearch-keystore", "elasticsearch-plugin", - "elasticsearch-shard", - "elasticsearch-translog" + "elasticsearch-shard" ).forEach(executable -> { assertThat(es.bin(executable), file(File, owner, owner, p755)); diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java index 620ccd5e442..8dea694492b 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java @@ -102,7 +102,6 @@ public class Installation { public final Path elasticsearchKeystore = platformExecutable("elasticsearch-keystore"); public final Path elasticsearchCertutil = platformExecutable("elasticsearch-certutil"); public final Path elasticsearchShard = platformExecutable("elasticsearch-shard"); - public final Path elasticsearchTranslog = platformExecutable("elasticsearch-translog"); private Path platformExecutable(String name) { final String platformExecutableName = Platforms.WINDOWS diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java index 56de8223166..bdad7d56791 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java @@ -187,8 +187,7 @@ public class Packages { "elasticsearch", "elasticsearch-plugin", "elasticsearch-keystore", - "elasticsearch-shard", - "elasticsearch-translog" + "elasticsearch-shard" ).forEach(executable -> assertThat(es.bin(executable), file(File, "root", "root", p755))); Stream.of( diff --git a/qa/vagrant/src/test/resources/packaging/utils/packages.bash b/qa/vagrant/src/test/resources/packaging/utils/packages.bash index f6ba68d84d4..0e218ce21f3 100644 --- 
a/qa/vagrant/src/test/resources/packaging/utils/packages.bash +++ b/qa/vagrant/src/test/resources/packaging/utils/packages.bash @@ -96,7 +96,6 @@ verify_package_installation() { assert_file "$ESHOME/bin/elasticsearch" f root root 755 assert_file "$ESHOME/bin/elasticsearch-plugin" f root root 755 assert_file "$ESHOME/bin/elasticsearch-shard" f root root 755 - assert_file "$ESHOME/bin/elasticsearch-translog" f root root 755 assert_file "$ESHOME/lib" d root root 755 assert_file "$ESCONFIG" d root elasticsearch 2750 assert_file "$ESCONFIG/elasticsearch.keystore" f root elasticsearch 660 diff --git a/qa/vagrant/src/test/resources/packaging/utils/tar.bash b/qa/vagrant/src/test/resources/packaging/utils/tar.bash index 23901cbae99..95e293df054 100644 --- a/qa/vagrant/src/test/resources/packaging/utils/tar.bash +++ b/qa/vagrant/src/test/resources/packaging/utils/tar.bash @@ -95,7 +95,6 @@ verify_archive_installation() { assert_file "$ESHOME/bin/elasticsearch-keystore" f elasticsearch elasticsearch 755 assert_file "$ESHOME/bin/elasticsearch-plugin" f elasticsearch elasticsearch 755 assert_file "$ESHOME/bin/elasticsearch-shard" f elasticsearch elasticsearch 755 - assert_file "$ESHOME/bin/elasticsearch-translog" f elasticsearch elasticsearch 755 assert_file "$ESCONFIG" d elasticsearch elasticsearch 755 assert_file "$ESCONFIG/elasticsearch.yml" f elasticsearch elasticsearch 660 assert_file "$ESCONFIG/jvm.options" f elasticsearch elasticsearch 660 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/_common.json b/rest-api-spec/src/main/resources/rest-api-spec/api/_common.json index 6edd6d80320..69a1f8fb8ce 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/_common.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/_common.json @@ -23,7 +23,7 @@ }, "filter_path": { "type": "list", - "description": "A comma-separated list of filters used to reduce the respone." + "description": "A comma-separated list of filters used to reduce the response." } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query_rethrottle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query_rethrottle.json new file mode 100644 index 00000000000..f49af01cfc3 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query_rethrottle.json @@ -0,0 +1,25 @@ +{ + "delete_by_query_rethrottle": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete-by-query.html", + "methods": ["POST"], + "url": { + "path": "/_delete_by_query/{task_id}/_rethrottle", + "paths": ["/_delete_by_query/{task_id}/_rethrottle"], + "parts": { + "task_id": { + "type": "string", + "required" : true, + "description": "The task id to rethrottle" + } + }, + "params": { + "requests_per_second": { + "type": "number", + "required": true, + "description": "The throttle to set on this request in floating sub-requests per second. -1 means set no throttle." 
+ } + } + }, + "body": null + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex_rethrottle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex_rethrottle.json index 4004409ab68..2763eb8983f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex_rethrottle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex_rethrottle.json @@ -4,7 +4,7 @@ "methods": ["POST"], "url": { "path": "/_reindex/{task_id}/_rethrottle", - "paths": ["/_reindex/{task_id}/_rethrottle", "/_update_by_query/{task_id}/_rethrottle", "/_delete_by_query/{task_id}/_rethrottle"], + "paths": ["/_reindex/{task_id}/_rethrottle"], "parts": { "task_id": { "type": "string", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query_rethrottle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query_rethrottle.json new file mode 100644 index 00000000000..9ec2540b430 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query_rethrottle.json @@ -0,0 +1,25 @@ +{ + "update_by_query_rethrottle": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update-by-query.html", + "methods": ["POST"], + "url": { + "path": "/_update_by_query/{task_id}/_rethrottle", + "paths": ["/_update_by_query/{task_id}/_rethrottle"], + "parts": { + "task_id": { + "type": "string", + "required" : true, + "description": "The task id to rethrottle" + } + }, + "params": { + "requests_per_second": { + "type": "number", + "required": true, + "description": "The throttle to set on this request in floating sub-requests per second. -1 means set no throttle." + } + } + }, + "body": null + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml index 233ff32b418..dc7242f2875 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml @@ -1,18 +1,22 @@ --- "Array of objects": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: bulk: + include_type_name: false refresh: true body: - index: _index: test_index - _type: test_type _id: test_id - f1: v1 f2: 42 - index: _index: test_index - _type: test_type _id: test_id2 - f1: v2 f2: 47 @@ -25,23 +29,26 @@ --- "Empty _id": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: bulk: + include_type_name: false refresh: true body: - index: _index: test - _type: type _id: '' - f: 1 - index: _index: test - _type: type _id: id - f: 2 - index: _index: test - _type: type - f: 3 - match: { errors: true } - match: { items.0.index.status: 400 } @@ -59,12 +66,17 @@ --- "empty action": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: catch: /Malformed action\/metadata line \[3\], expected FIELD_NAME but found \[END_OBJECT\]/ headers: Content-Type: application/json bulk: + include_type_name: false body: | - {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}} + {"index": {"_index": "test_index", "_id": "test_id"}} {"f1": "v1", "f2": 42} {} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/11_basic_with_types.yml new file mode 100644 index 00000000000..233ff32b418 --- /dev/null +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/11_basic_with_types.yml @@ -0,0 +1,70 @@ +--- +"Array of objects": + - do: + bulk: + refresh: true + body: + - index: + _index: test_index + _type: test_type + _id: test_id + - f1: v1 + f2: 42 + - index: + _index: test_index + _type: test_type + _id: test_id2 + - f1: v2 + f2: 47 + + - do: + count: + index: test_index + + - match: {count: 2} + +--- +"Empty _id": + - do: + bulk: + refresh: true + body: + - index: + _index: test + _type: type + _id: '' + - f: 1 + - index: + _index: test + _type: type + _id: id + - f: 2 + - index: + _index: test + _type: type + - f: 3 + - match: { errors: true } + - match: { items.0.index.status: 400 } + - match: { items.0.index.error.type: illegal_argument_exception } + - match: { items.0.index.error.reason: if _id is specified it must not be empty } + - match: { items.1.index.result: created } + - match: { items.2.index.result: created } + + - do: + count: + index: test + + - match: { count: 2 } + +--- +"empty action": + + - do: + catch: /Malformed action\/metadata line \[3\], expected FIELD_NAME but found \[END_OBJECT\]/ + headers: + Content-Type: application/json + bulk: + body: | + {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}} + {"f1": "v1", "f2": 42} + {} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml index def91f42807..742cf49c38e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml @@ -1,12 +1,18 @@ --- "List of strings": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: bulk: + include_type_name: false refresh: true body: - - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}}' + - '{"index": {"_index": "test_index", "_id": "test_id"}}' - '{"f1": "v1", "f2": 42}' - - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id2"}}' + - '{"index": {"_index": "test_index", "_id": "test_id2"}}' - '{"f1": "v2", "f2": 47}' - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/21_list_of_strings_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/21_list_of_strings_with_types.yml new file mode 100644 index 00000000000..def91f42807 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/21_list_of_strings_with_types.yml @@ -0,0 +1,17 @@ +--- +"List of strings": + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}}' + - '{"f1": "v1", "f2": 42}' + - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id2"}}' + - '{"f1": "v2", "f2": 47}' + + - do: + count: + index: test_index + + - match: {count: 2} + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml index 1d117253c9b..15a70fa3f37 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml @@ -1,12 +1,18 @@ --- "One big string": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: bulk: + include_type_name: false refresh: true body: | - {"index": {"_index": "test_index", "_type": 
"test_type", "_id": "test_id"}} + {"index": {"_index": "test_index", "_id": "test_id"}} {"f1": "v1", "f2": 42} - {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id2"}} + {"index": {"_index": "test_index", "_id": "test_id2"}} {"f1": "v2", "f2": 47} - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/31_big_string_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/31_big_string_with_types.yml new file mode 100644 index 00000000000..1d117253c9b --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/31_big_string_with_types.yml @@ -0,0 +1,17 @@ +--- +"One big string": + - do: + bulk: + refresh: true + body: | + {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}} + {"f1": "v1", "f2": 42} + {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id2"}} + {"f1": "v2", "f2": 47} + + - do: + count: + index: test_index + + - match: {count: 2} + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml index c852c376cc0..bf4bd079574 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml @@ -1,37 +1,43 @@ --- "Source filtering": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false refresh: true index: test_index - type: test_type id: test_id_1 body: { "foo": "bar", "bar": "foo" } - do: index: + include_type_name: false refresh: true index: test_index - type: test_type id: test_id_2 body: { "foo": "qux", "bar": "pux" } - do: index: + include_type_name: false refresh: true index: test_index - type: test_type id: test_id_3 body: { "foo": "corge", "bar": "forge" } - do: bulk: + include_type_name: false refresh: true body: | - { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_source": true } } + { "update": { "_index": "test_index", "_id": "test_id_1", "_source": true } } { "doc": { "foo": "baz" } } - { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2" } } + { "update": { "_index": "test_index", "_id": "test_id_2" } } { "_source": true, "doc": { "foo": "quux" } } - match: { items.0.update.get._source.foo: baz } @@ -39,8 +45,8 @@ - do: bulk: + include_type_name: false index: test_index - type: test_type _source: true body: | { "update": { "_id": "test_id_3" } } @@ -50,11 +56,12 @@ - do: bulk: + include_type_name: false refresh: true body: | - { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_source": {"includes": "bar"} } } + { "update": { "_index": "test_index", "_id": "test_id_1", "_source": {"includes": "bar"} } } { "doc": { "foo": "baz" } } - { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2" } } + { "update": { "_index": "test_index", "_id": "test_id_2" } } { "_source": {"includes": "foo"}, "doc": { "foo": "quux" } } - match: { items.0.update.get._source.bar: foo } @@ -64,8 +71,8 @@ - do: bulk: + include_type_name: false index: test_index - type: test_type _source_include: foo body: | { "update": { "_id": "test_id_3" } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/41_source_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/41_source_with_types.yml new file mode 100644 index 00000000000..c852c376cc0 --- /dev/null +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/41_source_with_types.yml @@ -0,0 +1,76 @@ +--- +"Source filtering": + - do: + index: + refresh: true + index: test_index + type: test_type + id: test_id_1 + body: { "foo": "bar", "bar": "foo" } + + - do: + index: + refresh: true + index: test_index + type: test_type + id: test_id_2 + body: { "foo": "qux", "bar": "pux" } + + - do: + index: + refresh: true + index: test_index + type: test_type + id: test_id_3 + body: { "foo": "corge", "bar": "forge" } + + + - do: + bulk: + refresh: true + body: | + { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_source": true } } + { "doc": { "foo": "baz" } } + { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2" } } + { "_source": true, "doc": { "foo": "quux" } } + + - match: { items.0.update.get._source.foo: baz } + - match: { items.1.update.get._source.foo: quux } + + - do: + bulk: + index: test_index + type: test_type + _source: true + body: | + { "update": { "_id": "test_id_3" } } + { "doc": { "foo": "garply" } } + + - match: { items.0.update.get._source.foo: garply } + + - do: + bulk: + refresh: true + body: | + { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_source": {"includes": "bar"} } } + { "doc": { "foo": "baz" } } + { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2" } } + { "_source": {"includes": "foo"}, "doc": { "foo": "quux" } } + + - match: { items.0.update.get._source.bar: foo } + - is_false: items.0.update.get._source.foo + - match: { items.1.update.get._source.foo: quux } + - is_false: items.1.update.get._source.bar + + - do: + bulk: + index: test_index + type: test_type + _source_include: foo + body: | + { "update": { "_id": "test_id_3" } } + { "doc": { "foo": "garply" } } + + - match: { items.0.update.get._source.foo: garply } + - is_false: items.0.update.get._source.bar + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml index 6326b9464ca..059794873ad 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml @@ -1,12 +1,18 @@ --- "refresh=true immediately makes changes are visible in search": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: bulk: + include_type_name: false refresh: true body: | - {"index": {"_index": "bulk_50_refresh_1", "_type": "test_type", "_id": "bulk_50_refresh_id1"}} + {"index": {"_index": "bulk_50_refresh_1", "_id": "bulk_50_refresh_id1"}} {"f1": "v1", "f2": 42} - {"index": {"_index": "bulk_50_refresh_1", "_type": "test_type", "_id": "bulk_50_refresh_id2"}} + {"index": {"_index": "bulk_50_refresh_1", "_id": "bulk_50_refresh_id2"}} {"f1": "v2", "f2": 47} - do: @@ -16,13 +22,19 @@ --- "refresh=empty string immediately makes changes are visible in search": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: bulk: + include_type_name: false refresh: "" body: | - {"index": {"_index": "bulk_50_refresh_2", "_type": "test_type", "_id": "bulk_50_refresh_id3"}} + {"index": {"_index": "bulk_50_refresh_2", "_id": "bulk_50_refresh_id3"}} {"f1": "v1", "f2": 42} - {"index": {"_index": "bulk_50_refresh_2", "_type": "test_type", "_id": "bulk_50_refresh_id4"}} + {"index": {"_index": "bulk_50_refresh_2", "_id": "bulk_50_refresh_id4"}} {"f1": "v2", "f2": 
47} - do: @@ -33,13 +45,19 @@ --- "refresh=wait_for waits until changes are visible in search": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: bulk: + include_type_name: false refresh: wait_for body: | - {"index": {"_index": "bulk_50_refresh_3", "_type": "test_type", "_id": "bulk_50_refresh_id5"}} + {"index": {"_index": "bulk_50_refresh_3", "_id": "bulk_50_refresh_id5"}} {"f1": "v1", "f2": 42} - {"index": {"_index": "bulk_50_refresh_3", "_type": "test_type", "_id": "bulk_50_refresh_id6"}} + {"index": {"_index": "bulk_50_refresh_3", "_id": "bulk_50_refresh_id6"}} {"f1": "v2", "f2": 47} - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/51_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/51_refresh_with_types.yml new file mode 100644 index 00000000000..6326b9464ca --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/51_refresh_with_types.yml @@ -0,0 +1,48 @@ +--- +"refresh=true immediately makes changes are visible in search": + - do: + bulk: + refresh: true + body: | + {"index": {"_index": "bulk_50_refresh_1", "_type": "test_type", "_id": "bulk_50_refresh_id1"}} + {"f1": "v1", "f2": 42} + {"index": {"_index": "bulk_50_refresh_1", "_type": "test_type", "_id": "bulk_50_refresh_id2"}} + {"f1": "v2", "f2": 47} + + - do: + count: + index: bulk_50_refresh_1 + - match: {count: 2} + +--- +"refresh=empty string immediately makes changes are visible in search": + - do: + bulk: + refresh: "" + body: | + {"index": {"_index": "bulk_50_refresh_2", "_type": "test_type", "_id": "bulk_50_refresh_id3"}} + {"f1": "v1", "f2": 42} + {"index": {"_index": "bulk_50_refresh_2", "_type": "test_type", "_id": "bulk_50_refresh_id4"}} + {"f1": "v2", "f2": 47} + + - do: + count: + index: bulk_50_refresh_2 + - match: {count: 2} + + +--- +"refresh=wait_for waits until changes are visible in search": + - do: + bulk: + refresh: wait_for + body: | + {"index": {"_index": "bulk_50_refresh_3", "_type": "test_type", "_id": "bulk_50_refresh_id5"}} + {"f1": "v1", "f2": 42} + {"index": {"_index": "bulk_50_refresh_3", "_type": "test_type", "_id": "bulk_50_refresh_id6"}} + {"f1": "v2", "f2": 47} + + - do: + count: + index: bulk_50_refresh_3 + - match: {count: 2} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/10_basic.yml index a3671d5ac24..7f5d13125c3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/10_basic.yml @@ -1,10 +1,14 @@ --- "Basic": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } @@ -12,8 +16,16 @@ - do: delete: + include_type_name: false index: test_1 - type: test id: 1 - match: { _version: 2 } + + - do: + catch: /illegal_argument_exception/ + delete: + include_type_name: false + index: index + type: type + id: 1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml index d1bb4c0df34..c235b8ebfbf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml @@ -1,8 +1,13 @@ --- "Delete check shard header": + - skip: + version: " - 6.99.99" 
+ reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: foobar body: settings: @@ -15,19 +20,19 @@ - do: index: + include_type_name: false index: foobar - type: baz id: 1 body: { foo: bar } - do: delete: + include_type_name: false index: foobar - type: baz id: 1 - match: { _index: foobar } - - match: { _type: baz } + - is_false: "_type" - match: { _id: "1"} - match: { _version: 2} - match: { _shards.total: 1} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_result.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_result.yml index d01e88be8ad..f6d4fffb68a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_result.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_result.yml @@ -1,17 +1,21 @@ --- "Delete result field": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } - do: delete: + include_type_name: false index: test_1 - type: test id: 1 - match: { result: deleted } @@ -19,8 +23,8 @@ - do: catch: missing delete: + include_type_name: false index: test_1 - type: test id: 1 - match: { result: not_found } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/13_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/13_basic_with_types.yml new file mode 100644 index 00000000000..a3671d5ac24 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/13_basic_with_types.yml @@ -0,0 +1,19 @@ +--- +"Basic": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + + - match: { _version: 1 } + + - do: + delete: + index: test_1 + type: test + id: 1 + + - match: { _version: 2 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/14_shard_header_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/14_shard_header_with_types.yml new file mode 100644 index 00000000000..d1bb4c0df34 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/14_shard_header_with_types.yml @@ -0,0 +1,36 @@ +--- +"Delete check shard header": + + - do: + indices.create: + index: foobar + body: + settings: + number_of_shards: "1" + number_of_replicas: "0" + + - do: + cluster.health: + wait_for_status: green + + - do: + index: + index: foobar + type: baz + id: 1 + body: { foo: bar } + + - do: + delete: + index: foobar + type: baz + id: 1 + + - match: { _index: foobar } + - match: { _type: baz } + - match: { _id: "1"} + - match: { _version: 2} + - match: { _shards.total: 1} + - match: { _shards.successful: 1} + - match: { _shards.failed: 0} + - is_false: _shards.pending diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/15_result_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/15_result_with_types.yml new file mode 100644 index 00000000000..d01e88be8ad --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/15_result_with_types.yml @@ -0,0 +1,26 @@ +--- +"Delete result field": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + + - do: + delete: + index: test_1 + type: test + id: 1 + + - match: { result: deleted } + + - do: + catch: missing + delete: + index: test_1 + type: test + id: 1 + + - match: { result: not_found } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_internal_version.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_internal_version.yml index 3d9ddb79366..c21617bcac6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_internal_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_internal_version.yml @@ -1,10 +1,14 @@ --- "Internal version": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } @@ -13,15 +17,15 @@ - do: catch: conflict delete: + include_type_name: false index: test_1 - type: test id: 1 version: 2 - do: delete: + include_type_name: false index: test_1 - type: test id: 1 version: 1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_internal_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_internal_version_with_types.yml new file mode 100644 index 00000000000..3d9ddb79366 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_internal_version_with_types.yml @@ -0,0 +1,28 @@ +--- +"Internal version": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + + - match: { _version: 1} + + - do: + catch: conflict + delete: + index: test_1 + type: test + id: 1 + version: 2 + + - do: + delete: + index: test_1 + type: test + id: 1 + version: 1 + + - match: { _version: 2 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/25_external_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/25_external_version.yml index 453d64d85bb..c2cae2b6e1e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/25_external_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/25_external_version.yml @@ -1,10 +1,14 @@ --- "External version": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external @@ -15,16 +19,16 @@ - do: catch: conflict delete: + include_type_name: false index: test_1 - type: test id: 1 version_type: external version: 4 - do: delete: + include_type_name: false index: test_1 - type: test id: 1 version_type: external version: 6 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/26_external_gte_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/26_external_gte_version.yml index 70f78c17faa..df119a57c12 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/26_external_gte_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/26_external_gte_version.yml @@ -1,10 +1,14 @@ --- "External GTE version": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external_gte @@ -15,16 +19,16 @@ - do: catch: conflict delete: + include_type_name: false index: test_1 - type: test id: 1 version_type: external_gte version: 4 - do: delete: + include_type_name: false index: test_1 - type: test id: 1 version_type: external_gte version: 6 @@ -33,8 +37,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external_gte @@ -44,8 +48,8 @@ - do: delete: + include_type_name: false index: test_1 - type: test id: 1 version_type: external_gte version: 6 diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/27_external_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/27_external_version_with_types.yml new file mode 100644 index 00000000000..453d64d85bb --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/27_external_version_with_types.yml @@ -0,0 +1,32 @@ +--- +"External version": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external + version: 5 + + - match: { _version: 5} + + - do: + catch: conflict + delete: + index: test_1 + type: test + id: 1 + version_type: external + version: 4 + + - do: + delete: + index: test_1 + type: test + id: 1 + version_type: external + version: 6 + + - match: { _version: 6} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/28_external_gte_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/28_external_gte_version_with_types.yml new file mode 100644 index 00000000000..70f78c17faa --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/28_external_gte_version_with_types.yml @@ -0,0 +1,53 @@ +--- +"External GTE version": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external_gte + version: 5 + + - match: { _version: 5} + + - do: + catch: conflict + delete: + index: test_1 + type: test + id: 1 + version_type: external_gte + version: 4 + + - do: + delete: + index: test_1 + type: test + id: 1 + version_type: external_gte + version: 6 + + - match: { _version: 6} + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external_gte + version: 6 + + - match: { _version: 6} + + - do: + delete: + index: test_1 + type: test + id: 1 + version_type: external_gte + version: 6 + + - match: { _version: 6} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yml index 6f67b3a03f4..a0ad089b0fb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yml @@ -1,16 +1,21 @@ --- "Routing": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: settings: number_of_shards: 5 - do: index: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 body: { foo: bar } @@ -18,15 +23,15 @@ - do: catch: missing delete: + include_type_name: false index: test_1 - type: test id: 1 routing: 4 - do: delete: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/31_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/31_routing_with_types.yml new file mode 100644 index 00000000000..6f67b3a03f4 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/31_routing_with_types.yml @@ -0,0 +1,32 @@ +--- +"Routing": + + - do: + indices.create: + index: test_1 + body: + settings: + number_of_shards: 5 + - do: + index: + index: test_1 + type: test + id: 1 + routing: 5 + body: { foo: bar } + + - do: + catch: missing + delete: + index: test_1 + type: test + id: 1 + routing: 4 + + - do: + delete: + index: test_1 + type: test + id: 1 + routing: 5 + diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/50_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/50_refresh.yml index ad27bb68601..326186bf07b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/50_refresh.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/50_refresh.yml @@ -1,8 +1,13 @@ --- "Refresh": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: settings: @@ -16,8 +21,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } refresh: true @@ -27,8 +32,8 @@ # them to be different for this test to pass - do: index: + include_type_name: false index: test_1 - type: test id: 3 body: { foo: bar } refresh: true @@ -36,6 +41,7 @@ - do: search: + include_type_name: false index: test_1 body: query: { terms: { _id: [1,3] }} @@ -44,12 +50,13 @@ - do: delete: + include_type_name: false index: test_1 - type: test id: 1 - do: search: + include_type_name: false index: test_1 body: query: { terms: { _id: [1,3] }} @@ -58,8 +65,8 @@ - do: delete: + include_type_name: false index: test_1 - type: test id: 3 refresh: true @@ -69,6 +76,7 @@ - do: search: + include_type_name: false index: test_1 body: query: { terms: { _id: [1,3] }} @@ -77,10 +85,15 @@ --- "When refresh url parameter is an empty string that means \"refresh immediately\"": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } refresh: true @@ -88,6 +101,7 @@ - do: search: + include_type_name: false index: test_1 body: query: { term: { _id: 1 }} @@ -95,13 +109,14 @@ - do: delete: + include_type_name: false index: test_1 - type: test id: 1 refresh: "" - do: search: + include_type_name: false index: test_1 body: query: { term: { _id: 1 }} @@ -109,10 +124,15 @@ --- "refresh=wait_for waits until changes are visible in search": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: delete_50_refresh_1 - type: test id: delete_50_refresh_id1 body: { foo: bar } refresh: true @@ -120,6 +140,7 @@ - do: search: + include_type_name: false index: delete_50_refresh_1 body: query: { term: { _id: delete_50_refresh_id1 }} @@ -127,14 +148,15 @@ - do: delete: + include_type_name: false index: delete_50_refresh_1 - type: test id: delete_50_refresh_id1 refresh: wait_for - is_false: forced_refresh - do: search: + include_type_name: false index: delete_50_refresh_1 body: query: { term: { _id: delete_50_refresh_id1 }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/51_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/51_refresh_with_types.yml new file mode 100644 index 00000000000..ad27bb68601 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/51_refresh_with_types.yml @@ -0,0 +1,141 @@ +--- +"Refresh": + + - do: + indices.create: + index: test_1 + body: + settings: + refresh_interval: -1 + number_of_shards: 5 + number_of_routing_shards: 5 + number_of_replicas: 0 + - do: + cluster.health: + wait_for_status: green + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + refresh: true + +# If you wonder why this document get 3 as an id instead of 2, it is because the +# current routing algorithm would route 1 and 2 to the same shard 
while we need +# them to be different for this test to pass + - do: + index: + index: test_1 + type: test + id: 3 + body: { foo: bar } + refresh: true + - is_true: forced_refresh + + - do: + search: + index: test_1 + body: + query: { terms: { _id: [1,3] }} + + - match: { hits.total: 2 } + + - do: + delete: + index: test_1 + type: test + id: 1 + + - do: + search: + index: test_1 + body: + query: { terms: { _id: [1,3] }} + + - match: { hits.total: 2 } + + - do: + delete: + index: test_1 + type: test + id: 3 + refresh: true + +# If a replica shard where doc 1 is located gets initialized at this point, doc 1 +# won't be found by the following search as the shard gets automatically refreshed +# right before getting started. This is why this test only works with 0 replicas. + + - do: + search: + index: test_1 + body: + query: { terms: { _id: [1,3] }} + + - match: { hits.total: 1 } + +--- +"When refresh url parameter is an empty string that means \"refresh immediately\"": + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + refresh: true + - is_true: forced_refresh + + - do: + search: + index: test_1 + body: + query: { term: { _id: 1 }} + - match: { hits.total: 1 } + + - do: + delete: + index: test_1 + type: test + id: 1 + refresh: "" + + - do: + search: + index: test_1 + body: + query: { term: { _id: 1 }} + - match: { hits.total: 0 } + +--- +"refresh=wait_for waits until changes are visible in search": + - do: + index: + index: delete_50_refresh_1 + type: test + id: delete_50_refresh_id1 + body: { foo: bar } + refresh: true + - is_true: forced_refresh + + - do: + search: + index: delete_50_refresh_1 + body: + query: { term: { _id: delete_50_refresh_id1 }} + - match: { hits.total: 1 } + + - do: + delete: + index: delete_50_refresh_1 + type: test + id: delete_50_refresh_id1 + refresh: wait_for + - is_false: forced_refresh + + - do: + search: + index: delete_50_refresh_1 + body: + query: { term: { _id: delete_50_refresh_id1 }} + - match: { hits.total: 0 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/60_missing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/60_missing.yml index 9cfdb48ae20..46b238482d7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/60_missing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/60_missing.yml @@ -1,19 +1,27 @@ --- "Missing document with catch": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: catch: missing delete: + include_type_name: false index: test_1 - type: test id: 1 --- "Missing document with ignore": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: delete: + include_type_name: false index: test_1 - type: test id: 1 ignore: 404 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/61_missing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/61_missing_with_types.yml new file mode 100644 index 00000000000..9cfdb48ae20 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/61_missing_with_types.yml @@ -0,0 +1,19 @@ +--- +"Missing document with catch": + + - do: + catch: missing + delete: + index: test_1 + type: test + id: 1 + +--- +"Missing document with ignore": + + - do: + delete: + index: test_1 + type: test + id: 1 + ignore: 404 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml 
index 39320d12136..2c7937aeacc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml @@ -24,6 +24,16 @@ setup: nested2: type: float doc_values: false + level1: + type: nested + properties: + level2: + type: object + properties: + leaf1: + type: text + index: false + - do: indices.create: index: test2 @@ -48,6 +58,15 @@ setup: nested2: type: float doc_values: true + level1: + type: nested + properties: + level2: + type: object + properties: + leaf1: + type: text + index: false - do: indices.create: index: test3 @@ -64,7 +83,7 @@ setup: geo: type: keyword object: - type: object + type: nested properties: nested1 : type : long @@ -72,6 +91,15 @@ setup: nested2: type: keyword doc_values: false + level1: + type: object + properties: + level2: + type: object + properties: + leaf1: + type: text + index: false --- "Get simple field caps": @@ -112,7 +140,7 @@ setup: - is_false: fields.geo.keyword.non_searchable_indices - is_false: fields.geo.keyword.on_aggregatable_indices --- -"Get nested field caps": +"Get leaves field caps": - do: field_caps: @@ -140,6 +168,47 @@ setup: - is_false: fields.object\.nested2.keyword.non_aggregatable_indices - is_false: fields.object\.nested2.keyword.non_searchable_indices --- +"Get object and nested field caps": + - skip: + version: " - 6.99.99" + reason: object and nested fields are returned since 7.0 + + - do: + field_caps: + index: 'test1,test2,test3' + fields: object*,level1* + + - match: {fields.object.object.indices: ["test1", "test2"]} + - match: {fields.object.object.searchable: false} + - match: {fields.object.object.aggregatable: false} + - is_false: fields.object.object.non_aggregatable_indices + - is_false: fields.object.object.non_searchable_indices + - match: {fields.object.nested.indices: ["test3"]} + - match: {fields.object.nested.searchable: false} + - match: {fields.object.nested.aggregatable: false} + - is_false: fields.object.nested.non_aggregatable_indices + - is_false: fields.object.nested.non_searchable_indices + - match: {fields.level1.nested.indices: ["test1", "test2"]} + - match: {fields.level1.nested.searchable: false} + - match: {fields.level1.nested.aggregatable: false} + - is_false: fields.level1.nested.non_aggregatable_indices + - is_false: fields.level1.nested.non_searchable_indices + - match: {fields.level1.object.indices: ["test3"]} + - match: {fields.level1.object.searchable: false} + - match: {fields.level1.object.aggregatable: false} + - is_false: fields.level1.object.non_aggregatable_indices + - is_false: fields.level1.object.non_searchable_indices + - match: {fields.level1\.level2.object.searchable: false} + - match: {fields.level1\.level2.object.aggregatable: false} + - is_false: fields.level1\.level2.object.indices + - is_false: fields.level1\.level2.object.non_aggregatable_indices + - is_false: fields.level1\.level2.object.non_searchable_indices + - match: {fields.level1\.level2\.leaf1.text.searchable: false} + - match: {fields.level1\.level2\.leaf1.text.aggregatable: false} + - is_false: fields.level1\.level2\.leaf1.text.indices + - is_false: fields.level1\.level2\.leaf1.text.non_aggregatable_indices + - is_false: fields.level1\.level2\.leaf1.text..non_searchable_indices +--- "Get prefix field caps": - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml index 0689f714d64..71403f0b56f 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml @@ -1,31 +1,32 @@ --- "Basic": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 中文 body: { "foo": "Hello: 中文" } - do: get: + include_type_name: false index: test_1 - type: test id: 中文 - match: { _index: test_1 } - - match: { _type: test } + - is_false: "_type" - match: { _id: 中文 } - match: { _source: { foo: "Hello: 中文" } } - do: + catch: /illegal_argument_exception/ get: - index: test_1 - type: _all - id: 中文 - - - match: { _index: test_1 } - - match: { _type: test } - - match: { _id: 中文 } - - match: { _source: { foo: "Hello: 中文" } } + index: index + type: type + id: 1 + include_type_name: false diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/11_basic_with_types.yml new file mode 100644 index 00000000000..0689f714d64 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/11_basic_with_types.yml @@ -0,0 +1,31 @@ +--- +"Basic": + + - do: + index: + index: test_1 + type: test + id: 中文 + body: { "foo": "Hello: 中文" } + + - do: + get: + index: test_1 + type: test + id: 中文 + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: 中文 } + - match: { _source: { foo: "Hello: 中文" } } + + - do: + get: + index: test_1 + type: _all + id: 中文 + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: 中文 } + - match: { _source: { foo: "Hello: 中文" } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml index 5e08112253e..fbab99fc3c6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml @@ -1,21 +1,25 @@ --- "Default values": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { "foo": "bar" } - do: get: + include_type_name: false index: test_1 - type: _all id: 1 - match: { _index: test_1 } - - match: { _type: test } + - is_false: "_type" - match: { _id: '1' } - match: { _source: { foo: "bar" } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/16_default_values_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/16_default_values_with_types.yml new file mode 100644 index 00000000000..5e08112253e --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/16_default_values_with_types.yml @@ -0,0 +1,21 @@ +--- +"Default values": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { "foo": "bar" } + + - do: + get: + index: test_1 + type: _all + id: 1 + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: '1' } + - match: { _source: { foo: "bar" } } + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml index fbffb9e0ea8..20971728ffd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml @@ -1,43 +1,47 @@ --- "Stored fields": + - skip: + 
version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: mappings: - test: - properties: - foo: - type: keyword - store: true - count: - type: integer - store: true + properties: + foo: + type: keyword + store: true + count: + type: integer + store: true - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { "foo": "bar", "count": 1 } - do: get: + include_type_name: false index: test_1 - type: test id: 1 stored_fields: foo - match: { _index: test_1 } - - match: { _type: test } + - is_false: "_type" - match: { _id: '1' } - match: { fields.foo: [bar] } - is_false: _source - do: get: + include_type_name: false index: test_1 - type: test id: 1 stored_fields: [foo, count] @@ -47,8 +51,8 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 stored_fields: [foo, count, _source] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/21_stored_fields_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/21_stored_fields_with_types.yml new file mode 100644 index 00000000000..fbffb9e0ea8 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/21_stored_fields_with_types.yml @@ -0,0 +1,59 @@ +--- +"Stored fields": + + - do: + indices.create: + index: test_1 + body: + mappings: + test: + properties: + foo: + type: keyword + store: true + count: + type: integer + store: true + + - do: + index: + index: test_1 + type: test + id: 1 + body: { "foo": "bar", "count": 1 } + - do: + get: + index: test_1 + type: test + id: 1 + stored_fields: foo + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: '1' } + - match: { fields.foo: [bar] } + - is_false: _source + + - do: + get: + index: test_1 + type: test + id: 1 + stored_fields: [foo, count] + + - match: { fields.foo: [bar] } + - match: { fields.count: [1] } + - is_false: _source + + - do: + get: + index: test_1 + type: test + id: 1 + stored_fields: [foo, count, _source] + + - match: { fields.foo: [bar] } + - match: { fields.count: [1] } + - match: { _source.foo: bar } + + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yml index 276346cda4f..94162314225 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yml @@ -1,8 +1,13 @@ --- "Routing": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: settings: @@ -17,16 +22,16 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 body: { foo: bar } - do: get: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 stored_fields: [_routing] @@ -37,7 +42,7 @@ - do: catch: missing get: + include_type_name: false index: test_1 - type: test id: 1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/41_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/41_routing_with_types.yml new file mode 100644 index 00000000000..276346cda4f --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/41_routing_with_types.yml @@ -0,0 +1,43 @@ +--- +"Routing": + + - do: + indices.create: + index: test_1 + body: + settings: + index: + number_of_shards: 5 + number_of_routing_shards: 5 + number_of_replicas: 0 + + - do: + 
cluster.health: + wait_for_status: green + + - do: + index: + index: test_1 + type: test + id: 1 + routing: 5 + body: { foo: bar } + + - do: + get: + index: test_1 + type: test + id: 1 + routing: 5 + stored_fields: [_routing] + + - match: { _id: "1"} + - match: { _routing: "5"} + + - do: + catch: missing + get: + index: test_1 + type: test + id: 1 + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml index b88dbaafc4f..bd26eee1b5d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml @@ -2,11 +2,13 @@ "REST test with headers": - skip: features: ["headers", "yaml"] + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { "body": "foo" } @@ -14,12 +16,12 @@ headers: Accept: application/yaml get: + include_type_name: false index: test_1 - type: _all id: 1 - match: {_index: "test_1"} - - match: {_type: "test"} + - is_false: "_type" - match: {_id: "1"} - match: {_version: 1} - match: {found: true} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/51_with_headers_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/51_with_headers_with_types.yml new file mode 100644 index 00000000000..b88dbaafc4f --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/51_with_headers_with_types.yml @@ -0,0 +1,26 @@ +--- +"REST test with headers": + - skip: + features: ["headers", "yaml"] + + - do: + index: + index: test_1 + type: test + id: 1 + body: { "body": "foo" } + + - do: + headers: + Accept: application/yaml + get: + index: test_1 + type: _all + id: 1 + + - match: {_index: "test_1"} + - match: {_type: "test"} + - match: {_id: "1"} + - match: {_version: 1} + - match: {found: true} + - match: { _source: { body: foo }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/60_realtime_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/60_realtime_refresh.yml index 7d02b4667ef..c5955bf4d7a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/60_realtime_refresh.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/60_realtime_refresh.yml @@ -1,8 +1,13 @@ --- "Realtime Refresh": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: settings: @@ -16,23 +21,23 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } - do: catch: missing get: + include_type_name: false index: test_1 - type: test id: 1 realtime: false - do: get: + include_type_name: false index: test_1 - type: test id: 1 realtime: true @@ -40,8 +45,8 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 realtime: false refresh: true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/61_realtime_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/61_realtime_refresh_with_types.yml new file mode 100644 index 00000000000..7d02b4667ef --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/61_realtime_refresh_with_types.yml @@ -0,0 +1,49 @@ +--- +"Realtime Refresh": + + - do: + indices.create: + index: test_1 + body: + settings: + index: + refresh_interval: -1 + number_of_replicas: 0 
+ + - do: + cluster.health: + wait_for_status: green + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + + - do: + catch: missing + get: + index: test_1 + type: test + id: 1 + realtime: false + + - do: + get: + index: test_1 + type: test + id: 1 + realtime: true + + - is_true: found + + - do: + get: + index: test_1 + type: test + id: 1 + realtime: false + refresh: true + + - is_true: found diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml index c858886ca3d..55520000e2f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml @@ -1,53 +1,57 @@ --- "Source filtering": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: mappings: - test: - properties: - count: - type: integer - store: true + properties: + count: + type: integer + store: true - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 } - do: - get: { index: test_1, type: test, id: 1, _source: false } + get: { include_type_name: false, index: test_1, id: 1, _source: false } - match: { _index: test_1 } - - match: { _type: test } + - is_false: "_type" - match: { _id: "1" } - is_false: _source - do: - get: { index: test_1, type: test, id: 1, _source: true } + get: { include_type_name: false, index: test_1, id: 1, _source: true } - match: { _source.include.field1: v1 } - do: - get: { index: test_1, type: test, id: 1, _source: include.field1 } + get: { include_type_name: false, index: test_1, id: 1, _source: include.field1 } - match: { _source.include.field1: v1 } - is_false: _source.include.field2 - do: - get: { index: test_1, type: test, id: 1, _source_include: include.field1 } + get: { include_type_name: false, index: test_1, id: 1, _source_include: include.field1 } - match: { _source.include.field1: v1 } - is_false: _source.include.field2 - do: - get: { index: test_1, type: test, id: 1, _source_include: "include.field1,include.field2" } + get: { include_type_name: false, index: test_1, id: 1, _source_include: "include.field1,include.field2" } - match: { _source.include.field1: v1 } - match: { _source.include.field2: v2 } - is_false: _source.count - do: - get: { index: test_1, type: test, id: 1, _source_include: include, _source_exclude: "*.field2" } + get: { include_type_name: false, index: test_1, id: 1, _source_include: include, _source_exclude: "*.field2" } - match: { _source.include.field1: v1 } - is_false: _source.include.field2 - is_false: _source.count @@ -55,14 +59,14 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 stored_fields: count _source: true - match: { _index: test_1 } - - match: { _type: test } + - is_false: "_type" - match: { _id: "1" } - match: { fields.count: [1] } - match: { _source.include.field1: v1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/71_source_filtering_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/71_source_filtering_with_types.yml new file mode 100644 index 00000000000..c858886ca3d --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/71_source_filtering_with_types.yml @@ -0,0 +1,68 @@ +--- +"Source filtering": + + - do: + indices.create: + 
index: test_1 + body: + mappings: + test: + properties: + count: + type: integer + store: true + + - do: + index: + index: test_1 + type: test + id: 1 + body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 } + - do: + get: { index: test_1, type: test, id: 1, _source: false } + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: "1" } + - is_false: _source + + - do: + get: { index: test_1, type: test, id: 1, _source: true } + - match: { _source.include.field1: v1 } + + - do: + get: { index: test_1, type: test, id: 1, _source: include.field1 } + - match: { _source.include.field1: v1 } + - is_false: _source.include.field2 + + - do: + get: { index: test_1, type: test, id: 1, _source_include: include.field1 } + - match: { _source.include.field1: v1 } + - is_false: _source.include.field2 + + - do: + get: { index: test_1, type: test, id: 1, _source_include: "include.field1,include.field2" } + - match: { _source.include.field1: v1 } + - match: { _source.include.field2: v2 } + - is_false: _source.count + + - do: + get: { index: test_1, type: test, id: 1, _source_include: include, _source_exclude: "*.field2" } + - match: { _source.include.field1: v1 } + - is_false: _source.include.field2 + - is_false: _source.count + + + - do: + get: + index: test_1 + type: test + id: 1 + stored_fields: count + _source: true + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: "1" } + - match: { fields.count: [1] } + - match: { _source.include.field1: v1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml index a60d1138856..48a6966b455 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml @@ -1,19 +1,27 @@ --- "Missing document with catch": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: catch: missing get: + include_type_name: false index: test_1 - type: test id: 1 --- "Missing document with ignore": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: get: + include_type_name: false index: test_1 - type: test id: 1 ignore: 404 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/81_missing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/81_missing_with_types.yml new file mode 100644 index 00000000000..a60d1138856 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/81_missing_with_types.yml @@ -0,0 +1,19 @@ +--- +"Missing document with catch": + + - do: + catch: missing + get: + index: test_1 + type: test + id: 1 + +--- +"Missing document with ignore": + + - do: + get: + index: test_1 + type: test + id: 1 + ignore: 404 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/90_versions.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/90_versions.yml index c6631b83b18..6975d4f5be5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/90_versions.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/90_versions.yml @@ -1,26 +1,30 @@ --- "Versions": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } - match: { _version: 1} - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } - match: 
{ _version: 2} - do: get: + include_type_name: false index: test_1 - type: test id: 1 version: 2 - match: { _id: "1" } @@ -28,15 +32,15 @@ - do: catch: conflict get: + include_type_name: false index: test_1 - type: test id: 1 version: 1 - do: get: + include_type_name: false index: test_1 - type: test id: 1 version: 2 version_type: external @@ -45,8 +49,8 @@ - do: catch: conflict get: + include_type_name: false index: test_1 - type: test id: 1 version: 10 version_type: external @@ -54,16 +58,16 @@ - do: catch: conflict get: + include_type_name: false index: test_1 - type: test id: 1 version: 1 version_type: external - do: get: + include_type_name: false index: test_1 - type: test id: 1 version: 2 version_type: external_gte @@ -72,8 +76,8 @@ - do: catch: conflict get: + include_type_name: false index: test_1 - type: test id: 1 version: 10 version_type: external_gte @@ -81,8 +85,8 @@ - do: catch: conflict get: + include_type_name: false index: test_1 - type: test id: 1 version: 1 version_type: external_gte diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/91_versions_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/91_versions_with_types.yml new file mode 100644 index 00000000000..c6631b83b18 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/91_versions_with_types.yml @@ -0,0 +1,89 @@ +--- +"Versions": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + - match: { _version: 1} + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + - match: { _version: 2} + + - do: + get: + index: test_1 + type: test + id: 1 + version: 2 + - match: { _id: "1" } + + - do: + catch: conflict + get: + index: test_1 + type: test + id: 1 + version: 1 + + - do: + get: + index: test_1 + type: test + id: 1 + version: 2 + version_type: external + - match: { _id: "1" } + + - do: + catch: conflict + get: + index: test_1 + type: test + id: 1 + version: 10 + version_type: external + + - do: + catch: conflict + get: + index: test_1 + type: test + id: 1 + version: 1 + version_type: external + + - do: + get: + index: test_1 + type: test + id: 1 + version: 2 + version_type: external_gte + - match: { _id: "1" } + + - do: + catch: conflict + get: + index: test_1 + type: test + id: 1 + version: 10 + version_type: external_gte + + - do: + catch: conflict + get: + index: test_1 + type: test + id: 1 + version: 1 + version_type: external_gte + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml index daac81849fb..0deb7637694 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml @@ -1,26 +1,30 @@ --- "Index with ID": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test-weird-index-中文 - type: weird.type id: 1 body: { foo: bar } - match: { _index: test-weird-index-中文 } - - match: { _type: weird.type } + - is_false: "_type" - match: { _id: "1"} - match: { _version: 1} - do: get: + include_type_name: false index: test-weird-index-中文 - type: weird.type id: 1 - match: { _index: test-weird-index-中文 } - - match: { _type: weird.type } + - is_false: "_type" - match: { _id: "1"} - match: { _version: 1} - match: { _source: { foo: bar }} @@ -28,7 +32,16 @@ - do: catch: bad_request index: + include_type_name: false index: idx - 
type: type id: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa body: { foo: bar } + + - do: + catch: /illegal_argument_exception/ + index: + index: index + type: type + id: 1 + include_type_name: false + body: { foo: bar } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/11_with_id_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/11_with_id_with_types.yml new file mode 100644 index 00000000000..daac81849fb --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/11_with_id_with_types.yml @@ -0,0 +1,34 @@ +--- +"Index with ID": + + - do: + index: + index: test-weird-index-中文 + type: weird.type + id: 1 + body: { foo: bar } + + - match: { _index: test-weird-index-中文 } + - match: { _type: weird.type } + - match: { _id: "1"} + - match: { _version: 1} + + - do: + get: + index: test-weird-index-中文 + type: weird.type + id: 1 + + - match: { _index: test-weird-index-中文 } + - match: { _type: weird.type } + - match: { _id: "1"} + - match: { _version: 1} + - match: { _source: { foo: bar }} + + - do: + catch: bad_request + index: + index: idx + type: type + id: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + body: { foo: bar } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml index 45ebe0bbd3d..7198c694b51 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml @@ -1,10 +1,14 @@ --- "Index result field": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_index - type: test id: 1 body: { foo: bar } @@ -12,8 +16,8 @@ - do: index: + include_type_name: false index: test_index - type: test id: 1 body: { foo: bar } op_type: index diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/13_result_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/13_result_with_types.yml new file mode 100644 index 00000000000..45ebe0bbd3d --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/13_result_with_types.yml @@ -0,0 +1,21 @@ +--- +"Index result field": + + - do: + 
index: + index: test_index + type: test + id: 1 + body: { foo: bar } + + - match: { result: created } + + - do: + index: + index: test_index + type: test + id: 1 + body: { foo: bar } + op_type: index + + - match: { result: updated } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml index 3fff0512b96..3bd607c66fa 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml @@ -1,26 +1,38 @@ --- "Index without ID": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test body: { foo: bar } - is_true: _id - match: { _index: test_1 } - - match: { _type: test } + - is_false: "_type" - match: { _version: 1 } - set: { _id: id } - do: get: + include_type_name: false index: test_1 - type: test id: '$id' - match: { _index: test_1 } - - match: { _type: test } + - is_false: "_type" - match: { _id: $id } - match: { _version: 1 } - match: { _source: { foo: bar }} + + - do: + catch: /illegal_argument_exception/ + index: + index: index + type: type + include_type_name: false + body: { foo: bar } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/16_without_id_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/16_without_id_with_types.yml new file mode 100644 index 00000000000..3fff0512b96 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/16_without_id_with_types.yml @@ -0,0 +1,26 @@ +--- +"Index without ID": + + - do: + index: + index: test_1 + type: test + body: { foo: bar } + + - is_true: _id + - match: { _index: test_1 } + - match: { _type: test } + - match: { _version: 1 } + - set: { _id: id } + + - do: + get: + index: test_1 + type: test + id: '$id' + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: $id } + - match: { _version: 1 } + - match: { _source: { foo: bar }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/20_optype.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/20_optype.yml index 60ae26d46d0..ddab362b80f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/20_optype.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/20_optype.yml @@ -1,10 +1,14 @@ --- "Optype": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 op_type: create body: { foo: bar } @@ -12,16 +16,16 @@ - do: catch: conflict index: + include_type_name: false index: test_1 - type: test id: 1 op_type: create body: { foo: bar } - do: index: + include_type_name: false index: test_1 - type: test id: 1 op_type: index body: { foo: bar } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/21_optype_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/21_optype_with_types.yml new file mode 100644 index 00000000000..60ae26d46d0 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/21_optype_with_types.yml @@ -0,0 +1,29 @@ +--- +"Optype": + + - do: + index: + index: test_1 + type: test + id: 1 + op_type: create + body: { foo: bar } + + - do: + catch: conflict + index: + index: test_1 + type: test + id: 1 + op_type: create + body: { foo: bar } + + - do: + index: + index: test_1 + 
type: test + id: 1 + op_type: index + body: { foo: bar } + + - match: { _version: 2 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_internal_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_internal_version.yml index 1767fbebbf9..53351c24feb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_internal_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_internal_version.yml @@ -1,18 +1,22 @@ --- "Internal version": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } - match: { _version: 1} - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } - match: { _version: 2} @@ -20,15 +24,15 @@ - do: catch: conflict index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version: 1 - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version: 2 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/31_internal_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/31_internal_version_with_types.yml new file mode 100644 index 00000000000..1767fbebbf9 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/31_internal_version_with_types.yml @@ -0,0 +1,36 @@ +--- +"Internal version": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + - match: { _version: 1} + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + - match: { _version: 2} + + - do: + catch: conflict + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version: 1 + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version: 2 + + - match: { _version: 3 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/35_external_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/35_external_version.yml index f17e6b74931..054f8cad15d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/35_external_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/35_external_version.yml @@ -1,10 +1,14 @@ --- "External version": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external @@ -14,8 +18,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external @@ -26,8 +30,8 @@ - do: catch: conflict index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external @@ -36,8 +40,8 @@ - do: catch: conflict index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external @@ -45,8 +49,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/36_external_gte_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/36_external_gte_version.yml index dccbe02ea14..67f534db341 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/36_external_gte_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/36_external_gte_version.yml @@ -1,10 +1,14 @@ 
--- "External GTE version": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external_gte @@ -14,8 +18,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external_gte @@ -26,8 +30,8 @@ - do: catch: conflict index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external_gte @@ -35,8 +39,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar2 } version_type: external_gte @@ -46,8 +50,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar2 } version_type: external_gte diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/37_external_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/37_external_version_with_types.yml new file mode 100644 index 00000000000..f17e6b74931 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/37_external_version_with_types.yml @@ -0,0 +1,55 @@ +--- +"External version": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external + version: 0 + + - match: { _version: 0 } + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external + version: 5 + + - match: { _version: 5 } + + - do: + catch: conflict + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external + version: 5 + + - do: + catch: conflict + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external + version: 0 + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external + version: 6 + + - match: { _version: 6} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/38_external_gte_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/38_external_gte_version_with_types.yml new file mode 100644 index 00000000000..dccbe02ea14 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/38_external_gte_version_with_types.yml @@ -0,0 +1,56 @@ +--- +"External GTE version": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external_gte + version: 0 + + - match: { _version: 0} + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external_gte + version: 5 + + - match: { _version: 5} + + - do: + catch: conflict + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external_gte + version: 0 + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar2 } + version_type: external_gte + version: 5 + + - match: { _version: 5} + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar2 } + version_type: external_gte + version: 6 + + - match: { _version: 6} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yml index 5b0cf94f423..523cf47f858 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yml @@ -1,8 +1,13 @@ --- "Routing": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: 
indices.create: + include_type_name: false index: test_1 body: settings: @@ -17,16 +22,16 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 body: { foo: bar } - do: get: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 stored_fields: [_routing] @@ -37,7 +42,7 @@ - do: catch: missing get: + include_type_name: false index: test_1 - type: test id: 1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/41_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/41_routing_with_types.yml new file mode 100644 index 00000000000..5b0cf94f423 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/41_routing_with_types.yml @@ -0,0 +1,43 @@ +--- +"Routing": + + - do: + indices.create: + index: test_1 + body: + settings: + index: + number_of_shards: 5 + number_of_routing_shards: 5 + number_of_replicas: 0 + + - do: + cluster.health: + wait_for_status: green + + - do: + index: + index: test_1 + type: test + id: 1 + routing: 5 + body: { foo: bar } + + - do: + get: + index: test_1 + type: test + id: 1 + routing: 5 + stored_fields: [_routing] + + - match: { _id: "1"} + - match: { _routing: "5"} + + - do: + catch: missing + get: + index: test_1 + type: test + id: 1 + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yml index cd78a4e4282..346338791d6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yml @@ -1,8 +1,13 @@ --- "Refresh": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: settings: @@ -11,13 +16,14 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } - do: search: + include_type_name: false index: test_1 body: query: { term: { _id: 1 }} @@ -26,8 +32,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 2 refresh: true body: { foo: bar } @@ -35,6 +41,7 @@ - do: search: + include_type_name: false index: test_1 body: query: { term: { _id: 2 }} @@ -43,10 +50,15 @@ --- "When refresh url parameter is an empty string that means \"refresh immediately\"": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 refresh: "" body: { foo: bar } @@ -54,6 +66,7 @@ - do: search: + include_type_name: false index: test_1 body: query: { term: { _id: 1 }} @@ -62,10 +75,15 @@ --- "refresh=wait_for waits until changes are visible in search": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: index_60_refresh_1 - type: test id: index_60_refresh_id1 body: { foo: bar } refresh: wait_for @@ -73,6 +91,7 @@ - do: search: + include_type_name: false index: index_60_refresh_1 body: query: { term: { _id: index_60_refresh_id1 }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/61_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/61_refresh_with_types.yml new file mode 100644 index 00000000000..cd78a4e4282 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/61_refresh_with_types.yml @@ -0,0 +1,79 @@ +--- +"Refresh": + + - do: + indices.create: + index: 
test_1 + body: + settings: + index.refresh_interval: -1 + number_of_replicas: 0 + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + + - do: + search: + index: test_1 + body: + query: { term: { _id: 1 }} + + - match: { hits.total: 0 } + + - do: + index: + index: test_1 + type: test + id: 2 + refresh: true + body: { foo: bar } + - is_true: forced_refresh + + - do: + search: + index: test_1 + body: + query: { term: { _id: 2 }} + + - match: { hits.total: 1 } + +--- +"When refresh url parameter is an empty string that means \"refresh immediately\"": + - do: + index: + index: test_1 + type: test + id: 1 + refresh: "" + body: { foo: bar } + - is_true: forced_refresh + + - do: + search: + index: test_1 + body: + query: { term: { _id: 1 }} + + - match: { hits.total: 1 } + +--- +"refresh=wait_for waits until changes are visible in search": + - do: + index: + index: index_60_refresh_1 + type: test + id: index_60_refresh_id1 + body: { foo: bar } + refresh: wait_for + - is_false: forced_refresh + + - do: + search: + index: index_60_refresh_1 + body: + query: { term: { _id: index_60_refresh_id1 }} + - match: { hits.total: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml index 6f7c5a60093..a96c31e9ce6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml @@ -1,24 +1,35 @@ --- "Create index with mappings": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_index body: mappings: - type_1: {} + {} - do: indices.get_mapping: + include_type_name: false index: test_index - - is_true: test_index.mappings.type_1 + - is_true: test_index.mappings --- "Create index with settings": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_index body: settings: @@ -33,8 +44,13 @@ --- "Create index": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_index - match: { acknowledged: true } @@ -43,8 +59,13 @@ --- "Create index with wait_for_active_shards set to all": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_index wait_for_active_shards: all body: @@ -57,15 +78,19 @@ --- "Create index with aliases": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_index body: mappings: - type_1: - properties: - field: - type: text + properties: + field: + type: text aliases: test_alias: {} test_blias: @@ -93,6 +118,7 @@ reason: is_write_index is not implemented in ES <= 6.x - do: indices.create: + include_type_name: false index: test_index body: aliases: @@ -110,25 +136,15 @@ - is_false: test_index.aliases.test_blias.is_write_index - is_true: test_index.aliases.test_clias.is_write_index ---- -"Create index with no type mappings": - - do: - catch: /illegal_argument_exception/ - indices.create: - index: test_index - body: - mappings: - "" : {} - --- "Create index with invalid mappings": - do: catch: /illegal_argument_exception/ indices.create: + include_type_name: false index: 
test_index body: mappings: - test_type: - properties: - "": - type: keyword + properties: + "": + type: keyword diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/11_basic_with_types.yml new file mode 100644 index 00000000000..6f7c5a60093 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/11_basic_with_types.yml @@ -0,0 +1,134 @@ +--- +"Create index with mappings": + + - do: + indices.create: + index: test_index + body: + mappings: + type_1: {} + + - do: + indices.get_mapping: + index: test_index + + - is_true: test_index.mappings.type_1 + +--- +"Create index with settings": + + - do: + indices.create: + index: test_index + body: + settings: + number_of_replicas: "0" + + - do: + indices.get_settings: + index: test_index + + - match: { test_index.settings.index.number_of_replicas: "0"} + +--- +"Create index": + + - do: + indices.create: + index: test_index + + - match: { acknowledged: true } + - match: { index: "test_index"} + +--- +"Create index with wait_for_active_shards set to all": + + - do: + indices.create: + index: test_index + wait_for_active_shards: all + body: + settings: + number_of_replicas: "0" + + - match: { acknowledged: true } + - match: { shards_acknowledged: true } + +--- +"Create index with aliases": + + - do: + indices.create: + index: test_index + body: + mappings: + type_1: + properties: + field: + type: text + aliases: + test_alias: {} + test_blias: + routing: b + test_clias: + filter: + term: + field : value + + - do: + indices.get_alias: + index: test_index + + - match: {test_index.aliases.test_blias.search_routing: b} + - match: {test_index.aliases.test_blias.index_routing: b} + - is_false: test_index.aliases.test_blias.filter + - match: {test_index.aliases.test_clias.filter.term.field: value} + - is_false: test_index.aliases.test_clias.index_routing + - is_false: test_index.aliases.test_clias.search_routing + +--- +"Create index with write aliases": + - skip: + version: " - 6.99.99" + reason: is_write_index is not implemented in ES <= 6.x + - do: + indices.create: + index: test_index + body: + aliases: + test_alias: {} + test_blias: + is_write_index: false + test_clias: + is_write_index: true + + - do: + indices.get_alias: + index: test_index + + - is_false: test_index.aliases.test_alias.is_write_index + - is_false: test_index.aliases.test_blias.is_write_index + - is_true: test_index.aliases.test_clias.is_write_index + +--- +"Create index with no type mappings": + - do: + catch: /illegal_argument_exception/ + indices.create: + index: test_index + body: + mappings: + "" : {} + +--- +"Create index with invalid mappings": + - do: + catch: /illegal_argument_exception/ + indices.create: + index: test_index + body: + mappings: + test_type: + properties: + "": + type: keyword diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml index 90bb2747a7b..36f45dd0e60 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml @@ -1,26 +1,32 @@ --- setup: + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: - mappings: - doc: {} + mappings: {} - do: indices.create: + 
include_type_name: false index: test_2 body: - mappings: - doc: {} + mappings: {} --- "Get /{index}/_mapping with empty mappings": - do: indices.create: + include_type_name: false index: t - do: indices.get_mapping: + include_type_name: false index: t - match: { t.mappings: {}} @@ -29,116 +35,65 @@ setup: "Get /_mapping": - do: - indices.get_mapping: {} + indices.get_mapping: + include_type_name: false - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc + - is_true: test_1.mappings + - is_true: test_2.mappings --- "Get /{index}/_mapping": - do: indices.get_mapping: - index: test_1 + include_type_name: false + index: test_1 - - is_true: test_1.mappings.doc + - is_true: test_1.mappings - is_false: test_2 + --- -"Get /{index}/_mapping/_all": +"Get /_all/_mapping": - do: indices.get_mapping: - index: test_1 - type: _all + include_type_name: false + index: _all - - is_true: test_1.mappings.doc - - is_false: test_2 + - is_true: test_1.mappings + - is_true: test_2.mappings --- -"Get /{index}/_mapping/*": +"Get /*/_mapping": - do: indices.get_mapping: - index: test_1 - type: '*' + include_type_name: false + index: '*' - - is_true: test_1.mappings.doc - - is_false: test_2 + - is_true: test_1.mappings + - is_true: test_2.mappings --- -"Get /{index}/_mapping/{type}": +"Get /index,index/_mapping": - do: indices.get_mapping: - index: test_1 - type: doc + include_type_name: false + index: test_1,test_2 - - is_true: test_1.mappings.doc - - is_false: test_2 + - is_true: test_1.mappings + - is_true: test_2.mappings --- -"Get /{index}/_mapping/{type*}": +"Get /index*/_mapping/": - do: indices.get_mapping: - index: test_1 - type: 'd*' + include_type_name: false + index: '*2' - - is_true: test_1.mappings.doc - - is_false: test_2 - ---- -"Get /_mapping/{type}": - - - do: - indices.get_mapping: - type: doc - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /_all/_mapping/{type}": - - - do: - indices.get_mapping: - index: _all - type: doc - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /*/_mapping/{type}": - - - do: - indices.get_mapping: - index: '*' - type: doc - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /index,index/_mapping/{type}": - - - do: - indices.get_mapping: - index: test_1,test_2 - type: doc - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /index*/_mapping/{type}": - - - do: - indices.get_mapping: - index: '*2' - type: doc - - - is_true: test_2.mappings.doc + - is_true: test_2.mappings - is_false: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/11_basic_with_types.yml new file mode 100644 index 00000000000..90bb2747a7b --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/11_basic_with_types.yml @@ -0,0 +1,144 @@ +--- +setup: + - do: + indices.create: + index: test_1 + body: + mappings: + doc: {} + - do: + indices.create: + index: test_2 + body: + mappings: + doc: {} +--- +"Get /{index}/_mapping with empty mappings": + + - do: + indices.create: + index: t + + - do: + indices.get_mapping: + index: t + + - match: { t.mappings: {}} + +--- +"Get /_mapping": + + - do: + indices.get_mapping: {} + + - is_true: test_1.mappings.doc + - is_true: test_2.mappings.doc + +--- +"Get /{index}/_mapping": + + - do: + indices.get_mapping: + index: test_1 + + - is_true: test_1.mappings.doc + - 
is_false: test_2 + + +--- +"Get /{index}/_mapping/_all": + + - do: + indices.get_mapping: + index: test_1 + type: _all + + - is_true: test_1.mappings.doc + - is_false: test_2 + +--- +"Get /{index}/_mapping/*": + + - do: + indices.get_mapping: + index: test_1 + type: '*' + + - is_true: test_1.mappings.doc + - is_false: test_2 + +--- +"Get /{index}/_mapping/{type}": + + - do: + indices.get_mapping: + index: test_1 + type: doc + + - is_true: test_1.mappings.doc + - is_false: test_2 + +--- +"Get /{index}/_mapping/{type*}": + + - do: + indices.get_mapping: + index: test_1 + type: 'd*' + + - is_true: test_1.mappings.doc + - is_false: test_2 + +--- +"Get /_mapping/{type}": + + - do: + indices.get_mapping: + type: doc + + - is_true: test_1.mappings.doc + - is_true: test_2.mappings.doc + +--- +"Get /_all/_mapping/{type}": + + - do: + indices.get_mapping: + index: _all + type: doc + + - is_true: test_1.mappings.doc + - is_true: test_2.mappings.doc + +--- +"Get /*/_mapping/{type}": + + - do: + indices.get_mapping: + index: '*' + type: doc + + - is_true: test_1.mappings.doc + - is_true: test_2.mappings.doc + +--- +"Get /index,index/_mapping/{type}": + + - do: + indices.get_mapping: + index: test_1,test_2 + type: doc + + - is_true: test_1.mappings.doc + - is_true: test_2.mappings.doc + +--- +"Get /index*/_mapping/{type}": + + - do: + indices.get_mapping: + index: '*2' + type: doc + + - is_true: test_2.mappings.doc + - is_false: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml index 1d33f2d31bb..7588c661885 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml @@ -1,69 +1,104 @@ --- "Test Create and update mapping": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_index - do: indices.put_mapping: + include_type_name: false index: test_index - type: test_type body: - test_type: - properties: - text1: - type: text - analyzer: whitespace - text2: - type: text - analyzer: whitespace - subfield.text3: - type: text + properties: + text1: + type: text + analyzer: whitespace + text2: + type: text + analyzer: whitespace + subfield.text3: + type: text - do: indices.get_mapping: + include_type_name: false index: test_index - - match: {test_index.mappings.test_type.properties.text1.type: text} - - match: {test_index.mappings.test_type.properties.text1.analyzer: whitespace} - - match: {test_index.mappings.test_type.properties.text2.type: text} - - match: {test_index.mappings.test_type.properties.text2.analyzer: whitespace} + - match: {test_index.mappings.properties.text1.type: text} + - match: {test_index.mappings.properties.text1.analyzer: whitespace} + - match: {test_index.mappings.properties.text2.type: text} + - match: {test_index.mappings.properties.text2.analyzer: whitespace} - do: indices.put_mapping: + include_type_name: false index: test_index - type: test_type body: - test_type: - properties: - text1: - type: text - analyzer: whitespace - fields: - text_raw: - type: keyword + properties: + text1: + type: text + analyzer: whitespace + fields: + text_raw: + type: keyword - do: indices.get_mapping: + include_type_name: false index: test_index - - match: {test_index.mappings.test_type.properties.text1.type: text} - - match: 
{test_index.mappings.test_type.properties.subfield.properties.text3.type: text} - - match: {test_index.mappings.test_type.properties.text1.fields.text_raw.type: keyword} + - match: {test_index.mappings.properties.text1.type: text} + - match: {test_index.mappings.properties.subfield.properties.text3.type: text} + - match: {test_index.mappings.properties.text1.fields.text_raw.type: keyword} --- "Create index with invalid mappings": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_index - do: catch: /illegal_argument_exception/ indices.put_mapping: + include_type_name: false index: test_index - type: test_type body: - test_type: - properties: - "": - type: keyword + properties: + "": + type: keyword + +--- +"PUT mapping with a type and include_type_name: false": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + + - do: + indices.create: + index: index + include_type_name: false + + - do: + catch: /illegal_argument_exception/ + indices.put_mapping: + index: index + type: _doc + include_type_name: false + body: + properties: + bar: + type: float + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/11_basic_with_types.yml new file mode 100644 index 00000000000..1d33f2d31bb --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/11_basic_with_types.yml @@ -0,0 +1,69 @@ +--- +"Test Create and update mapping": + - do: + indices.create: + index: test_index + + - do: + indices.put_mapping: + index: test_index + type: test_type + body: + test_type: + properties: + text1: + type: text + analyzer: whitespace + text2: + type: text + analyzer: whitespace + subfield.text3: + type: text + + - do: + indices.get_mapping: + index: test_index + + - match: {test_index.mappings.test_type.properties.text1.type: text} + - match: {test_index.mappings.test_type.properties.text1.analyzer: whitespace} + - match: {test_index.mappings.test_type.properties.text2.type: text} + - match: {test_index.mappings.test_type.properties.text2.analyzer: whitespace} + + - do: + indices.put_mapping: + index: test_index + type: test_type + body: + test_type: + properties: + text1: + type: text + analyzer: whitespace + fields: + text_raw: + type: keyword + + + - do: + indices.get_mapping: + index: test_index + + - match: {test_index.mappings.test_type.properties.text1.type: text} + - match: {test_index.mappings.test_type.properties.subfield.properties.text3.type: text} + - match: {test_index.mappings.test_type.properties.text1.fields.text_raw.type: keyword} + +--- +"Create index with invalid mappings": + - do: + indices.create: + index: test_index + - do: + catch: /illegal_argument_exception/ + indices.put_mapping: + index: test_index + type: test_type + body: + test_type: + properties: + "": + type: keyword diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/20_no_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/20_no_types.yml deleted file mode 100644 index aa05deb3260..00000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/20_no_types.yml +++ /dev/null @@ -1,334 +0,0 @@ ---- -"Create indices and manage mappings without types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - 
indices.create: - index: index - include_type_name: false - body: - mappings: - properties: - foo: - type: keyword - - - do: - indices.get_mapping: - index: index - include_type_name: false - - - match: { index.mappings.properties.foo.type: "keyword" } - - - do: - indices.put_mapping: - index: index - include_type_name: false - body: - properties: - bar: - type: float - - - do: - indices.get_mapping: - index: index - include_type_name: false - - - match: { index.mappings.properties.foo.type: "keyword" } - - match: { index.mappings.properties.bar.type: "float" } - ---- -"Index explicit IDs without types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - index: - include_type_name: false - index: index - id: 1 - body: { foo: bar } - - - match: { "_index": "index" } - - is_false: _type - - - do: - bulk: - index: index - include_type_name: false - body: | - { "index": { "_id": "2" } } - { "doc": { "foo": "baz" } } - - - match: { "items.0.index._index": "index" } - - is_false: items.0.index._type - ---- -"Index implicit IDs without types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - index: - index: index - include_type_name: false - body: { foo: bar } - - - match: { "_index": "index" } - - is_false: _type - - - do: - bulk: - index: index - include_type_name: false - body: | - { "index": { } } - { "doc": { "foo": "baz" } } - - - match: { "items.0.index._index": "index" } - - is_false: items.0.index._type - ---- -"Mixing include_type_name=false with explicit types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - catch: /illegal_argument_exception/ - index: - index: index - type: type - id: 1 - include_type_name: false - body: { foo: bar } - - - do: - catch: /illegal_argument_exception/ - index: - index: index - type: type - include_type_name: false - body: { foo: bar } - - - do: - catch: /illegal_argument_exception/ - get: - index: index - type: type - id: 1 - include_type_name: false - - - do: - catch: /illegal_argument_exception/ - update: - index: index - type: type - id: 1 - include_type_name: false - body: - doc: { foo: baz } - - - do: - catch: /illegal_argument_exception/ - delete: - index: index - type: type - id: 1 - include_type_name: false - - - do: - catch: /illegal_argument_exception/ - search: - index: index - type: type - include_type_name: false - - - do: - catch: /illegal_argument_exception/ - search: - index: index - type: _doc - include_type_name: false - ---- -"Update API without types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - index: - index: index - id: 1 - include_type_name: false - body: { "foo": "bar" } - - - do: - update: - index: index - id: 1 - include_type_name: false - body: - doc: { "foo": "baz" } - - - match: { "_index": "index" } - - is_false: _type - ---- -"GET API without types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - index: - index: index - id: 1 - include_type_name: false - body: { "foo": "bar" } - - - do: - get: - index: index - id: 1 - 
include_type_name: false - - - match: { "_index": "index" } - - is_false: _type - ---- -"Delete API without types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - index: - index: index - id: 1 - include_type_name: false - body: { "foo": "bar" } - - - do: - delete: - index: index - id: 1 - include_type_name: false - - - match: { "_index": "index" } - - is_false: _type - ---- -"Search without types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - index: - index: index - id: 1 - include_type_name: false - body: { "foo": "bar" } - - - do: - indices.refresh: - index: index - - - do: - search: - index: index - include_type_name: false - - - match: { "hits.total": 1 } - - match: { "hits.hits.0._index": "index" } - - is_false: hits.hits.0._type - ---- -"PUT mapping with a type and include_type_name: false": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - catch: /illegal_argument_exception/ - indices.put_mapping: - index: index - type: _doc - include_type_name: false - body: - properties: - bar: - type: float - ---- -"GET mappings on empty index with the include_type_name=false option": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - indices.get_mapping: - index: index - include_type_name: false - - - match: { index.mappings: {} } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml index e125ec2a70f..c4133e8d01b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml @@ -1,12 +1,19 @@ setup: + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_index1 - do: indices.create: + include_type_name: false index: test_index2 - do: indices.create: + include_type_name: false index: foo @@ -14,34 +21,33 @@ setup: "put one mapping per index": - do: indices.put_mapping: + include_type_name: false index: test_index1 - type: test_type body: - test_type: - properties: - text: - type: text - analyzer: whitespace + properties: + text: + type: text + analyzer: whitespace - do: indices.put_mapping: + include_type_name: false index: test_index2 - type: test_type body: - test_type: - properties: - text: - type: text - analyzer: whitespace + properties: + text: + type: text + analyzer: whitespace - do: - indices.get_mapping: {} + indices.get_mapping: + include_type_name: false - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index1.mappings.properties.text.type: text} + - match: {test_index1.mappings.properties.text.analyzer: whitespace} - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + - match: 
{test_index2.mappings.properties.text.type: text} + - match: {test_index2.mappings.properties.text.analyzer: whitespace} - match: { foo.mappings: {} } @@ -50,73 +56,73 @@ setup: - do: indices.put_mapping: + include_type_name: false index: _all - type: test_type body: - test_type: - properties: - text: - type: text - analyzer: whitespace + properties: + text: + type: text + analyzer: whitespace - do: - indices.get_mapping: {} + indices.get_mapping: + include_type_name: false - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index1.mappings.properties.text.type: text} + - match: {test_index1.mappings.properties.text.analyzer: whitespace} - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index2.mappings.properties.text.type: text} + - match: {test_index2.mappings.properties.text.analyzer: whitespace} - - match: {foo.mappings.test_type.properties.text.type: text} - - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} + - match: {foo.mappings.properties.text.type: text} + - match: {foo.mappings.properties.text.analyzer: whitespace} --- "put mapping in * index": - do: indices.put_mapping: + include_type_name: false index: "*" - type: test_type body: - test_type: - properties: - text: - type: text - analyzer: whitespace + properties: + text: + type: text + analyzer: whitespace - do: - indices.get_mapping: {} + indices.get_mapping: + include_type_name: false - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index1.mappings.properties.text.type: text} + - match: {test_index1.mappings.properties.text.analyzer: whitespace} - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index2.mappings.properties.text.type: text} + - match: {test_index2.mappings.properties.text.analyzer: whitespace} - - match: {foo.mappings.test_type.properties.text.type: text} - - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} + - match: {foo.mappings.properties.text.type: text} + - match: {foo.mappings.properties.text.analyzer: whitespace} --- "put mapping in prefix* index": - do: indices.put_mapping: + include_type_name: false index: "test_index*" - type: test_type body: - test_type: - properties: - text: - type: text - analyzer: whitespace + properties: + text: + type: text + analyzer: whitespace - do: - indices.get_mapping: {} + indices.get_mapping: + include_type_name: false - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index1.mappings.properties.text.type: text} + - match: {test_index1.mappings.properties.text.analyzer: whitespace} - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index2.mappings.properties.text.type: text} + - match: {test_index2.mappings.properties.text.analyzer: whitespace} - match: { foo.mappings: {} } @@ -124,67 +130,34 @@ setup: "put mapping in list of indices": - do: indices.put_mapping: + include_type_name: false index: [test_index1, test_index2] - type: 
test_type body: - test_type: - properties: - text: - type: text - analyzer: whitespace + properties: + text: + type: text + analyzer: whitespace - do: - indices.get_mapping: {} + indices.get_mapping: + include_type_name: false - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index1.mappings.properties.text.type: text} + - match: {test_index1.mappings.properties.text.analyzer: whitespace} - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index2.mappings.properties.text.type: text} + - match: {test_index2.mappings.properties.text.analyzer: whitespace} - match: { foo.mappings: {} } ---- -"put mapping with blank index": - - do: - indices.put_mapping: - type: test_type - body: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - indices.get_mapping: {} - - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {foo.mappings.test_type.properties.text.type: text} - - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} - ---- -"put mapping with missing type": - - - - do: - catch: param - indices.put_mapping: {} - --- "post a mapping with default analyzer twice": - do: indices.put_mapping: + include_type_name: false index: test_index1 - type: test_type body: - test_type: dynamic: false properties: text: @@ -193,18 +166,18 @@ setup: - do: indices.put_mapping: + include_type_name: false index: test_index1 - type: test_type body: - test_type: - dynamic: false - properties: - text: - analyzer: default - type: text + dynamic: false + properties: + text: + analyzer: default + type: text - do: - indices.get_mapping: {} + indices.get_mapping: + include_type_name: false - - match: {test_index1.mappings.test_type.properties.text.type: text} + - match: {test_index1.mappings.properties.text.type: text} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options_with_types.yml new file mode 100644 index 00000000000..e125ec2a70f --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options_with_types.yml @@ -0,0 +1,210 @@ +setup: + - do: + indices.create: + index: test_index1 + - do: + indices.create: + index: test_index2 + - do: + indices.create: + index: foo + + +--- +"put one mapping per index": + - do: + indices.put_mapping: + index: test_index1 + type: test_type + body: + test_type: + properties: + text: + type: text + analyzer: whitespace + - do: + indices.put_mapping: + index: test_index2 + type: test_type + body: + test_type: + properties: + text: + type: text + analyzer: whitespace + + + - do: + indices.get_mapping: {} + + - match: {test_index1.mappings.test_type.properties.text.type: text} + - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {test_index2.mappings.test_type.properties.text.type: text} + - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + + - match: { foo.mappings: {} } + +--- +"put mapping 
in _all index": + + - do: + indices.put_mapping: + index: _all + type: test_type + body: + test_type: + properties: + text: + type: text + analyzer: whitespace + + - do: + indices.get_mapping: {} + + - match: {test_index1.mappings.test_type.properties.text.type: text} + - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {test_index2.mappings.test_type.properties.text.type: text} + - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {foo.mappings.test_type.properties.text.type: text} + - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} + +--- +"put mapping in * index": + - do: + indices.put_mapping: + index: "*" + type: test_type + body: + test_type: + properties: + text: + type: text + analyzer: whitespace + + - do: + indices.get_mapping: {} + + - match: {test_index1.mappings.test_type.properties.text.type: text} + - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {test_index2.mappings.test_type.properties.text.type: text} + - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {foo.mappings.test_type.properties.text.type: text} + - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} + +--- +"put mapping in prefix* index": + - do: + indices.put_mapping: + index: "test_index*" + type: test_type + body: + test_type: + properties: + text: + type: text + analyzer: whitespace + + - do: + indices.get_mapping: {} + + - match: {test_index1.mappings.test_type.properties.text.type: text} + - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {test_index2.mappings.test_type.properties.text.type: text} + - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + + - match: { foo.mappings: {} } + +--- +"put mapping in list of indices": + - do: + indices.put_mapping: + index: [test_index1, test_index2] + type: test_type + body: + test_type: + properties: + text: + type: text + analyzer: whitespace + + - do: + indices.get_mapping: {} + + - match: {test_index1.mappings.test_type.properties.text.type: text} + - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {test_index2.mappings.test_type.properties.text.type: text} + - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + + - match: { foo.mappings: {} } + +--- +"put mapping with blank index": + - do: + indices.put_mapping: + type: test_type + body: + test_type: + properties: + text: + type: text + analyzer: whitespace + + - do: + indices.get_mapping: {} + + - match: {test_index1.mappings.test_type.properties.text.type: text} + - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {test_index2.mappings.test_type.properties.text.type: text} + - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {foo.mappings.test_type.properties.text.type: text} + - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} + +--- +"put mapping with missing type": + + + - do: + catch: param + indices.put_mapping: {} + +--- +"post a mapping with default analyzer twice": + + - do: + indices.put_mapping: + index: test_index1 + type: test_type + body: + test_type: + dynamic: false + properties: + text: + analyzer: default + type: text + + - do: + indices.put_mapping: + index: test_index1 + type: test_type + body: + test_type: + dynamic: false + properties: + text: + 
analyzer: default + type: text + + - do: + indices.get_mapping: {} + + - match: {test_index1.mappings.test_type.properties.text.type: text} + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml index a5f50464794..1b5f9856391 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml @@ -112,6 +112,16 @@ setup: - match: { hits.hits.0._source.bigint: 72057594037927936 } - is_false: hits.hits.0._source.include.field2 + +--- +"_source filtering on bigint": +- do: + search: + body: + _source: ["bigint"] + query: { match_all: {} } +- match: { hits.hits.0._source.bigint: 72057594037927936 } + --- "fields in body": - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/220_no_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/220_no_types.yml new file mode 100644 index 00000000000..b2489d2ad01 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/220_no_types.yml @@ -0,0 +1,46 @@ +--- +"No type returned": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + + - do: + index: + include_type_name: false + index: test_1 + id: 1 + body: {} + - do: + indices.refresh: {} + + - do: + search: + include_type_name: false + index: test_1 + + - length: { hits.hits: 1 } + - match: { hits.hits.0._index: "test_1" } + - is_false: "hits.hits.0._type" + - match: { hits.hits.0._id: "1" } + +--- +"Mixing include_type_name=false with explicit types": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + + - do: + catch: /illegal_argument_exception/ + search: + index: index + type: type + include_type_name: false + + - do: + catch: /illegal_argument_exception/ + search: + index: index + type: _doc + include_type_name: false diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml new file mode 100644 index 00000000000..42207a073fb --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml @@ -0,0 +1,299 @@ + +--- +"Search by suggestion and by keyword sub-field should work": + + - skip: + version: " - 6.99.99" + reason: "Search by suggestion with multi-fields was introduced in 7.0.0" + + - do: + indices.create: + index: completion_with_sub_keyword + body: + mappings: + test: + "properties": + "suggest_1": + "type" : "completion" + "fields": + "text_raw": + "type" : "keyword" + + - do: + index: + index: completion_with_sub_keyword + type: test + id: 1 + body: + suggest_1: "bar" + + - do: + index: + index: completion_with_sub_keyword + type: test + id: 2 + body: + suggest_1: "baz" + + - do: + indices.refresh: {} + + - do: + search: + index: completion_with_sub_keyword + body: + suggest: + result: + text: "b" + completion: + field: suggest_1 + + - length: { suggest.result: 1 } + - length: { suggest.result.0.options: 2 } + + + - do: + search: + index: completion_with_sub_keyword + body: + query: { term: { suggest_1.text_raw: "bar" }} + + - match: { hits.total: 1 } + + + +--- +"Search by suggestion on sub field should work": + + - skip: + version: " - 6.99.99" + reason: "Search by suggestion with multi-fields was introduced in 7.0.0" +
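+ # suggest_1 is a completion field with a completion sub-field, so suggestions can target suggest_1.suggest_2 directly + + -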
do: + indices.create: + index: completion_with_sub_completion + body: + mappings: + test: + "properties": + "suggest_1": + "type": "completion" + "fields": + "suggest_2": + "type": "completion" + + - do: + index: + index: completion_with_sub_completion + type: test + id: 1 + body: + suggest_1: "bar" + + - do: + index: + index: completion_with_sub_completion + type: test + id: 2 + body: + suggest_1: "baz" + + - do: + indices.refresh: {} + + - do: + search: + index: completion_with_sub_completion + body: + suggest: + result: + text: "b" + completion: + field: suggest_1.suggest_2 + + - length: { suggest.result: 1 } + - length: { suggest.result.0.options: 2 } + +--- +"Search by suggestion on sub field with context should work": + + - skip: + version: " - 6.99.99" + reason: "Search by suggestion with multi-fields was introduced in 7.0.0" + + - do: + indices.create: + index: completion_with_context + body: + mappings: + test: + "properties": + "suggest_1": + "type": "completion" + "contexts": + - + "name": "color" + "type": "category" + "fields": + "suggest_2": + "type": "completion" + "contexts": + - + "name": "color" + "type": "category" + + + - do: + index: + index: completion_with_context + type: test + id: 1 + body: + suggest_1: + input: "foo red" + contexts: + color: "red" + + - do: + index: + index: completion_with_context + type: test + id: 2 + body: + suggest_1: + input: "foo blue" + contexts: + color: "blue" + + - do: + indices.refresh: {} + + - do: + search: + index: completion_with_context + body: + suggest: + result: + prefix: "foo" + completion: + field: suggest_1.suggest_2 + contexts: + color: "red" + + - length: { suggest.result: 1 } + - length: { suggest.result.0.options: 1 } + - match: { suggest.result.0.options.0.text: "foo red" } + + +--- +"Search by suggestion on sub field with weight should work": + + - skip: + version: " - 6.99.99" + reason: "Search by suggestion with multi-fields was introduced in 7.0.0" + + - do: + indices.create: + index: completion_with_weight + body: + mappings: + test: + "properties": + "suggest_1": + "type": "completion" + "fields": + "suggest_2": + "type": "completion" + + - do: + index: + index: completion_with_weight + type: test + id: 1 + body: + suggest_1: + input: "bar" + weight: 2 + + - do: + index: + index: completion_with_weight + type: test + id: 2 + body: + suggest_1: + input: "baz" + weight: 3 + + - do: + indices.refresh: {} + + - do: + search: + index: completion_with_weight + body: + suggest: + result: + text: "b" + completion: + field: suggest_1.suggest_2 + + - length: { suggest.result: 1 } + - length: { suggest.result.0.options: 2 } + - match: { suggest.result.0.options.0.text: "baz" } + - match: { suggest.result.0.options.1.text: "bar" } + +--- +"Search by suggestion on geofield-hash on sub field should work": + + - skip: + version: " - 6.99.99" + reason: "Search by suggestion with multi-fields was introduced in 7.0.0" + + - do: + indices.create: + index: geofield_with_completion + body: + mappings: + test: + "properties": + "geofield": + "type": "geo_point" + "fields": + "suggest_1": + "type": "completion" + + - do: + index: + index: geofield_with_completion + type: test + id: 1 + body: + geofield: "hgjhrwysvqw7" + #41.12,-72.34,12 + + - do: + index: + index: geofield_with_completion + type: test + id: 2 + body: + geofield: "hgm4psywmkn7" + #41.12,-71.34,12 + + - do: + indices.refresh: {} + + - do: + search: + index: geofield_with_completion + body: + suggest: + result: + prefix: "hgm" + completion: + field: geofield.suggest_1 +
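+ # only the geohash beginning with "hgm" should produce a suggestion + + -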
length: { suggest.result: 1 } + - length: { suggest.result.0.options: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml index b4581edd350..dd5ada3b1f1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml @@ -1,10 +1,14 @@ --- "Partial document": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: foo: bar @@ -13,8 +17,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: @@ -23,14 +27,14 @@ one: 3 - match: { _index: test_1 } - - match: { _type: test } + - is_false: "_type" - match: { _id: "1" } - match: { _version: 2 } - do: get: + include_type_name: false index: test_1 - type: test id: 1 - match: { _source.foo: baz } @@ -38,3 +42,12 @@ - match: { _source.nested.one: 3 } - match: { _source.nested.two: 2 } + - do: + catch: /illegal_argument_exception/ + update: + index: index + type: type + id: 1 + include_type_name: false + body: + doc: { foo: baz } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml index eb2e4ff9a91..69f8f7c6427 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml @@ -1,8 +1,13 @@ --- "Update check shard header": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: foobar body: settings: @@ -15,22 +20,22 @@ - do: index: + include_type_name: false index: foobar - type: baz id: 1 body: { foo: bar } - do: update: + include_type_name: false index: foobar - type: baz id: 1 body: doc: foo: baz - match: { _index: foobar } - - match: { _type: baz } + - is_false: "_type" - match: { _id: "1"} - match: { _version: 2} - match: { _shards.total: 1} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml index 9adada6d54b..db4b56eedd3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml @@ -1,10 +1,14 @@ --- "Update result field": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: bar } @@ -15,8 +19,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: bar } @@ -27,8 +31,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: bar } @@ -40,8 +44,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: baz } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/13_legacy_doc.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/13_legacy_doc.yml new file mode 100644 index 00000000000..b4581edd350 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/13_legacy_doc.yml @@ -0,0 +1,40 @@ +--- +"Partial document": + + - do: + index: + index: test_1 + type: test + id: 1 + body: + foo: bar + 
count: 1 + nested: { one: 1, two: 2 } + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: + foo: baz + nested: + one: 3 + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: "1" } + - match: { _version: 2 } + + - do: + get: + index: test_1 + type: test + id: 1 + + - match: { _source.foo: baz } + - match: { _source.count: 1 } + - match: { _source.nested.one: 3 } + - match: { _source.nested.two: 2 } + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/14_shard_header_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/14_shard_header_with_types.yml new file mode 100644 index 00000000000..eb2e4ff9a91 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/14_shard_header_with_types.yml @@ -0,0 +1,39 @@ +--- +"Update check shard header": + + - do: + indices.create: + index: foobar + body: + settings: + number_of_shards: "1" + number_of_replicas: "0" + + - do: + cluster.health: + wait_for_status: green + + - do: + index: + index: foobar + type: baz + id: 1 + body: { foo: bar } + + - do: + update: + index: foobar + type: baz + id: 1 + body: + doc: + foo: baz + + - match: { _index: foobar } + - match: { _type: baz } + - match: { _id: "1"} + - match: { _version: 2} + - match: { _shards.total: 1} + - match: { _shards.successful: 1} + - match: { _shards.failed: 0} + - is_false: _shards.pending diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/15_result_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/15_result_with_types.yml new file mode 100644 index 00000000000..9adada6d54b --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/15_result_with_types.yml @@ -0,0 +1,52 @@ +--- +"Update result field": + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: bar } + doc_as_upsert: true + + - match: { _version: 1 } + - match: { result: created } + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: bar } + doc_as_upsert: true + + - match: { _version: 1 } + - match: { result: noop } + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: bar } + doc_as_upsert: true + detect_noop: false + + - match: { _version: 2 } + - match: { result: updated } + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: baz } + doc_as_upsert: true + detect_noop: true + + - match: { _version: 3 } + - match: { result: updated } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml index f34e030ff66..1595e9d6f8a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml @@ -1,10 +1,14 @@ --- "Doc upsert": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: bar, count: 1 } @@ -12,8 +16,8 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 - match: { _source.foo: baz } @@ -22,8 +26,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: bar, count: 1 } @@ -31,8 +35,8 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 - match: { _source.foo: bar } diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/update/21_doc_upsert_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/21_doc_upsert_with_types.yml new file mode 100644 index 00000000000..f34e030ff66 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/21_doc_upsert_with_types.yml @@ -0,0 +1,41 @@ +--- +"Doc upsert": + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: bar, count: 1 } + upsert: { foo: baz } + + - do: + get: + index: test_1 + type: test + id: 1 + + - match: { _source.foo: baz } + - is_false: _source.count + + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: bar, count: 1 } + upsert: { foo: baz } + + - do: + get: + index: test_1 + type: test + id: 1 + + - match: { _source.foo: bar } + - match: { _source.count: 1 } + + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml index 7585b9f3e0b..884fa3e16f6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml @@ -1,10 +1,14 @@ --- "Doc as upsert": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: bar, count: 1 } @@ -12,8 +16,8 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 - match: { _source.foo: bar } @@ -22,8 +26,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { count: 2 } @@ -31,8 +35,8 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 - match: { _source.foo: bar } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/24_doc_as_upsert_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/24_doc_as_upsert_with_types.yml new file mode 100644 index 00000000000..7585b9f3e0b --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/24_doc_as_upsert_with_types.yml @@ -0,0 +1,41 @@ +--- +"Doc as upsert": + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: bar, count: 1 } + doc_as_upsert: true + + - do: + get: + index: test_1 + type: test + id: 1 + + - match: { _source.foo: bar } + - match: { _source.count: 1 } + + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { count: 2 } + doc_as_upsert: true + + - do: + get: + index: test_1 + type: test + id: 1 + + - match: { _source.foo: bar } + - match: { _source.count: 2 } + + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/30_internal_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/30_internal_version.yml index 17c4806c693..20ff2020932 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/30_internal_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/30_internal_version.yml @@ -1,11 +1,15 @@ --- "Internal version": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: catch: missing update: + include_type_name: false index: test_1 - type: test id: 1 version: 1 body: @@ -13,8 +17,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: baz } @@ -22,8 +26,8 @@ - do: catch: conflict update: + include_type_name: false index: test_1 - type: test id: 1 
version: 2 body: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/31_internal_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/31_internal_version_with_types.yml new file mode 100644 index 00000000000..17c4806c693 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/31_internal_version_with_types.yml @@ -0,0 +1,30 @@ +--- +"Internal version": + + - do: + catch: missing + update: + index: test_1 + type: test + id: 1 + version: 1 + body: + doc: { foo: baz } + + - do: + index: + index: test_1 + type: test + id: 1 + body: + doc: { foo: baz } + + - do: + catch: conflict + update: + index: test_1 + type: test + id: 1 + version: 2 + body: + doc: { foo: baz } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/35_other_versions.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/35_other_versions.yml index c0ec082b91a..904d3ce4b4f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/35_other_versions.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/35_other_versions.yml @@ -1,11 +1,15 @@ --- "Not supported versions": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: catch: /Validation|Invalid/ update: + include_type_name: false index: test_1 - type: test id: 1 version: 2 version_type: external @@ -16,8 +20,8 @@ - do: catch: /Validation|Invalid/ update: + include_type_name: false index: test_1 - type: test id: 1 version: 2 version_type: external_gte diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/36_other_versions_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/36_other_versions_with_types.yml new file mode 100644 index 00000000000..c0ec082b91a --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/36_other_versions_with_types.yml @@ -0,0 +1,27 @@ +--- +"Not supported versions": + + - do: + catch: /Validation|Invalid/ + update: + index: test_1 + type: test + id: 1 + version: 2 + version_type: external + body: + doc: { foo: baz } + upsert: { foo: bar } + + - do: + catch: /Validation|Invalid/ + update: + index: test_1 + type: test + id: 1 + version: 2 + version_type: external_gte + body: + doc: { foo: baz } + upsert: { foo: bar } + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yml index 977db506710..643d79239d0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yml @@ -1,8 +1,13 @@ --- "Routing": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: settings: @@ -17,8 +22,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 body: @@ -27,8 +32,8 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 stored_fields: _routing @@ -38,16 +43,16 @@ - do: catch: missing update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: baz } - do: update: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 _source: foo diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/41_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/41_routing_with_types.yml new file 
mode 100644 index 00000000000..977db506710 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/41_routing_with_types.yml @@ -0,0 +1,58 @@ +--- +"Routing": + + - do: + indices.create: + index: test_1 + body: + settings: + index: + number_of_shards: 5 + number_of_routing_shards: 5 + number_of_replicas: 0 + + - do: + cluster.health: + wait_for_status: green + + - do: + update: + index: test_1 + type: test + id: 1 + routing: 5 + body: + doc: { foo: baz } + upsert: { foo: bar } + + - do: + get: + index: test_1 + type: test + id: 1 + routing: 5 + stored_fields: _routing + + - match: { _routing: "5"} + + - do: + catch: missing + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: baz } + + - do: + update: + index: test_1 + type: test + id: 1 + routing: 5 + _source: foo + body: + doc: { foo: baz } + + - match: { get._source.foo: baz } + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yml index 8ac1568a127..8039edc2e3a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yml @@ -1,8 +1,13 @@ --- "Refresh": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: settings: @@ -11,8 +16,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: baz } @@ -20,6 +25,7 @@ - do: search: + include_type_name: false index: test_1 body: query: { term: { _id: 1 }} @@ -28,8 +34,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 2 refresh: true body: @@ -39,6 +45,7 @@ - do: search: + include_type_name: false index: test_1 body: query: { term: { _id: 2 }} @@ -47,10 +54,15 @@ --- "When refresh url parameter is an empty string that means \"refresh immediately\"": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 refresh: true body: { foo: bar } @@ -58,8 +70,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 refresh: "" body: @@ -68,6 +80,7 @@ - do: search: + include_type_name: false index: test_1 body: query: { term: { cat: dog }} @@ -76,10 +89,15 @@ --- "refresh=wait_for waits until changes are visible in search": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: update_60_refresh_1 - type: test id: update_60_refresh_id1 body: { foo: bar } refresh: true @@ -87,6 +105,7 @@ - do: search: + include_type_name: false index: update_60_refresh_1 body: query: { term: { _id: update_60_refresh_id1 }} @@ -94,8 +113,8 @@ - do: update: + include_type_name: false index: update_60_refresh_1 - type: test id: update_60_refresh_id1 refresh: wait_for body: @@ -104,6 +123,7 @@ - do: search: + include_type_name: false index: update_60_refresh_1 body: query: { match: { test: asdf } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/61_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/61_refresh_with_types.yml new file mode 100644 index 00000000000..8ac1568a127 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/61_refresh_with_types.yml @@ -0,0 +1,110 @@ +--- +"Refresh": + + - do: + indices.create: + index: 
test_1 + body: + settings: + index.refresh_interval: -1 + number_of_replicas: 0 + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: baz } + upsert: { foo: bar } + + - do: + search: + index: test_1 + body: + query: { term: { _id: 1 }} + + - match: { hits.total: 0 } + + - do: + update: + index: test_1 + type: test + id: 2 + refresh: true + body: + doc: { foo: baz } + upsert: { foo: bar } + - is_true: forced_refresh + + - do: + search: + index: test_1 + body: + query: { term: { _id: 2 }} + + - match: { hits.total: 1 } + +--- +"When refresh url parameter is an empty string that means \"refresh immediately\"": + - do: + index: + index: test_1 + type: test + id: 1 + refresh: true + body: { foo: bar } + - is_true: forced_refresh + + - do: + update: + index: test_1 + type: test + id: 1 + refresh: "" + body: + doc: {cat: dog} + - is_true: forced_refresh + + - do: + search: + index: test_1 + body: + query: { term: { cat: dog }} + + - match: { hits.total: 1 } + +--- +"refresh=wait_for waits until changes are visible in search": + - do: + index: + index: update_60_refresh_1 + type: test + id: update_60_refresh_id1 + body: { foo: bar } + refresh: true + - is_true: forced_refresh + + - do: + search: + index: update_60_refresh_1 + body: + query: { term: { _id: update_60_refresh_id1 }} + - match: { hits.total: 1 } + + - do: + update: + index: update_60_refresh_1 + type: test + id: update_60_refresh_id1 + refresh: wait_for + body: + doc: { test: asdf } + - is_false: forced_refresh + + - do: + search: + index: update_60_refresh_1 + body: + query: { match: { test: asdf } } + - match: { hits.total: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml index 4bb22e6b801..c69984e5cde 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml @@ -1,10 +1,14 @@ --- "Source filtering": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: update: + include_type_name: false index: test_1 - type: test id: 1 _source: [foo, bar] body: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/81_source_filtering_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/81_source_filtering_with_types.yml new file mode 100644 index 00000000000..4bb22e6b801 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/81_source_filtering_with_types.yml @@ -0,0 +1,19 @@ +--- +"Source filtering": + + - do: + update: + index: test_1 + type: test + id: 1 + _source: [foo, bar] + body: + doc: { foo: baz } + upsert: { foo: bar } + + - match: { get._source.foo: bar } + - is_false: get._source.bar + +# TODO: +# +# - Add _routing diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yml index f7791d09863..7838c200851 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yml @@ -7,12 +7,13 @@ - do: indices.create: + include_type_name: false index: test_1 - do: update: + include_type_name: false index: test_1 - type: test id: 1 parent: 5 fields: [ _routing ] @@ -24,8 +25,8 @@ - do: get: + include_type_name: false index: test_1 - type: test 
id: 1 parent: 5 stored_fields: [ _routing ] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/86_fields_meta_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/86_fields_meta_with_types.yml new file mode 100644 index 00000000000..f7791d09863 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/86_fields_meta_with_types.yml @@ -0,0 +1,33 @@ +--- +"Metadata Fields": + + - skip: + version: "all" + reason: "Update doesn't return metadata fields, waiting for #3259" + + - do: + indices.create: + index: test_1 + + - do: + update: + index: test_1 + type: test + id: 1 + parent: 5 + fields: [ _routing ] + body: + doc: { foo: baz } + upsert: { foo: bar } + + - match: { get._routing: "5" } + + - do: + get: + index: test_1 + type: test + id: 1 + parent: 5 + stored_fields: [ _routing ] + + diff --git a/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java b/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java new file mode 100644 index 00000000000..55b78739d1d --- /dev/null +++ b/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java @@ -0,0 +1,87 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.lucene.queries; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.TermStates; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.spans.SpanQuery; +import org.apache.lucene.search.spans.SpanWeight; +import org.apache.lucene.search.spans.Spans; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; +import java.util.Set; + +/** + * A {@link SpanQuery} that matches no documents. 
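+ * The {@code reason} is included in the {@link #toString(String)} output to explain why the query matches nothing.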
+ */ +public class SpanMatchNoDocsQuery extends SpanQuery { + private final String field; + private final String reason; + + public SpanMatchNoDocsQuery(String field, String reason) { + this.field = field; + this.reason = reason; + } + + @Override + public String getField() { + return field; + } + + @Override + public String toString(String field) { + return "SpanMatchNoDocsQuery(\"" + reason + "\")"; + } + + @Override + public boolean equals(Object o) { + return sameClassAs(o); + } + + @Override + public int hashCode() { + return classHash(); + } + + @Override + public SpanWeight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { + return new SpanWeight(this, searcher, Collections.emptyMap(), boost) { + @Override + public void extractTermStates(Map<Term, TermStates> contexts) {} + + @Override + public Spans getSpans(LeafReaderContext ctx, Postings requiredPostings) { + // a null Spans tells Lucene that this weight matches nothing in the segment + return null; + } + + @Override + public void extractTerms(Set<Term> terms) {} + + @Override + public boolean isCacheable(LeafReaderContext ctx) { + return true; + } + }; + } +} diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index e6939edbd89..5a921098b4c 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -103,6 +103,8 @@ public class Version implements Comparable<Version>, ToXContentFragment { public static final Version V_6_4_1 = new Version(V_6_4_1_ID, org.apache.lucene.util.Version.LUCENE_7_4_0); public static final int V_6_4_2_ID = 6040299; public static final Version V_6_4_2 = new Version(V_6_4_2_ID, org.apache.lucene.util.Version.LUCENE_7_4_0); + public static final int V_6_4_3_ID = 6040399; + public static final Version V_6_4_3 = new Version(V_6_4_3_ID, org.apache.lucene.util.Version.LUCENE_7_4_0); public static final int V_6_5_0_ID = 6050099; public static final Version V_6_5_0 = new Version(V_6_5_0_ID, org.apache.lucene.util.Version.LUCENE_7_5_0); public static final int V_7_0_0_alpha1_ID = 7000001; @@ -125,6 +127,8 @@ public class Version implements Comparable<Version>, ToXContentFragment { return V_7_0_0_alpha1; case V_6_5_0_ID: return V_6_5_0; + case V_6_4_3_ID: + return V_6_4_3; case V_6_4_2_ID: return V_6_4_2; case V_6_4_1_ID: diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java index d02173ca370..cf5f94a9738 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java @@ -22,7 +22,6 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.Scheduler; import java.util.concurrent.CountDownLatch; @@ -80,7 +79,7 @@ public final class BulkRequestHandler { latch.countDown(); } } - }, Settings.EMPTY); + }); bulkRequestSetupSuccessful = true; if (concurrentRequests == 0) { latch.await(); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/Retry.java b/server/src/main/java/org/elasticsearch/action/bulk/Retry.java index 75a1a2d5f8d..6ec5017e832 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/Retry.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/Retry.java @@ -19,10 +19,9 @@ package
org.elasticsearch.action.bulk; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.rest.RestStatus; @@ -54,11 +53,10 @@ public class Retry { * @param consumer The consumer to which apply the request and listener * @param bulkRequest The bulk request that should be executed. * @param listener A listener that is invoked when the bulk request finishes or completes with an exception. The listener is not - * @param settings settings */ public void withBackoff(BiConsumer> consumer, BulkRequest bulkRequest, - ActionListener listener, Settings settings) { - RetryHandler r = new RetryHandler(backoffPolicy, consumer, listener, settings, scheduler); + ActionListener listener) { + RetryHandler r = new RetryHandler(backoffPolicy, consumer, listener, scheduler); r.execute(bulkRequest); } @@ -68,20 +66,19 @@ public class Retry { * * @param consumer The consumer to which apply the request and listener * @param bulkRequest The bulk request that should be executed. - * @param settings settings * @return a future representing the bulk response returned by the client. */ public PlainActionFuture withBackoff(BiConsumer> consumer, - BulkRequest bulkRequest, Settings settings) { + BulkRequest bulkRequest) { PlainActionFuture future = PlainActionFuture.newFuture(); - withBackoff(consumer, bulkRequest, future, settings); + withBackoff(consumer, bulkRequest, future); return future; } static class RetryHandler implements ActionListener { private static final RestStatus RETRY_STATUS = RestStatus.TOO_MANY_REQUESTS; + private static final Logger logger = LogManager.getLogger(RetryHandler.class); - private final Logger logger; private final Scheduler scheduler; private final BiConsumer> consumer; private final ActionListener listener; @@ -95,11 +92,10 @@ public class Retry { private volatile ScheduledFuture scheduledRequestFuture; RetryHandler(BackoffPolicy backoffPolicy, BiConsumer> consumer, - ActionListener listener, Settings settings, Scheduler scheduler) { + ActionListener listener, Scheduler scheduler) { this.backoff = backoffPolicy.iterator(); this.consumer = consumer; this.listener = listener; - this.logger = Loggers.getLogger(getClass(), settings); this.scheduler = scheduler; // in contrast to System.currentTimeMillis(), nanoTime() uses a monotonic clock under the hood this.startTimestampNanos = System.nanoTime(); diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index b8d1f477ac1..64e411d0fe2 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -70,9 +70,8 @@ public class TransportFieldCapabilitiesAction extends HandledTransportAction indexNameExpressionResolver.hasIndexOrAlias(idx, clusterState)); final OriginalIndices localIndices = remoteClusterIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); final String[] concreteIndices; - if (remoteClusterIndices.isEmpty() == false && localIndices.indices().length == 0) { - // in the 
case we have one or more remote indices but no local we don't expand to all local indices and just do remote - // indices + if (localIndices == null) { + // in the case we have one or more remote indices but no local we don't expand to all local indices and just do remote indices concreteIndices = Strings.EMPTY_ARRAY; } else { concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, localIndices); diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java index 18f33ab397f..4ce810be931 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.ObjectMapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; @@ -86,6 +87,26 @@ public class TransportFieldCapabilitiesIndexAction extends TransportSingleShardA if (indicesService.isMetaDataField(field) || fieldPredicate.test(ft.name())) { FieldCapabilities fieldCap = new FieldCapabilities(field, ft.typeName(), ft.isSearchable(), ft.isAggregatable()); responseMap.put(field, fieldCap); + } else { + continue; + } + // add nested and object fields + int dotIndex = ft.name().lastIndexOf('.'); + while (dotIndex > -1) { + String parentField = ft.name().substring(0, dotIndex); + if (responseMap.containsKey(parentField)) { + // we added this path on another field already + break; + } + // checks if the parent field contains sub-fields + if (mapperService.fullName(parentField) == null) { + // no field type, it must be an object field + ObjectMapper mapper = mapperService.getObjectMapper(parentField); + String type = mapper.nested().isNested() ? 
"nested" : "object"; + FieldCapabilities fieldCap = new FieldCapabilities(parentField, type, false, false); + responseMap.put(parentField, fieldCap); + } + dotIndex = parentField.lastIndexOf('.'); } } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java index 430da9955ba..c081707f4db 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java @@ -24,12 +24,16 @@ import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.ingest.CompoundProcessor; +import org.elasticsearch.ingest.PipelineProcessor; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; +import java.util.Collections; +import java.util.IdentityHashMap; import java.util.List; +import java.util.Set; -import static org.elasticsearch.action.ingest.TrackingResultProcessor.decorate; +import static org.elasticsearch.ingest.TrackingResultProcessor.decorate; class SimulateExecutionService { @@ -42,11 +46,15 @@ class SimulateExecutionService { } SimulateDocumentResult executeDocument(Pipeline pipeline, IngestDocument ingestDocument, boolean verbose) { + // Prevent cycles in pipeline decoration + final Set pipelinesSeen = Collections.newSetFromMap(new IdentityHashMap<>()); if (verbose) { List processorResultList = new ArrayList<>(); - CompoundProcessor verbosePipelineProcessor = decorate(pipeline.getCompoundProcessor(), processorResultList); + CompoundProcessor verbosePipelineProcessor = decorate(pipeline.getCompoundProcessor(), processorResultList, pipelinesSeen); try { - verbosePipelineProcessor.execute(ingestDocument); + Pipeline verbosePipeline = new Pipeline(pipeline.getId(), pipeline.getDescription(), pipeline.getVersion(), + verbosePipelineProcessor); + ingestDocument.executePipeline(verbosePipeline); return new SimulateDocumentVerboseResult(processorResultList); } catch (Exception e) { return new SimulateDocumentVerboseResult(processorResultList); diff --git a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java index d3a54bf7e45..62fb866ee11 100644 --- a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java +++ b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java @@ -29,7 +29,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -48,7 +47,6 @@ public class TransportMainAction extends HandledTransportAction listener) { ClusterState clusterState = clusterService.state(); assert Node.NODE_NAME_SETTING.exists(settings); - final boolean available = clusterState.getBlocks().hasGlobalBlock(RestStatus.SERVICE_UNAVAILABLE) == false; listener.onResponse( new MainResponse(Node.NODE_NAME_SETTING.get(settings), Version.CURRENT, clusterState.getClusterName(), clusterState.metaData().clusterUUID(), Build.CURRENT)); diff --git a/server/src/main/java/org/elasticsearch/action/resync/ResyncReplicationRequest.java 
b/server/src/main/java/org/elasticsearch/action/resync/ResyncReplicationRequest.java index d4e2c652fa8..a53766af7cf 100644 --- a/server/src/main/java/org/elasticsearch/action/resync/ResyncReplicationRequest.java +++ b/server/src/main/java/org/elasticsearch/action/resync/ResyncReplicationRequest.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.resync; import org.elasticsearch.Version; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.replication.ReplicatedWriteRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -28,6 +29,7 @@ import org.elasticsearch.index.translog.Translog; import java.io.IOException; import java.util.Arrays; +import java.util.Objects; /** * Represents a batch of operations sent from the primary to its replicas during the primary-replica resync. @@ -36,15 +38,17 @@ public final class ResyncReplicationRequest extends ReplicatedWriteRequest indexNameExpressionResolver.hasIndexOrAlias(idx, clusterState)); OriginalIndices localIndices = remoteClusterIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); if (remoteClusterIndices.isEmpty()) { - executeSearch((SearchTask)task, timeProvider, searchRequest, localIndices, remoteClusterIndices, Collections.emptyList(), + executeSearch((SearchTask)task, timeProvider, searchRequest, localIndices, Collections.emptyList(), (clusterName, nodeId) -> null, clusterState, Collections.emptyMap(), listener, SearchResponse.Clusters.EMPTY); } else { remoteClusterService.collectSearchShards(searchRequest.indicesOptions(), searchRequest.preference(), @@ -203,7 +204,7 @@ public class TransportSearchAction extends HandledTransportAction clusterNodeLookup = processRemoteShards(searchShardsResponses, remoteClusterIndices, remoteShardIterators, remoteAliasFilters); SearchResponse.Clusters clusters = buildClusters(localIndices, remoteClusterIndices, searchShardsResponses); - executeSearch((SearchTask) task, timeProvider, searchRequest, localIndices, remoteClusterIndices, + executeSearch((SearchTask) task, timeProvider, searchRequest, localIndices, remoteShardIterators, clusterNodeLookup, clusterState, remoteAliasFilters, listener, clusters); }, listener::onFailure)); @@ -219,7 +220,7 @@ public class TransportSearchAction extends HandledTransportAction remoteIndices, Map searchShardsResponses) { - int localClusters = Math.min(localIndices.indices().length, 1); + int localClusters = localIndices == null ? 
0 : 1; int totalClusters = remoteIndices.size() + localClusters; int successfulClusters = localClusters; for (ClusterSearchShardsResponse searchShardsResponse : searchShardsResponses.values()) { @@ -277,8 +278,19 @@ public class TransportSearchAction extends HandledTransportAction remoteClusterIndices, List remoteShardIterators, + private Index[] resolveLocalIndices(OriginalIndices localIndices, + IndicesOptions indicesOptions, + ClusterState clusterState, + SearchTimeProvider timeProvider) { + if (localIndices == null) { + return Index.EMPTY_ARRAY; //don't search on any local index (happens when only remote indices were specified) + } + return indexNameExpressionResolver.concreteIndices(clusterState, indicesOptions, + timeProvider.getAbsoluteStartMillis(), localIndices.indices()); + } + + private void executeSearch(SearchTask task, SearchTimeProvider timeProvider, SearchRequest searchRequest, + OriginalIndices localIndices, List remoteShardIterators, BiFunction remoteConnections, ClusterState clusterState, Map remoteAliasMap, ActionListener listener, SearchResponse.Clusters clusters) { @@ -287,13 +299,7 @@ public class TransportSearchAction extends HandledTransportAction aliasFilter = buildPerIndexAliasFilter(searchRequest, clusterState, indices, remoteAliasMap); Map> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), searchRequest.indices()); diff --git a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java index e4f0353d0e2..93a259a4436 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java @@ -41,6 +41,11 @@ public abstract class AcknowledgedRequest, Response extends ActionResponse> extends HandledTransportAction { +public abstract class TransportMasterNodeAction, Response extends ActionResponse> + extends HandledTransportAction { + protected final ThreadPool threadPool; protected final TransportService transportService; protected final ClusterService clusterService; protected final IndexNameExpressionResolver indexNameExpressionResolver; - final String executor; + private final String executor; protected TransportMasterNodeAction(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, @@ -75,7 +77,8 @@ public abstract class TransportMasterNodeAction request) { + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + Supplier request) { super(settings, actionName, canTripCircuitBreaker, transportService, actionFilters, request); this.transportService = transportService; this.clusterService = clusterService; @@ -138,7 +141,8 @@ public abstract class TransportMasterNodeAction new ParameterizedMessage("master could not publish cluster state or stepped down before publishing action [{}], scheduling a retry", actionName), t); + if (t instanceof FailedToCommitClusterStateException || t instanceof NotMasterException) { + logger.debug(() -> new ParameterizedMessage("master could not publish cluster state or " + + "stepped down before publishing action [{}], scheduling a retry", actionName), t); retry(t, masterChangePredicate); } else { listener.onFailure(t); } } }; - threadPool.executor(executor).execute(new ActionRunnable(delegate) { + 
threadPool.executor(executor).execute(new ActionRunnable(delegate) { @Override protected void doRun() throws Exception { masterOperation(task, request, clusterState, delegate); @@ -204,7 +208,8 @@ public abstract class TransportMasterNodeAction new ParameterizedMessage("timed out while retrying [{}] after failure (timeout [{}])", actionName, timeout), failure); + logger.debug(() -> new ParameterizedMessage("timed out while retrying [{}] after failure (timeout [{}])", + actionName, timeout), failure); listener.onFailure(new MasterNotDiscoveredException(failure)); } }, statePredicate diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java index 78bf9f2fc72..0da39a593a2 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ReplicationGroup; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; @@ -114,9 +115,13 @@ public class ReplicationOperation< // of the sampled replication group, and advanced further than what the given replication group would allow it to. // This would entail that some shards could learn about a global checkpoint that would be higher than its local checkpoint. final long globalCheckpoint = primary.globalCheckpoint(); + // we have to capture the max_seq_no_of_updates after this request completed on the primary, to make sure that the value of + // max_seq_no_of_updates on the replica, when this request executes there, is at least the value on the primary when the request completed.
+ final long maxSeqNoOfUpdatesOrDeletes = primary.maxSeqNoOfUpdatesOrDeletes(); + assert maxSeqNoOfUpdatesOrDeletes != SequenceNumbers.UNASSIGNED_SEQ_NO : "seqno_of_updates still uninitialized"; final ReplicationGroup replicationGroup = primary.getReplicationGroup(); markUnavailableShardsAsStale(replicaRequest, replicationGroup); - performOnReplicas(replicaRequest, globalCheckpoint, replicationGroup); + performOnReplicas(replicaRequest, globalCheckpoint, maxSeqNoOfUpdatesOrDeletes, replicationGroup); } successfulShards.incrementAndGet(); // mark primary as successful @@ -136,7 +141,7 @@ public class ReplicationOperation< } private void performOnReplicas(final ReplicaRequest replicaRequest, final long globalCheckpoint, - final ReplicationGroup replicationGroup) { + final long maxSeqNoOfUpdatesOrDeletes, final ReplicationGroup replicationGroup) { // for total stats, add number of unassigned shards and // number of initializing shards that are not ready yet to receive operations (recovery has not opened engine yet on the target) totalShards.addAndGet(replicationGroup.getSkippedShards().size()); @@ -145,19 +150,20 @@ public class ReplicationOperation< for (final ShardRouting shard : replicationGroup.getReplicationTargets()) { if (shard.isSameAllocation(primaryRouting) == false) { - performOnReplica(shard, replicaRequest, globalCheckpoint); + performOnReplica(shard, replicaRequest, globalCheckpoint, maxSeqNoOfUpdatesOrDeletes); } } } - private void performOnReplica(final ShardRouting shard, final ReplicaRequest replicaRequest, final long globalCheckpoint) { + private void performOnReplica(final ShardRouting shard, final ReplicaRequest replicaRequest, + final long globalCheckpoint, final long maxSeqNoOfUpdatesOrDeletes) { if (logger.isTraceEnabled()) { logger.trace("[{}] sending op [{}] to replica {} for request [{}]", shard.shardId(), opType, shard, replicaRequest); } totalShards.incrementAndGet(); pendingActions.incrementAndGet(); - replicasProxy.performOn(shard, replicaRequest, globalCheckpoint, new ActionListener() { + replicasProxy.performOn(shard, replicaRequest, globalCheckpoint, maxSeqNoOfUpdatesOrDeletes, new ActionListener() { @Override public void onResponse(ReplicaResponse response) { successfulShards.incrementAndGet(); @@ -322,6 +328,12 @@ public class ReplicationOperation< */ long globalCheckpoint(); + /** + * Returns the maximum seq_no of updates (index operations overwrite Lucene) or deletes on the primary. + * This value must be captured after the execution of a replication request on the primary is completed. + */ + long maxSeqNoOfUpdatesOrDeletes(); + /** * Returns the current replication group on the primary shard * @@ -338,12 +350,15 @@ public class ReplicationOperation< /** * Performs the specified request on the specified replica. * - * @param replica the shard this request should be executed on - * @param replicaRequest the operation to perform - * @param globalCheckpoint the global checkpoint on the primary - * @param listener callback for handling the response or failure + * @param replica the shard this request should be executed on + * @param replicaRequest the operation to perform + * @param globalCheckpoint the global checkpoint on the primary + * @param maxSeqNoOfUpdatesOrDeletes the max seq_no of updates (index operations overwriting Lucene) or deletes on primary + * after this replication was executed on it. 
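To make the new invariant concrete: the primary advances its max_seq_no_of_updates before acknowledging an update or delete, the completed value is captured and shipped with every replica request, and the replica only ever raises its own marker before applying the operation. A minimal sketch of that ordering, with illustrative names rather than the real ES API:

import java.util.concurrent.atomic.AtomicLong;

// Illustrative sketch only; heavily simplified from ReplicationOperation.
class MaxSeqNoOfUpdatesSketch {
    static final long UNASSIGNED_SEQ_NO = -2; // "still uninitialized"
    static final AtomicLong primaryMsu = new AtomicLong(UNASSIGNED_SEQ_NO);
    static final AtomicLong replicaMsu = new AtomicLong(UNASSIGNED_SEQ_NO);

    static void executeOnPrimary(long opSeqNo, boolean isUpdateOrDelete) {
        if (isUpdateOrDelete) {
            primaryMsu.accumulateAndGet(opSeqNo, Math::max); // advanced before acking
        }
        long captured = primaryMsu.get(); // captured AFTER the primary op completed
        executeOnReplica(captured);
    }

    static void executeOnReplica(long msuFromPrimary) {
        // monotonic: the replica value ends up at least what the primary had
        // when the request completed there
        replicaMsu.accumulateAndGet(msuFromPrimary, Math::max);
    }

    public static void main(String[] args) {
        executeOnPrimary(0, false);
        executeOnPrimary(1, true);  // an update advances max_seq_no_of_updates to 1
        executeOnPrimary(2, false);
        System.out.println("replica max_seq_no_of_updates = " + replicaMsu.get()); // 1
    }
}

On the wire the field is version-gated, as the ConcreteReplicaRequest hunks further down show: requests read from pre-6.5.0 nodes fall back to UNASSIGNED_SEQ_NO (-2), which simply disables seq_no-based optimizations on the replica.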
+ * @param listener callback for handling the response or failure */ - void performOn(ShardRouting replica, RequestT replicaRequest, long globalCheckpoint, ActionListener listener); + void performOn(ShardRouting replica, RequestT replicaRequest, long globalCheckpoint, + long maxSeqNoOfUpdatesOrDeletes, ActionListener listener); /** * Fail the specified shard if needed, removing it from the current set diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index e5a23e03ce8..a07412b321f 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -200,7 +200,7 @@ public abstract class TransportReplicationAction< /** * Synchronously execute the specified replica operation. This is done under a permit from - * {@link IndexShard#acquireReplicaOperationPermit(long, long, ActionListener, String, Object)}. + * {@link IndexShard#acquireReplicaOperationPermit(long, long, long, ActionListener, String, Object)}. * * @param shardRequest the request to the replica shard * @param replica the replica shard to perform the operation on @@ -489,6 +489,7 @@ public abstract class TransportReplicationAction< replicaRequest.getTargetAllocationID(), replicaRequest.getPrimaryTerm(), replicaRequest.getGlobalCheckpoint(), + replicaRequest.getMaxSeqNoOfUpdatesOrDeletes(), channel, (ReplicationTask) task).run(); } @@ -513,6 +514,7 @@ public abstract class TransportReplicationAction< private final String targetAllocationID; private final long primaryTerm; private final long globalCheckpoint; + private final long maxSeqNoOfUpdatesOrDeletes; private final TransportChannel channel; private final IndexShard replica; /** @@ -528,6 +530,7 @@ public abstract class TransportReplicationAction< String targetAllocationID, long primaryTerm, long globalCheckpoint, + long maxSeqNoOfUpdatesOrDeletes, TransportChannel channel, ReplicationTask task) { this.request = request; @@ -536,6 +539,7 @@ public abstract class TransportReplicationAction< this.targetAllocationID = targetAllocationID; this.primaryTerm = primaryTerm; this.globalCheckpoint = globalCheckpoint; + this.maxSeqNoOfUpdatesOrDeletes = maxSeqNoOfUpdatesOrDeletes; final ShardId shardId = request.shardId(); assert shardId != null : "request shardId must be set"; this.replica = getIndexShard(shardId); @@ -575,7 +579,8 @@ public abstract class TransportReplicationAction< new TransportChannelResponseHandler<>(logger, channel, extraMessage, () -> TransportResponse.Empty.INSTANCE); transportService.sendRequest(clusterService.localNode(), transportReplicaAction, - new ConcreteReplicaRequest<>(request, targetAllocationID, primaryTerm, globalCheckpoint), + new ConcreteReplicaRequest<>(request, targetAllocationID, primaryTerm, + globalCheckpoint, maxSeqNoOfUpdatesOrDeletes), handler); } @@ -613,7 +618,7 @@ public abstract class TransportReplicationAction< throw new ShardNotFoundException(this.replica.shardId(), "expected aID [{}] but found [{}]", targetAllocationID, actualAllocationId); } - replica.acquireReplicaOperationPermit(primaryTerm, globalCheckpoint, this, executor, request); + replica.acquireReplicaOperationPermit(primaryTerm, globalCheckpoint, maxSeqNoOfUpdatesOrDeletes, this, executor, request); } /** @@ -1023,6 +1028,11 @@ public abstract class 
TransportReplicationAction< return indexShard.getGlobalCheckpoint(); } + @Override + public long maxSeqNoOfUpdatesOrDeletes() { + return indexShard.getMaxSeqNoOfUpdatesOrDeletes(); + } + @Override public ReplicationGroup getReplicationGroup() { return indexShard.getReplicationGroup(); @@ -1120,6 +1130,7 @@ public abstract class TransportReplicationAction< final ShardRouting replica, final ReplicaRequest request, final long globalCheckpoint, + final long maxSeqNoOfUpdatesOrDeletes, final ActionListener listener) { String nodeId = replica.currentNodeId(); final DiscoveryNode node = clusterService.state().nodes().get(nodeId); @@ -1127,8 +1138,8 @@ public abstract class TransportReplicationAction< listener.onFailure(new NoNodeAvailableException("unknown node [" + nodeId + "]")); return; } - final ConcreteReplicaRequest replicaRequest = - new ConcreteReplicaRequest<>(request, replica.allocationId().getId(), primaryTerm, globalCheckpoint); + final ConcreteReplicaRequest replicaRequest = new ConcreteReplicaRequest<>( + request, replica.allocationId().getId(), primaryTerm, globalCheckpoint, maxSeqNoOfUpdatesOrDeletes); sendReplicaRequest(replicaRequest, node, listener); } @@ -1276,15 +1287,17 @@ public abstract class TransportReplicationAction< protected static final class ConcreteReplicaRequest extends ConcreteShardRequest { private long globalCheckpoint; + private long maxSeqNoOfUpdatesOrDeletes; public ConcreteReplicaRequest(final Supplier requestSupplier) { super(requestSupplier); } public ConcreteReplicaRequest(final R request, final String targetAllocationID, final long primaryTerm, - final long globalCheckpoint) { + final long globalCheckpoint, final long maxSeqNoOfUpdatesOrDeletes) { super(request, targetAllocationID, primaryTerm); this.globalCheckpoint = globalCheckpoint; + this.maxSeqNoOfUpdatesOrDeletes = maxSeqNoOfUpdatesOrDeletes; } @Override @@ -1295,6 +1308,13 @@ public abstract class TransportReplicationAction< } else { globalCheckpoint = SequenceNumbers.UNASSIGNED_SEQ_NO; } + if (in.getVersion().onOrAfter(Version.V_6_5_0)) { + maxSeqNoOfUpdatesOrDeletes = in.readZLong(); + } else { + // UNASSIGNED_SEQ_NO (-2) means uninitialized, and replicas will disable + // optimization using seq_no if its max_seq_no_of_updates is still uninitialized + maxSeqNoOfUpdatesOrDeletes = SequenceNumbers.UNASSIGNED_SEQ_NO; + } } @Override @@ -1303,12 +1323,19 @@ public abstract class TransportReplicationAction< if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha1)) { out.writeZLong(globalCheckpoint); } + if (out.getVersion().onOrAfter(Version.V_6_5_0)) { + out.writeZLong(maxSeqNoOfUpdatesOrDeletes); + } } public long getGlobalCheckpoint() { return globalCheckpoint; } + public long getMaxSeqNoOfUpdatesOrDeletes() { + return maxSeqNoOfUpdatesOrDeletes; + } + @Override public String toString() { return "ConcreteReplicaRequest{" + @@ -1316,6 +1343,7 @@ public abstract class TransportReplicationAction< ", primaryTerm='" + getPrimaryTerm() + '\'' + ", request=" + getRequest() + ", globalCheckpoint=" + globalCheckpoint + + ", maxSeqNoOfUpdatesOrDeletes=" + maxSeqNoOfUpdatesOrDeletes + '}'; } } diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java index 88c65381e7a..6dc11877e7c 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java +++ 
b/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java @@ -44,7 +44,6 @@ public abstract class SingleShardRequest params; - if (CTX_IN_PARAMS) { - params = new HashMap<>(script.getParams()); - params.put(ContextFields.CTX, ctx); - deprecationLogger.deprecated("Using `ctx` via `params.ctx` is deprecated. " + - "Use -Des.scripting.update.ctx_in_params=false to enforce non-deprecated usage."); - } else { - params = script.getParams(); - } - UpdateScript executableScript = factory.newInstance(params); - executableScript.execute(ctx); + UpdateScript executableScript = factory.newInstance(script.getParams(), ctx); + executableScript.execute(); } } catch (Exception e) { throw new IllegalArgumentException("failed to execute script", e); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 475e9b94c68..3395d951415 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -307,7 +307,7 @@ final class Bootstrap { final boolean closeStandardStreams = (foreground == false) || quiet; try { if (closeStandardStreams) { - final Logger rootLogger = ESLoggerFactory.getRootLogger(); + final Logger rootLogger = LogManager.getRootLogger(); final Appender maybeConsoleAppender = Loggers.findAppender(rootLogger, ConsoleAppender.class); if (maybeConsoleAppender != null) { Loggers.removeAppender(rootLogger, maybeConsoleAppender); @@ -339,7 +339,7 @@ final class Bootstrap { } } catch (NodeValidationException | RuntimeException e) { // disable console logging, so user does not see the exception twice (jvm will show it already) - final Logger rootLogger = ESLoggerFactory.getRootLogger(); + final Logger rootLogger = LogManager.getRootLogger(); final Appender maybeConsoleAppender = Loggers.findAppender(rootLogger, ConsoleAppender.class); if (foreground && maybeConsoleAppender != null) { Loggers.removeAppender(rootLogger, maybeConsoleAppender); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java index 5ef85d94970..789b01c0cfa 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java @@ -27,7 +27,7 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Optional; +import java.util.OptionalInt; /** * This class acts as a functional wrapper around the {@code index.auto_expand_replicas} setting. 
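In the AutoExpandReplicas hunks that follow, getDesiredNumberOfReplicas moves from Optional<Integer> to OptionalInt, avoiding boxing for what is always a primitive int. A simplified, self-contained sketch of the min-max expansion it computes (illustrative names; the real class also validates the setting and only reports a value when the replica count must actually change):

import java.util.OptionalInt;

// Simplified sketch of the expansion behind index.auto_expand_replicas.
final class AutoExpandSketch {
    static OptionalInt desiredReplicas(String setting, int numDataNodes) {
        if ("false".equals(setting)) {
            return OptionalInt.empty(); // auto-expansion disabled
        }
        String[] minMax = setting.split("-", 2);
        int min = Integer.parseInt(minMax[0]);
        int max = "all".equals(minMax[1]) ? numDataNodes - 1 : Integer.parseInt(minMax[1]);
        return OptionalInt.of(Math.max(min, Math.min(max, numDataNodes - 1)));
    }

    public static void main(String[] args) {
        System.out.println(desiredReplicas("0-all", 3)); // OptionalInt[2]
        System.out.println(desiredReplicas("0-1", 5));   // OptionalInt[1]
        System.out.println(desiredReplicas("false", 5)); // OptionalInt.empty
    }
}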
@@ -93,7 +93,7 @@ public final class AutoExpandReplicas { return Math.min(maxReplicas, numDataNodes-1); } - Optional getDesiredNumberOfReplicas(int numDataNodes) { + private OptionalInt getDesiredNumberOfReplicas(int numDataNodes) { if (enabled) { final int min = getMinReplicas(); final int max = getMaxReplicas(numDataNodes); @@ -105,10 +105,10 @@ public final class AutoExpandReplicas { } if (numberOfReplicas >= min && numberOfReplicas <= max) { - return Optional.of(numberOfReplicas); + return OptionalInt.of(numberOfReplicas); } } - return Optional.empty(); + return OptionalInt.empty(); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index c1e70191417..e764c16f910 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -955,6 +955,10 @@ public class IndexMetaData implements Diffable, ToXContentFragmen return this; } + public Map removeCustom(String type) { + return this.customMetaData.remove(type); + } + public Set getInSyncAllocationIds(int shardId) { return inSyncAllocationIds.get(shardId); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 1f6a9fe027d..09fde36e1f9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -27,10 +27,11 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateMathParser; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -923,8 +924,9 @@ public class IndexNameExpressionResolver extends AbstractComponent { } DateTimeFormatter parser = dateFormatter.withZone(timeZone); FormatDateTimeFormatter formatter = new FormatDateTimeFormatter(dateFormatterPattern, parser, Locale.ROOT); - DateMathParser dateMathParser = new DateMathParser(formatter); - long millis = dateMathParser.parse(mathExpression, context::getStartTime, false, timeZone); + DateMathParser dateMathParser = formatter.toDateMathParser(); + long millis = dateMathParser.parse(mathExpression, context::getStartTime, false, + DateUtils.dateTimeZoneToZoneId(timeZone)); String time = formatter.printer().print(millis); beforePlaceHolderSb.append(time); diff --git a/server/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/server/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java index fbb2fd19f0e..e90d7261c99 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java @@ -24,6 +24,7 @@ import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import 
org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.Assertions; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; @@ -32,7 +33,6 @@ import org.elasticsearch.common.geo.parsers.GeoWKTParser; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.locationtech.spatial4j.context.jts.JtsSpatialContext; @@ -54,7 +54,7 @@ import java.util.Objects; */ public abstract class ShapeBuilder> implements NamedWriteable, ToXContentObject { - protected static final Logger LOGGER = ESLoggerFactory.getLogger(ShapeBuilder.class.getName()); + protected static final Logger LOGGER = LogManager.getLogger(ShapeBuilder.class); private static final boolean DEBUG; static { diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/Join.java b/server/src/main/java/org/elasticsearch/common/inject/internal/Join.java deleted file mode 100644 index e44bed9d88a..00000000000 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/Join.java +++ /dev/null @@ -1,318 +0,0 @@ -/* - * Copyright (C) 2007 Google Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.elasticsearch.common.inject.internal; - -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.util.CollectionUtils; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Iterator; -import java.util.Map; -import java.util.Objects; - -/** - * Utility for joining pieces of text separated by a delimiter. It can handle - * iterators, collections, arrays, and varargs, and can append to any - * {@link Appendable} or just return a {@link String}. For example, - * {@code join(":", "a", "b", "c")} returns {@code "a:b:c"}. - *
- * All methods of this class throw {@link NullPointerException} when a value - * of {@code null} is supplied for any parameter. The elements within the - * collection, iterator, array, or varargs parameter list may be null -- - * these will be represented in the output by the string {@code "null"}. - * - * @author Kevin Bourrillion - */ -public final class Join { - private Join() { - } - - /** - * Returns a string containing the {@code tokens}, converted to strings if - * necessary, separated by {@code delimiter}. If {@code tokens} is empty, it - * returns an empty string. - *
- * Each token will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param delimiter a string to append between every element, but not at the - * beginning or end - * @param tokens objects to append - * @return a string consisting of the joined elements - */ - public static String join(String delimiter, Iterable tokens) { - return join(delimiter, tokens.iterator()); - } - - /** - * Returns a string containing the {@code tokens}, converted to strings if - * necessary, separated by {@code delimiter}. If {@code tokens} is empty, it - * returns an empty string. - *
- * Each token will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param delimiter a string to append between every element, but not at the - * beginning or end - * @param tokens objects to append - * @return a string consisting of the joined elements - */ - public static String join(String delimiter, Object[] tokens) { - return join(delimiter, Arrays.asList(tokens)); - } - - /** - * Returns a string containing the {@code tokens}, converted to strings if - * necessary, separated by {@code delimiter}. - *
- * Each token will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param delimiter a string to append between every element, but not at the - * beginning or end - * @param firstToken the first object to append - * @param otherTokens subsequent objects to append - * @return a string consisting of the joined elements - */ - public static String join( - String delimiter, @Nullable Object firstToken, Object... otherTokens) { - Objects.requireNonNull(otherTokens); - return join(delimiter, CollectionUtils.asArrayList(firstToken, otherTokens)); - } - - /** - * Returns a string containing the {@code tokens}, converted to strings if - * necessary, separated by {@code delimiter}. If {@code tokens} is empty, it - * returns an empty string. - *
- * Each token will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param delimiter a string to append between every element, but not at the - * beginning or end - * @param tokens objects to append - * @return a string consisting of the joined elements - */ - public static String join(String delimiter, Iterator tokens) { - StringBuilder sb = new StringBuilder(); - join(sb, delimiter, tokens); - return sb.toString(); - } - - /** - * Returns a string containing the contents of {@code map}, with entries - * separated by {@code entryDelimiter}, and keys and values separated with - * {@code keyValueSeparator}. - *
- * Each key and value will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param keyValueSeparator a string to append between every key and its - * associated value - * @param entryDelimiter a string to append between every entry, but not at - * the beginning or end - * @param map the map containing the data to join - * @return a string consisting of the joined entries of the map; empty if the - * map is empty - */ - public static String join( - String keyValueSeparator, String entryDelimiter, Map map) { - return join(new StringBuilder(), keyValueSeparator, entryDelimiter, map) - .toString(); - } - - /** - * Appends each of the {@code tokens} to {@code appendable}, separated by - * {@code delimiter}. - *
- * Each token will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param appendable the object to append the results to - * @param delimiter a string to append between every element, but not at the - * beginning or end - * @param tokens objects to append - * @return the same {@code Appendable} instance that was passed in - * @throws JoinException if an {@link IOException} occurs - */ - public static T join( - T appendable, String delimiter, Iterable tokens) { - return join(appendable, delimiter, tokens.iterator()); - } - - /** - * Appends each of the {@code tokens} to {@code appendable}, separated by - * {@code delimiter}. - *
- * Each token will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param appendable the object to append the results to - * @param delimiter a string to append between every element, but not at the - * beginning or end - * @param tokens objects to append - * @return the same {@code Appendable} instance that was passed in - * @throws JoinException if an {@link IOException} occurs - */ - public static T join( - T appendable, String delimiter, Object[] tokens) { - return join(appendable, delimiter, Arrays.asList(tokens)); - } - - /** - * Appends each of the {@code tokens} to {@code appendable}, separated by - * {@code delimiter}. - *
- * Each token will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param appendable the object to append the results to - * @param delimiter a string to append between every element, but not at the - * beginning or end - * @param firstToken the first object to append - * @param otherTokens subsequent objects to append - * @return the same {@code Appendable} instance that was passed in - * @throws JoinException if an {@link IOException} occurs - */ - public static T join(T appendable, String delimiter, - @Nullable Object firstToken, Object... otherTokens) { - Objects.requireNonNull(otherTokens); - return join(appendable, delimiter, CollectionUtils.asArrayList(firstToken, otherTokens)); - } - - /** - * Appends each of the {@code tokens} to {@code appendable}, separated by - * {@code delimiter}. - *
- * Each token will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param appendable the object to append the results to - * @param delimiter a string to append between every element, but not at the - * beginning or end - * @param tokens objects to append - * @return the same {@code Appendable} instance that was passed in - * @throws JoinException if an {@link IOException} occurs - */ - public static T join( - T appendable, String delimiter, Iterator tokens) { - - /* This method is the workhorse of the class */ - - Objects.requireNonNull(appendable); - Objects.requireNonNull(delimiter); - if (tokens.hasNext()) { - try { - appendOneToken(appendable, tokens.next()); - while (tokens.hasNext()) { - appendable.append(delimiter); - appendOneToken(appendable, tokens.next()); - } - } catch (IOException e) { - throw new JoinException(e); - } - } - return appendable; - } - - /** - * Appends the contents of {@code map} to {@code appendable}, with entries - * separated by {@code entryDelimiter}, and keys and values separated with - * {@code keyValueSeparator}. - *
- * Each key and value will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param appendable the object to append the results to - * @param keyValueSeparator a string to append between every key and its - * associated value - * @param entryDelimiter a string to append between every entry, but not at - * the beginning or end - * @param map the map containing the data to join - * @return the same {@code Appendable} instance that was passed in - */ - public static T join(T appendable, - String keyValueSeparator, String entryDelimiter, Map map) { - Objects.requireNonNull(appendable); - Objects.requireNonNull(keyValueSeparator); - Objects.requireNonNull(entryDelimiter); - Iterator> entries = map.entrySet().iterator(); - if (entries.hasNext()) { - try { - appendOneEntry(appendable, keyValueSeparator, entries.next()); - while (entries.hasNext()) { - appendable.append(entryDelimiter); - appendOneEntry(appendable, keyValueSeparator, entries.next()); - } - } catch (IOException e) { - throw new JoinException(e); - } - } - return appendable; - } - - private static void appendOneEntry( - Appendable appendable, String keyValueSeparator, Map.Entry entry) - throws IOException { - appendOneToken(appendable, entry.getKey()); - appendable.append(keyValueSeparator); - appendOneToken(appendable, entry.getValue()); - } - - private static void appendOneToken(Appendable appendable, Object token) - throws IOException { - appendable.append(toCharSequence(token)); - } - - private static CharSequence toCharSequence(Object token) { - return (token instanceof CharSequence) - ? (CharSequence) token - : String.valueOf(token); - } - - /** - * Exception thrown in response to an {@link IOException} from the supplied - * {@link Appendable}. This is used because most callers won't want to - * worry about catching an IOException. 
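An aside on the file being deleted here: everything the Join utility offered has had JDK equivalents since Java 8, which is presumably why it can be dropped wholesale. A sketch of the replacements (class name illustrative):

import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
import java.util.stream.Collectors;

// Sketch: standard-library equivalents for the removed Join helper.
public class JoinReplacements {
    public static void main(String[] args) {
        // Join.join(":", "a", "b", "c")
        System.out.println(String.join(":", "a", "b", "c")); // a:b:c

        // Join.join(", ", tokens) for non-CharSequence tokens
        System.out.println(Arrays.stream(new Object[] {1, null, 3})
                .map(String::valueOf) // nulls become the string "null", as Join did
                .collect(Collectors.joining(", "))); // 1, null, 3

        // Join.join("=", "; ", map)
        Map<String, Integer> map = Collections.singletonMap("a", 1);
        System.out.println(map.entrySet().stream()
                .map(e -> e.getKey() + "=" + e.getValue())
                .collect(Collectors.joining("; "))); // a=1
    }
}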
- */ - public static class JoinException extends RuntimeException { - private JoinException(IOException cause) { - super(cause); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/MoreTypes.java b/server/src/main/java/org/elasticsearch/common/inject/internal/MoreTypes.java index ed46a5c3c54..7e4d54867fb 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/MoreTypes.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/MoreTypes.java @@ -634,7 +634,6 @@ public class MoreTypes { private final int modifiers; private final boolean synthetic; private final Class memberType; - private final String memberKey; private MemberImpl(Member member) { this.declaringClass = member.getDeclaringClass(); @@ -642,7 +641,6 @@ public class MoreTypes { this.modifiers = member.getModifiers(); this.synthetic = member.isSynthetic(); this.memberType = memberType(member); - this.memberKey = memberKey(member); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/joda/FormatDateTimeFormatter.java b/server/src/main/java/org/elasticsearch/common/joda/FormatDateTimeFormatter.java index 72a60e8678c..e953e9563c6 100644 --- a/server/src/main/java/org/elasticsearch/common/joda/FormatDateTimeFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/joda/FormatDateTimeFormatter.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.joda; +import org.elasticsearch.common.time.DateMathParser; import org.joda.time.format.DateTimeFormatter; import java.util.Locale; @@ -64,4 +65,8 @@ public class FormatDateTimeFormatter { public Locale locale() { return locale; } + + public DateMathParser toDateMathParser() { + return new JodaDateMathParser(this); + } } diff --git a/server/src/main/java/org/elasticsearch/common/joda/DateMathParser.java b/server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java similarity index 95% rename from server/src/main/java/org/elasticsearch/common/joda/DateMathParser.java rename to server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java index ba5531c813c..0cef1d3e09b 100644 --- a/server/src/main/java/org/elasticsearch/common/joda/DateMathParser.java +++ b/server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java @@ -20,10 +20,13 @@ package org.elasticsearch.common.joda; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.time.DateMathParser; +import org.elasticsearch.common.time.DateUtils; import org.joda.time.DateTimeZone; import org.joda.time.MutableDateTime; import org.joda.time.format.DateTimeFormatter; +import java.time.ZoneId; import java.util.Objects; import java.util.function.LongSupplier; @@ -34,23 +37,21 @@ import java.util.function.LongSupplier; * is appended to a datetime with the following syntax: * ||[+-/](\d+)?[yMwdhHms]. */ -public class DateMathParser { +public class JodaDateMathParser implements DateMathParser { private final FormatDateTimeFormatter dateTimeFormatter; - public DateMathParser(FormatDateTimeFormatter dateTimeFormatter) { + public JodaDateMathParser(FormatDateTimeFormatter dateTimeFormatter) { Objects.requireNonNull(dateTimeFormatter); this.dateTimeFormatter = dateTimeFormatter; } - public long parse(String text, LongSupplier now) { - return parse(text, now, false, null); - } - // Note: we take a callable here for the timestamp in order to be able to figure out // if it has been used. For instance, the request cache does not cache requests that make // use of `now`. 
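The rename to JodaDateMathParser makes room for a java.time implementation behind the shared DateMathParser interface, whose parse method now accepts a java.time.ZoneId rather than a Joda DateTimeZone. For intuition, a standalone sketch of what an expression like now-1d/d resolves to, written directly against java.time (illustrative names; the real parser supports the full ||[+-/](\d+)?[yMwdhHms] grammar):

import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;

// Illustrative sketch, not the ES parser: resolving "now-1d/d".
public class DateMathSketch {
    static long nowMinusOneDayRoundedDown(long nowMillis, ZoneId zone) {
        ZonedDateTime now = Instant.ofEpochMilli(nowMillis).atZone(zone);
        // "-1d" applies the adjustment, "/d" rounds down to the start of the day
        return now.minusDays(1).truncatedTo(ChronoUnit.DAYS).toInstant().toEpochMilli();
    }

    public static void main(String[] args) {
        long millis = nowMinusOneDayRoundedDown(System.currentTimeMillis(), ZoneId.of("UTC"));
        System.out.println(Instant.ofEpochMilli(millis)); // midnight UTC, yesterday
    }
}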
- public long parse(String text, LongSupplier now, boolean roundUp, DateTimeZone timeZone) { + @Override + public long parse(String text, LongSupplier now, boolean roundUp, ZoneId tz) { + final DateTimeZone timeZone = tz == null ? null : DateUtils.zoneIdToDateTimeZone(tz); long time; String mathString; if (text.startsWith("now")) { diff --git a/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java index 2159014f825..6388853d00b 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java +++ b/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java @@ -70,23 +70,4 @@ public final class ESLoggerFactory { public static Logger getLogger(Class clazz) { return getLogger(null, clazz); } - - /** - * Get or build a logger. - * @deprecated Prefer {@link LogManager#getLogger} - */ - @Deprecated - public static Logger getLogger(String name) { - return getLogger(null, name); - } - - /** - * Get the root logger. - * @deprecated Prefer {@link LogManager#getRootLogger} - */ - @Deprecated - public static Logger getRootLogger() { - return LogManager.getRootLogger(); - } - } diff --git a/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java index 89c9ce229d6..6f8358e113d 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java +++ b/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java @@ -258,13 +258,13 @@ public class LogConfigurator { private static void configureLoggerLevels(final Settings settings) { if (Loggers.LOG_DEFAULT_LEVEL_SETTING.exists(settings)) { final Level level = Loggers.LOG_DEFAULT_LEVEL_SETTING.get(settings); - Loggers.setLevel(ESLoggerFactory.getRootLogger(), level); + Loggers.setLevel(LogManager.getRootLogger(), level); } Loggers.LOG_LEVEL_SETTING.getAllConcreteSettings(settings) // do not set a log level for a logger named level (from the default log setting) .filter(s -> s.getKey().equals(Loggers.LOG_DEFAULT_LEVEL_SETTING.getKey()) == false).forEach(s -> { final Level level = s.get(settings); - Loggers.setLevel(ESLoggerFactory.getLogger(s.getKey().substring("logger.".length())), level); + Loggers.setLevel(LogManager.getLogger(s.getKey().substring("logger.".length())), level); }); } diff --git a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java index 57bafbbdac4..a772867c084 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java +++ b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java @@ -62,13 +62,23 @@ public class Loggers { } public static Logger getLogger(Class clazz, Index index, String... prefixes) { - return getLogger(clazz, Settings.EMPTY, asArrayList(Loggers.SPACE, index.getName(), prefixes).toArray(new String[0])); + return getLogger(clazz, asArrayList(Loggers.SPACE, index.getName(), prefixes).toArray(new String[0])); } + /** + * Get a logger. + * @deprecated prefer {@link #getLogger(Class, String...)} or {@link LogManager#getLogger} + * as the Settings is no longer needed + */ + @Deprecated public static Logger getLogger(Class clazz, Settings settings, String... prefixes) { return ESLoggerFactory.getLogger(formatPrefix(prefixes), clazz); } + public static Logger getLogger(Class clazz, String... 
prefixes) { + return ESLoggerFactory.getLogger(formatPrefix(prefixes), clazz); + } + public static Logger getLogger(Logger parentLogger, String s) { String prefix = null; if (parentLogger instanceof PrefixLogger) { @@ -83,7 +93,7 @@ public class Loggers { */ @Deprecated public static Logger getLogger(String s) { - return ESLoggerFactory.getLogger(s); + return LogManager.getLogger(s); } /** diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java index 5d1e4537f65..f931ee2dc31 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java @@ -759,11 +759,11 @@ public final class XMoreLikeThis { if (queue.size() < limit) { // there is still space in the queue - queue.add(new ScoreTerm(word, topField, score, idf, docFreq, tf)); + queue.add(new ScoreTerm(word, topField, score)); } else { ScoreTerm term = queue.top(); if (term.score < score) { // update the smallest in the queue in place and update the queue. - term.update(word, topField, score, idf, docFreq, tf); + term.update(word, topField, score); queue.updateTop(); } } @@ -1026,30 +1026,20 @@ public final class XMoreLikeThis { } private static class ScoreTerm { - // only really need 1st 3 entries, other ones are for troubleshooting String word; String topField; float score; - float idf; - int docFreq; - int tf; - ScoreTerm(String word, String topField, float score, float idf, int docFreq, int tf) { + ScoreTerm(String word, String topField, float score) { this.word = word; this.topField = topField; this.score = score; - this.idf = idf; - this.docFreq = docFreq; - this.tf = tf; } - void update(String word, String topField, float score, float idf, int docFreq, int tf) { + void update(String word, String topField, float score) { this.word = word; this.topField = topField; this.score = score; - this.idf = idf; - this.docFreq = docFreq; - this.tf = tf; } } diff --git a/server/src/main/java/org/elasticsearch/common/rounding/Rounding.java b/server/src/main/java/org/elasticsearch/common/rounding/Rounding.java index f8339079926..8f2802c76c9 100644 --- a/server/src/main/java/org/elasticsearch/common/rounding/Rounding.java +++ b/server/src/main/java/org/elasticsearch/common/rounding/Rounding.java @@ -410,9 +410,14 @@ public abstract class Rounding implements Writeable { Rounding rounding; byte id = in.readByte(); switch (id) { - case TimeUnitRounding.ID: rounding = new TimeUnitRounding(in); break; - case TimeIntervalRounding.ID: rounding = new TimeIntervalRounding(in); break; - default: throw new ElasticsearchException("unknown rounding id [" + id + "]"); + case TimeUnitRounding.ID: + rounding = new TimeUnitRounding(in); + break; + case TimeIntervalRounding.ID: + rounding = new TimeIntervalRounding(in); + break; + default: + throw new ElasticsearchException("unknown rounding id [" + id + "]"); } return rounding; } diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 3ded6f78f1c..ae3f1f0fa0d 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.common.settings; +import org.apache.logging.log4j.LogManager; import 
org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.AutoCreateIndex; @@ -46,7 +47,6 @@ import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationD import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; @@ -161,12 +161,12 @@ public final class ClusterSettings extends AbstractScopedSettings { if ("_root".equals(component)) { final String rootLevel = value.get(key); if (rootLevel == null) { - Loggers.setLevel(ESLoggerFactory.getRootLogger(), Loggers.LOG_DEFAULT_LEVEL_SETTING.get(settings)); + Loggers.setLevel(LogManager.getRootLogger(), Loggers.LOG_DEFAULT_LEVEL_SETTING.get(settings)); } else { - Loggers.setLevel(ESLoggerFactory.getRootLogger(), rootLevel); + Loggers.setLevel(LogManager.getRootLogger(), rootLevel); } } else { - Loggers.setLevel(ESLoggerFactory.getLogger(component), value.get(key)); + Loggers.setLevel(LogManager.getLogger(component), value.get(key)); } } } diff --git a/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index 1eca3eb415f..086346f470a 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -20,10 +20,10 @@ package org.elasticsearch.common.settings; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Binder; import org.elasticsearch.common.inject.Module; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; @@ -43,12 +43,12 @@ import java.util.stream.IntStream; * A module that binds the provided settings to the {@link Settings} interface. 
*/ public class SettingsModule implements Module { + private static final Logger logger = LogManager.getLogger(SettingsModule.class); private final Settings settings; private final Set settingsFilterPattern = new HashSet<>(); private final Map> nodeSettings = new HashMap<>(); private final Map> indexSettings = new HashMap<>(); - private final Logger logger; private final IndexScopedSettings indexScopedSettings; private final ClusterSettings clusterSettings; private final SettingsFilter settingsFilter; @@ -62,7 +62,6 @@ public class SettingsModule implements Module { List> additionalSettings, List settingsFilter, Set> settingUpgraders) { - logger = Loggers.getLogger(getClass(), settings); this.settings = settings; for (Setting setting : ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) { registerSetting(setting); } diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java index d16662b23b9..b952f1d69bc 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java @@ -24,6 +24,7 @@ import java.time.format.DateTimeParseException; import java.time.temporal.TemporalAccessor; import java.time.temporal.TemporalField; import java.util.Arrays; +import java.util.Locale; import java.util.Map; import java.util.stream.Collectors; @@ -46,6 +47,14 @@ public interface DateFormatter { */ DateFormatter withZone(ZoneId zoneId); + /** + * Create a copy of this formatter that is configured to parse dates in the specified locale + * + * @param locale The locale to use for the new formatter + * @return A copy of this date formatter with the specified locale + */ + DateFormatter withLocale(Locale locale); + /** * Print the supplied java time accessor in a string based representation according to this formatter * @@ -62,6 +71,20 @@ public interface DateFormatter { */ String pattern(); + /** + * Returns the configured locale of the date formatter + * + * @return The locale of this formatter + */ + Locale getLocale(); + + /** + * Returns the configured time zone of the date formatter + * + * @return The time zone of this formatter + */ + ZoneId getZone(); + /** * Configure a formatter using default fields for a TemporalAccessor that should be used in case * the supplied date does not have all of those fields @@ -115,6 +138,11 @@ public interface DateFormatter { return new MergedDateFormatter(Arrays.stream(formatters).map(f -> f.withZone(zoneId)).toArray(DateFormatter[]::new)); } + @Override + public DateFormatter withLocale(Locale locale) { + return new MergedDateFormatter(Arrays.stream(formatters).map(f -> f.withLocale(locale)).toArray(DateFormatter[]::new)); + } + @Override public String format(TemporalAccessor accessor) { return formatters[0].format(accessor); @@ -125,6 +153,16 @@ public interface DateFormatter { return format; } + @Override + public Locale getLocale() { + return formatters[0].getLocale(); + } + + @Override + public ZoneId getZone() { + return formatters[0].getZone(); + } + @Override public DateFormatter parseDefaulting(Map fields) { return new MergedDateFormatter(Arrays.stream(formatters).map(f -> f.parseDefaulting(fields)).toArray(DateFormatter[]::new));
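To make the locale additions to the DateFormatter interface above concrete, here is a minimal usage sketch. It is illustrative only and not part of the patch; it assumes the public DateFormatters.forPattern(String) entry point and the "basic_date_time" pattern name that appear elsewhere in this change.

--------------------------------------
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Locale;

import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;

public class DateFormatterLocaleSketch {
    public static void main(String[] args) {
        // withLocale and withZone return reconfigured copies rather than mutating the formatter
        DateFormatter formatter = DateFormatters.forPattern("basic_date_time")
                .withLocale(Locale.GERMAN)
                .withZone(ZoneOffset.UTC);

        // the new getters expose the configured locale and time zone
        assert Locale.GERMAN.equals(formatter.getLocale());
        assert ZoneOffset.UTC.equals(formatter.getZone());

        System.out.println(formatter.format(ZonedDateTime.now(ZoneOffset.UTC)));
    }
}
--------------------------------------

Note that the epoch formatters later in this change are deliberately stricter: their withLocale and withZone throw IllegalArgumentException for anything other than Locale.ROOT and UTC.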
diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java index 5f687651344..a330e02b0cb 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java @@ -890,17 +890,6 @@ public class DateFormatters { private static final DateFormatter YEAR = new JavaDateFormatter("year", new DateTimeFormatterBuilder().appendValue(ChronoField.YEAR).toFormatter(Locale.ROOT)); - /* - * Returns a formatter for parsing the seconds since the epoch - */ - private static final DateFormatter EPOCH_SECOND = new JavaDateFormatter("epoch_second", - new DateTimeFormatterBuilder().appendValue(ChronoField.INSTANT_SECONDS).toFormatter(Locale.ROOT)); - - /* - * Parses the milliseconds since/before the epoch - */ - private static final DateFormatter EPOCH_MILLIS = EpochMillisDateFormatter.INSTANCE; - /* * Returns a formatter that combines a full date and two digit hour of * day. (yyyy-MM-dd'T'HH) @@ -1280,7 +1269,7 @@ public class DateFormatters { return forPattern(input, Locale.ROOT); } - public static DateFormatter forPattern(String input, Locale locale) { + private static DateFormatter forPattern(String input, Locale locale) { if (Strings.hasLength(input)) { input = input.trim(); } @@ -1375,9 +1364,9 @@ public class DateFormatters { } else if ("yearMonthDay".equals(input) || "year_month_day".equals(input)) { return YEAR_MONTH_DAY; } else if ("epoch_second".equals(input)) { - return EPOCH_SECOND; + return EpochSecondsDateFormatter.INSTANCE; } else if ("epoch_millis".equals(input)) { - return EPOCH_MILLIS; + return EpochMillisDateFormatter.INSTANCE; // strict date formats here, must be at least 4 digits for year and two for months and two for day } else if ("strictBasicWeekDate".equals(input) || "strict_basic_week_date".equals(input)) { return STRICT_BASIC_WEEK_DATE; diff --git a/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java b/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java index 5e5ecc5bafd..b2cb319071f 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java @@ -19,56 +19,31 @@ package org.elasticsearch.common.time; -import org.elasticsearch.ElasticsearchParseException; +import org.joda.time.DateTimeZone; -import java.time.DateTimeException; -import java.time.DayOfWeek; -import java.time.Instant; -import java.time.LocalTime; import java.time.ZoneId; -import java.time.ZoneOffset; -import java.time.ZonedDateTime; -import java.time.temporal.ChronoField; -import java.time.temporal.TemporalAccessor; -import java.time.temporal.TemporalAdjusters; -import java.time.temporal.TemporalField; -import java.time.temporal.TemporalQueries; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; import java.util.function.LongSupplier; /** - * A parser for date/time formatted text with optional date math. - * - * The format of the datetime is configurable, and unix timestamps can also be used. Datemath - * is appended to a datetime with the following syntax: - * ||[+-/](\d+)?[yMwdhHms]. + * An abstraction over date math parsing to allow different implementations for joda and java time. 
*/ -public class DateMathParser { +public interface DateMathParser { - // base fields which should be used for default parsing, when we round up - private static final Map ROUND_UP_BASE_FIELDS = new HashMap<>(6); - { - ROUND_UP_BASE_FIELDS.put(ChronoField.MONTH_OF_YEAR, 1L); - ROUND_UP_BASE_FIELDS.put(ChronoField.DAY_OF_MONTH, 1L); - ROUND_UP_BASE_FIELDS.put(ChronoField.HOUR_OF_DAY, 23L); - ROUND_UP_BASE_FIELDS.put(ChronoField.MINUTE_OF_HOUR, 59L); - ROUND_UP_BASE_FIELDS.put(ChronoField.SECOND_OF_MINUTE, 59L); - ROUND_UP_BASE_FIELDS.put(ChronoField.MILLI_OF_SECOND, 999L); + /** + * Parse a date math expression without timezone info, rounding down. + */ + default long parse(String text, LongSupplier now) { + return parse(text, now, false, (ZoneId) null); } - private final DateFormatter formatter; - private final DateFormatter roundUpFormatter; + // Note: we take a callable here for the timestamp in order to be able to figure out + // if it has been used. For instance, the request cache does not cache requests that make + // use of `now`. - public DateMathParser(DateFormatter formatter) { - Objects.requireNonNull(formatter); - this.formatter = formatter; - this.roundUpFormatter = formatter.parseDefaulting(ROUND_UP_BASE_FIELDS); - } - - public long parse(String text, LongSupplier now) { - return parse(text, now, false, null); + // exists for backcompat, do not use! + @Deprecated + default long parse(String text, LongSupplier now, boolean roundUp, DateTimeZone tz) { + return parse(text, now, roundUp, tz == null ? null : ZoneId.of(tz.getID())); } /** @@ -92,176 +67,8 @@ public class DateMathParser { * @param text the input * @param now a supplier to retrieve the current date in milliseconds, if needed for additions * @param roundUp should the result be rounded up - * @param timeZone an optional timezone that should be applied before returning the milliseconds since the epoch + * @param tz an optional timezone that should be applied before returning the milliseconds since the epoch * @return the parsed date in milliseconds since the epoch */ - public long parse(String text, LongSupplier now, boolean roundUp, ZoneId timeZone) { - long time; - String mathString; - if (text.startsWith("now")) { - try { - time = now.getAsLong(); - } catch (Exception e) { - throw new ElasticsearchParseException("could not read the current timestamp", e); - } - mathString = text.substring("now".length()); - } else { - int index = text.indexOf("||"); - if (index == -1) { - return parseDateTime(text, timeZone, roundUp); - } - time = parseDateTime(text.substring(0, index), timeZone, false); - mathString = text.substring(index + 2); - } - - return parseMath(mathString, time, roundUp, timeZone); - } - - private long parseMath(final String mathString, final long time, final boolean roundUp, - ZoneId timeZone) throws ElasticsearchParseException { - if (timeZone == null) { - timeZone = ZoneOffset.UTC; - } - ZonedDateTime dateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(time), timeZone); - for (int i = 0; i < mathString.length(); ) { - char c = mathString.charAt(i++); - final boolean round; - final int sign; - if (c == '/') { - round = true; - sign = 1; - } else { - round = false; - if (c == '+') { - sign = 1; - } else if (c == '-') { - sign = -1; - } else { - throw new ElasticsearchParseException("operator not supported for date math [{}]", mathString); - } - } - - if (i >= mathString.length()) { - throw new ElasticsearchParseException("truncated date math [{}]", mathString); - } - - final int num; - if 
(!Character.isDigit(mathString.charAt(i))) { - num = 1; - } else { - int numFrom = i; - while (i < mathString.length() && Character.isDigit(mathString.charAt(i))) { - i++; - } - if (i >= mathString.length()) { - throw new ElasticsearchParseException("truncated date math [{}]", mathString); - } - num = Integer.parseInt(mathString.substring(numFrom, i)); - } - if (round) { - if (num != 1) { - throw new ElasticsearchParseException("rounding `/` can only be used on single unit types [{}]", mathString); - } - } - char unit = mathString.charAt(i++); - switch (unit) { - case 'y': - if (round) { - dateTime = dateTime.withDayOfYear(1).with(LocalTime.MIN); - } else { - dateTime = dateTime.plusYears(sign * num); - } - if (roundUp) { - dateTime = dateTime.plusYears(1); - } - break; - case 'M': - if (round) { - dateTime = dateTime.withDayOfMonth(1).with(LocalTime.MIN); - } else { - dateTime = dateTime.plusMonths(sign * num); - } - if (roundUp) { - dateTime = dateTime.plusMonths(1); - } - break; - case 'w': - if (round) { - dateTime = dateTime.with(TemporalAdjusters.previousOrSame(DayOfWeek.MONDAY)).with(LocalTime.MIN); - } else { - dateTime = dateTime.plusWeeks(sign * num); - } - if (roundUp) { - dateTime = dateTime.plusWeeks(1); - } - break; - case 'd': - if (round) { - dateTime = dateTime.with(LocalTime.MIN); - } else { - dateTime = dateTime.plusDays(sign * num); - } - if (roundUp) { - dateTime = dateTime.plusDays(1); - } - break; - case 'h': - case 'H': - if (round) { - dateTime = dateTime.withMinute(0).withSecond(0).withNano(0); - } else { - dateTime = dateTime.plusHours(sign * num); - } - if (roundUp) { - dateTime = dateTime.plusHours(1); - } - break; - case 'm': - if (round) { - dateTime = dateTime.withSecond(0).withNano(0); - } else { - dateTime = dateTime.plusMinutes(sign * num); - } - if (roundUp) { - dateTime = dateTime.plusMinutes(1); - } - break; - case 's': - if (round) { - dateTime = dateTime.withNano(0); - } else { - dateTime = dateTime.plusSeconds(sign * num); - } - if (roundUp) { - dateTime = dateTime.plusSeconds(1); - } - break; - default: - throw new ElasticsearchParseException("unit [{}] not supported for date math [{}]", unit, mathString); - } - if (roundUp) { - dateTime = dateTime.minus(1, ChronoField.MILLI_OF_SECOND.getBaseUnit()); - } - } - return dateTime.toInstant().toEpochMilli(); - } - - private long parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTime) { - DateFormatter formatter = roundUpIfNoTime ? 
this.roundUpFormatter : this.formatter; - try { - if (timeZone == null) { - return DateFormatters.toZonedDateTime(formatter.parse(value)).toInstant().toEpochMilli(); - } else { - TemporalAccessor accessor = formatter.parse(value); - ZoneId zoneId = TemporalQueries.zone().queryFrom(accessor); - if (zoneId != null) { - timeZone = zoneId; - } - - return DateFormatters.toZonedDateTime(accessor).withZoneSameLocal(timeZone).toInstant().toEpochMilli(); - } - } catch (IllegalArgumentException | DateTimeException e) { - throw new ElasticsearchParseException("failed to parse date field [{}]: [{}]", e, value, e.getMessage()); - } - } + long parse(String text, LongSupplier now, boolean roundUp, ZoneId tz); } diff --git a/server/src/main/java/org/elasticsearch/common/time/DateUtils.java b/server/src/main/java/org/elasticsearch/common/time/DateUtils.java new file mode 100644 index 00000000000..ed04321ee83 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/DateUtils.java @@ -0,0 +1,70 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.time; + +import org.apache.logging.log4j.LogManager; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.joda.time.DateTimeZone; + +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +public class DateUtils { + public static DateTimeZone zoneIdToDateTimeZone(ZoneId zoneId) { + if (zoneId == null) { + return null; + } + if (zoneId instanceof ZoneOffset) { + // the id for zoneoffset is not ISO compatible, so cannot be read by ZoneId.of + return DateTimeZone.forOffsetMillis(((ZoneOffset)zoneId).getTotalSeconds() * 1000); + } + return DateTimeZone.forID(zoneId.getId()); + } + + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(DateFormatters.class)); + // pkg private for tests + static final Map DEPRECATED_SHORT_TIMEZONES; + static { + Map tzs = new HashMap<>(); + tzs.put("EST", "-05:00"); // eastern time without daylight savings + tzs.put("HST", "-10:00"); + tzs.put("MST", "-07:00"); + tzs.put("ROC", "Asia/Taipei"); + tzs.put("Eire", "Europe/London"); + DEPRECATED_SHORT_TIMEZONES = Collections.unmodifiableMap(tzs); + } + + public static ZoneId dateTimeZoneToZoneId(DateTimeZone timeZone) { + if (timeZone == null) { + return null; + } + + String deprecatedId = DEPRECATED_SHORT_TIMEZONES.get(timeZone.getID()); + if (deprecatedId != null) { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("timezone", + "Use of short timezone id " + timeZone.getID() + " is deprecated. 
Use " + deprecatedId + " instead"); + return ZoneId.of(deprecatedId); + } + return ZoneId.of(timeZone.getID()); + } +} diff --git a/server/src/main/java/org/elasticsearch/common/time/EpochMillisDateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/EpochMillisDateFormatter.java index d50cc0cf466..d059f358ccb 100644 --- a/server/src/main/java/org/elasticsearch/common/time/EpochMillisDateFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/time/EpochMillisDateFormatter.java @@ -25,6 +25,7 @@ import java.time.ZoneOffset; import java.time.format.DateTimeParseException; import java.time.temporal.TemporalAccessor; import java.time.temporal.TemporalField; +import java.util.Locale; import java.util.Map; /** @@ -40,7 +41,8 @@ class EpochMillisDateFormatter implements DateFormatter { public static DateFormatter INSTANCE = new EpochMillisDateFormatter(); - private EpochMillisDateFormatter() {} + private EpochMillisDateFormatter() { + } @Override public TemporalAccessor parse(String input) { @@ -53,6 +55,17 @@ class EpochMillisDateFormatter implements DateFormatter { @Override public DateFormatter withZone(ZoneId zoneId) { + if (ZoneOffset.UTC.equals(zoneId) == false) { + throw new IllegalArgumentException(pattern() + " date formatter can only be in zone offset UTC"); + } + return INSTANCE; + } + + @Override + public DateFormatter withLocale(Locale locale) { + if (Locale.ROOT.equals(locale) == false) { + throw new IllegalArgumentException(pattern() + " date formatter can only be in locale ROOT"); + } return this; } @@ -70,4 +83,14 @@ class EpochMillisDateFormatter implements DateFormatter { public DateFormatter parseDefaulting(Map fields) { return this; } + + @Override + public Locale getLocale() { + return Locale.ROOT; + } + + @Override + public ZoneId getZone() { + return ZoneOffset.UTC; + } } diff --git a/server/src/main/java/org/elasticsearch/common/time/EpochSecondsDateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/EpochSecondsDateFormatter.java new file mode 100644 index 00000000000..a5d702bbda5 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/EpochSecondsDateFormatter.java @@ -0,0 +1,107 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.time; + +import java.math.BigDecimal; +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.format.DateTimeParseException; +import java.time.temporal.TemporalAccessor; +import java.time.temporal.TemporalField; +import java.util.Locale; +import java.util.Map; +import java.util.regex.Pattern; + +public class EpochSecondsDateFormatter implements DateFormatter { + + public static DateFormatter INSTANCE = new EpochSecondsDateFormatter(); + private static final Pattern SPLIT_BY_DOT_PATTERN = Pattern.compile("\\."); + + private EpochSecondsDateFormatter() {} + + @Override + public TemporalAccessor parse(String input) { + try { + if (input.contains(".")) { + String[] inputs = SPLIT_BY_DOT_PATTERN.split(input, 2); + Long seconds = Long.valueOf(inputs[0]); + if (inputs[1].length() == 0) { + // this is BWC compatible with joda time, nothing after the dot is allowed + return Instant.ofEpochSecond(seconds, 0).atZone(ZoneOffset.UTC); + } + if (inputs[1].length() > 9) { + throw new DateTimeParseException("too much granularity after dot [" + input + "]", input, 0); + } + Long nanos = new BigDecimal(inputs[1]).movePointRight(9 - inputs[1].length()).longValueExact(); + return Instant.ofEpochSecond(seconds, nanos).atZone(ZoneOffset.UTC); + } else { + return Instant.ofEpochSecond(Long.valueOf(input)).atZone(ZoneOffset.UTC); + } + } catch (NumberFormatException e) { + throw new DateTimeParseException("invalid number [" + input + "]", input, 0, e); + } + } + + @Override + public String format(TemporalAccessor accessor) { + Instant instant = Instant.from(accessor); + if (instant.getNano() != 0) { + return String.valueOf(instant.getEpochSecond()) + "." + String.valueOf(instant.getNano()).replaceAll("0*$", ""); + } + return String.valueOf(instant.getEpochSecond()); + } + + @Override + public String pattern() { + return "epoch_second"; + } + + @Override + public Locale getLocale() { + return Locale.ROOT; + } + + @Override + public ZoneId getZone() { + return ZoneOffset.UTC; + } + + @Override + public DateFormatter withZone(ZoneId zoneId) { + if (zoneId.equals(ZoneOffset.UTC) == false) { + throw new IllegalArgumentException(pattern() + " date formatter can only be in zone offset UTC"); + } + return this; + } + + @Override + public DateFormatter withLocale(Locale locale) { + if (Locale.ROOT.equals(locale) == false) { + throw new IllegalArgumentException(pattern() + " date formatter can only be in locale ROOT"); + } + return this; + } + + @Override + public DateFormatter parseDefaulting(Map fields) { + return this; + } +}
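To pin down the fractional-seconds behaviour of EpochSecondsDateFormatter above, a small round-trip sketch; illustrative only, and it reaches the formatter through DateFormatters.forPattern("epoch_second"), which this change wires to EpochSecondsDateFormatter.INSTANCE.

--------------------------------------
import java.time.Instant;
import java.time.temporal.TemporalAccessor;

import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;

public class EpochSecondSketch {
    public static void main(String[] args) {
        DateFormatter formatter = DateFormatters.forPattern("epoch_second");

        // "123" after the dot is a fraction of a second, i.e. 123_000_000 nanos
        TemporalAccessor accessor = formatter.parse("1522332219.123");
        Instant instant = Instant.from(accessor);
        assert instant.getEpochSecond() == 1522332219L;
        assert instant.getNano() == 123_000_000;

        // format() prints the fraction back with trailing zeros stripped
        assert "1522332219.123".equals(formatter.format(instant));

        // more than nine digits after the dot is rejected with "too much granularity after dot"
    }
}
--------------------------------------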
 diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java index f68215fde49..75cd82b51e8 100644 --- a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java @@ -28,6 +28,7 @@ import java.time.temporal.TemporalField; import java.util.Arrays; import java.util.Locale; import java.util.Map; +import java.util.Objects; class JavaDateFormatter implements DateFormatter { @@ -36,10 +37,17 @@ class JavaDateFormatter implements DateFormatter { private final DateTimeFormatter[] parsers; JavaDateFormatter(String format, DateTimeFormatter printer, DateTimeFormatter... parsers) { + if (printer == null) { + throw new IllegalArgumentException("printer may not be null"); + } long distinctZones = Arrays.stream(parsers).map(DateTimeFormatter::getZone).distinct().count(); if (distinctZones > 1) { throw new IllegalArgumentException("formatters must have the same time zone"); } + long distinctLocales = Arrays.stream(parsers).map(DateTimeFormatter::getLocale).distinct().count(); + if (distinctLocales > 1) { + throw new IllegalArgumentException("formatters must have the same locale"); + } if (parsers.length == 0) { this.parsers = new DateTimeFormatter[]{printer}; } else { @@ -83,6 +91,21 @@ class JavaDateFormatter implements DateFormatter { return new JavaDateFormatter(format, printer.withZone(zoneId), parsersWithZone); } + @Override + public DateFormatter withLocale(Locale locale) { + // shortcut to avoid creating new objects unnecessarily + if (locale.equals(parsers[0].getLocale())) { + return this; + } + + final DateTimeFormatter[] parsersWithLocale = new DateTimeFormatter[parsers.length]; + for (int i = 0; i < parsers.length; i++) { + parsersWithLocale[i] = parsers[i].withLocale(locale); + } + + return new JavaDateFormatter(format, printer.withLocale(locale), parsersWithLocale); + } + @Override public String format(TemporalAccessor accessor) { return printer.format(accessor); @@ -109,4 +132,36 @@ class JavaDateFormatter implements DateFormatter { return new JavaDateFormatter(format, parseDefaultingBuilder.toFormatter(Locale.ROOT), parsersWithDefaulting); } } + + @Override + public Locale getLocale() { + return this.printer.getLocale(); + } + + @Override + public ZoneId getZone() { + return this.printer.getZone(); + } + + @Override + public int hashCode() { + return Objects.hash(getLocale(), printer.getZone(), format); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass().equals(this.getClass()) == false) { + return false; + } + JavaDateFormatter other = (JavaDateFormatter) obj; + + return Objects.equals(format, other.format) && + Objects.equals(getLocale(), other.getLocale()) && + Objects.equals(this.printer.getZone(), other.printer.getZone()); + } + + @Override + public String toString() { + return String.format(Locale.ROOT, "format[%s] locale[%s]", format, getLocale()); + } } diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java b/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java new file mode 100644 index 00000000000..c3a59f52190 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java @@ -0,0 +1,240 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.time; + +import org.elasticsearch.ElasticsearchParseException; + +import java.time.DateTimeException; +import java.time.DayOfWeek; +import java.time.Instant; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.temporal.ChronoField; +import java.time.temporal.TemporalAccessor; +import java.time.temporal.TemporalAdjusters; +import java.time.temporal.TemporalField; +import java.time.temporal.TemporalQueries; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import java.util.function.LongSupplier; + +/** + * A parser for date/time formatted text with optional date math. + * + * The format of the datetime is configurable, and unix timestamps can also be used. Datemath + * is appended to a datetime with the following syntax: + * <code>||[+-/](\d+)?[yMwdhHms]</code>. + */ +public class JavaDateMathParser implements DateMathParser { + + // base fields which should be used for default parsing, when we round up + private static final Map ROUND_UP_BASE_FIELDS = new HashMap<>(6); + static { + ROUND_UP_BASE_FIELDS.put(ChronoField.MONTH_OF_YEAR, 1L); + ROUND_UP_BASE_FIELDS.put(ChronoField.DAY_OF_MONTH, 1L); + ROUND_UP_BASE_FIELDS.put(ChronoField.HOUR_OF_DAY, 23L); + ROUND_UP_BASE_FIELDS.put(ChronoField.MINUTE_OF_HOUR, 59L); + ROUND_UP_BASE_FIELDS.put(ChronoField.SECOND_OF_MINUTE, 59L); + ROUND_UP_BASE_FIELDS.put(ChronoField.MILLI_OF_SECOND, 999L); + } + + private final DateFormatter formatter; + private final DateFormatter roundUpFormatter; + + public JavaDateMathParser(DateFormatter formatter) { + Objects.requireNonNull(formatter); + this.formatter = formatter; + this.roundUpFormatter = formatter.parseDefaulting(ROUND_UP_BASE_FIELDS); + } + + @Override + public long parse(String text, LongSupplier now, boolean roundUp, ZoneId timeZone) { + long time; + String mathString; + if (text.startsWith("now")) { + try { + time = now.getAsLong(); + } catch (Exception e) { + throw new ElasticsearchParseException("could not read the current timestamp", e); + } + mathString = text.substring("now".length()); + } else { + int index = text.indexOf("||"); + if (index == -1) { + return parseDateTime(text, timeZone, roundUp); + } + time = parseDateTime(text.substring(0, index), timeZone, false); + mathString = text.substring(index + 2); + } + + return parseMath(mathString, time, roundUp, timeZone); + } + + private long parseMath(final String mathString, final long time, final boolean roundUp, + ZoneId timeZone) throws ElasticsearchParseException { + if (timeZone == null) { + timeZone = ZoneOffset.UTC; + } + ZonedDateTime dateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(time), timeZone); + for (int i = 0; i < mathString.length(); ) { + char c = mathString.charAt(i++); + final boolean round; + final int sign; + if (c == '/') { + round = true; + sign = 1; + } else { + round = false; + if (c == '+') { + sign = 1; + } else if (c == '-') { + sign = -1; + } else { + throw new ElasticsearchParseException("operator not supported for date math [{}]", mathString); + } + } + + if (i >= mathString.length()) { + throw new ElasticsearchParseException("truncated date math [{}]", mathString); + } + + final int num; + if (!Character.isDigit(mathString.charAt(i))) { + num = 1; + } else { + int numFrom = i; + while (i < mathString.length() && Character.isDigit(mathString.charAt(i))) { + i++; + } + if (i >= mathString.length()) { + throw new ElasticsearchParseException("truncated date math [{}]", 
mathString); + } + num = Integer.parseInt(mathString.substring(numFrom, i)); + } + if (round) { + if (num != 1) { + throw new ElasticsearchParseException("rounding `/` can only be used on single unit types [{}]", mathString); + } + } + char unit = mathString.charAt(i++); + switch (unit) { + case 'y': + if (round) { + dateTime = dateTime.withDayOfYear(1).with(LocalTime.MIN); + } else { + dateTime = dateTime.plusYears(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusYears(1); + } + break; + case 'M': + if (round) { + dateTime = dateTime.withDayOfMonth(1).with(LocalTime.MIN); + } else { + dateTime = dateTime.plusMonths(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusMonths(1); + } + break; + case 'w': + if (round) { + dateTime = dateTime.with(TemporalAdjusters.previousOrSame(DayOfWeek.MONDAY)).with(LocalTime.MIN); + } else { + dateTime = dateTime.plusWeeks(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusWeeks(1); + } + break; + case 'd': + if (round) { + dateTime = dateTime.with(LocalTime.MIN); + } else { + dateTime = dateTime.plusDays(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusDays(1); + } + break; + case 'h': + case 'H': + if (round) { + dateTime = dateTime.withMinute(0).withSecond(0).withNano(0); + } else { + dateTime = dateTime.plusHours(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusHours(1); + } + break; + case 'm': + if (round) { + dateTime = dateTime.withSecond(0).withNano(0); + } else { + dateTime = dateTime.plusMinutes(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusMinutes(1); + } + break; + case 's': + if (round) { + dateTime = dateTime.withNano(0); + } else { + dateTime = dateTime.plusSeconds(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusSeconds(1); + } + break; + default: + throw new ElasticsearchParseException("unit [{}] not supported for date math [{}]", unit, mathString); + } + if (roundUp) { + dateTime = dateTime.minus(1, ChronoField.MILLI_OF_SECOND.getBaseUnit()); + } + } + return dateTime.toInstant().toEpochMilli(); + } + + private long parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTime) { + DateFormatter formatter = roundUpIfNoTime ? 
this.roundUpFormatter : this.formatter; + try { + if (timeZone == null) { + return DateFormatters.toZonedDateTime(formatter.parse(value)).toInstant().toEpochMilli(); + } else { + TemporalAccessor accessor = formatter.parse(value); + ZoneId zoneId = TemporalQueries.zone().queryFrom(accessor); + if (zoneId != null) { + timeZone = zoneId; + } + + return DateFormatters.toZonedDateTime(accessor).withZoneSameLocal(timeZone).toInstant().toEpochMilli(); + } + } catch (IllegalArgumentException | DateTimeException e) { + throw new ElasticsearchParseException("failed to parse date field [{}]: [{}]", e, value, e.getMessage()); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java b/server/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java index fc945027d1e..9d36b6f5ff6 100644 --- a/server/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java +++ b/server/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java @@ -35,8 +35,11 @@ import java.util.Locale; import java.util.Objects; public class ByteSizeValue implements Writeable, Comparable, ToXContentFragment { + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(ByteSizeValue.class)); + public static final ByteSizeValue ZERO = new ByteSizeValue(0, ByteSizeUnit.BYTES); + private final long size; private final ByteSizeUnit unit; diff --git a/server/src/main/java/org/elasticsearch/discovery/AckClusterStatePublishResponseHandler.java b/server/src/main/java/org/elasticsearch/discovery/AckClusterStatePublishResponseHandler.java index fd47fd0e86d..d3ca7a222e0 100644 --- a/server/src/main/java/org/elasticsearch/discovery/AckClusterStatePublishResponseHandler.java +++ b/server/src/main/java/org/elasticsearch/discovery/AckClusterStatePublishResponseHandler.java @@ -19,9 +19,9 @@ package org.elasticsearch.discovery; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.logging.ESLoggerFactory; import java.util.Set; @@ -32,7 +32,7 @@ import java.util.Set; */ public class AckClusterStatePublishResponseHandler extends BlockingClusterStatePublishResponseHandler { - private static final Logger logger = ESLoggerFactory.getLogger(AckClusterStatePublishResponseHandler.class.getName()); + private static final Logger logger = LogManager.getLogger(AckClusterStatePublishResponseHandler.class); private final Discovery.AckListener ackListener; diff --git a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java index 91f4e615159..7256bb16747 100644 --- a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java +++ b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java @@ -19,13 +19,14 @@ package org.elasticsearch.discovery; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterApplier; import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkService; import 
org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; @@ -60,6 +61,7 @@ import java.util.stream.Collectors; * A module for loading classes for node discovery. */ public class DiscoveryModule { + private static final Logger logger = LogManager.getLogger(DiscoveryModule.class); public static final Setting DISCOVERY_TYPE_SETTING = new Setting<>("discovery.type", "zen", Function.identity(), Property.NodeScope); @@ -131,7 +133,7 @@ public class DiscoveryModule { if (discoverySupplier == null) { throw new IllegalArgumentException("Unknown discovery type [" + discoveryType + "]"); } - Loggers.getLogger(getClass(), settings).info("using discovery type [{}] and host providers {}", discoveryType, hostsProviderNames); + logger.info("using discovery type [{}] and host providers {}", discoveryType, hostsProviderNames); discovery = Objects.requireNonNull(discoverySupplier.get()); } diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 3240bb236df..7d490be4af5 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -19,6 +19,10 @@ package org.elasticsearch.env; +import java.io.UncheckedIOException; +import java.util.Iterator; +import java.util.stream.Collectors; +import java.util.stream.Stream; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; @@ -486,12 +490,27 @@ public final class NodeEnvironment implements Closeable { } private static boolean assertPathsDoNotExist(final Path[] paths) { - Set existingPaths = new HashSet<>(); - for (Path path : paths) { - if (FileSystemUtils.exists(paths)) { - existingPaths.add(path); - } - } + Set existingPaths = Stream.of(paths) + .filter(FileSystemUtils::exists) + .filter(leftOver -> { + // Relaxed assertion for the special case where only the empty state directory exists after deleting + // the shard directory because it was created again as a result of a metadata read action concurrently. + try (DirectoryStream children = Files.newDirectoryStream(leftOver)) { + Iterator iter = children.iterator(); + if (iter.hasNext() == false) { + return true; + } + Path maybeState = iter.next(); + if (iter.hasNext() || maybeState.equals(leftOver.resolve(MetaDataStateFormat.STATE_DIR_NAME)) == false) { + return true; + } + try (DirectoryStream stateChildren = Files.newDirectoryStream(maybeState)) { + return stateChildren.iterator().hasNext(); + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }).collect(Collectors.toSet()); assert existingPaths.size() == 0 : "Paths exist that should have been deleted: " + existingPaths; return existingPaths.size() == 0; } @@ -551,10 +570,8 @@ public final class NodeEnvironment implements Closeable { * @param index the index to lock shards for * @param lockTimeoutMS how long to wait for acquiring the indices shard locks * @return the {@link ShardLock} instances for this index. - * @throws IOException if an IOException occurs. 
*/ - public List lockAllForIndex(Index index, IndexSettings settings, long lockTimeoutMS) - throws IOException, ShardLockObtainFailedException { + public List lockAllForIndex(Index index, IndexSettings settings, long lockTimeoutMS) throws ShardLockObtainFailedException { final int numShards = settings.getNumberOfShards(); if (numShards <= 0) { throw new IllegalArgumentException("settings must contain a non-null > 0 number of shards"); @@ -842,7 +859,7 @@ public final class NodeEnvironment implements Closeable { /** * Resolves all existing paths to indexFolderName in ${data.paths}/nodes/{node.id}/indices */ - public Path[] resolveIndexFolder(String indexFolderName) throws IOException { + public Path[] resolveIndexFolder(String indexFolderName) { if (nodePaths == null || locks == null) { throw new IllegalStateException("node is not configured to store local location"); } @@ -987,17 +1004,6 @@ public final class NodeEnvironment implements Closeable { } } - /** - * Resolve the custom path for a index's shard. - * Uses the {@code IndexMetaData.SETTING_DATA_PATH} setting to determine - * the root path for the index. - * - * @param indexSettings settings for the index - */ - public Path resolveBaseCustomLocation(IndexSettings indexSettings) { - return resolveBaseCustomLocation(indexSettings, sharedDataPath, nodeLockId); - } - /** * Resolve the custom path for a index's shard. * Uses the {@code IndexMetaData.SETTING_DATA_PATH} setting to determine diff --git a/server/src/main/java/org/elasticsearch/env/ShardLock.java b/server/src/main/java/org/elasticsearch/env/ShardLock.java index 63b3f8e046e..dfb9d4c6359 100644 --- a/server/src/main/java/org/elasticsearch/env/ShardLock.java +++ b/server/src/main/java/org/elasticsearch/env/ShardLock.java @@ -29,7 +29,7 @@ import java.util.concurrent.atomic.AtomicBoolean; * directory. Internal processes should acquire a lock on a shard * before executing any write operations on the shards data directory. * - * @see org.elasticsearch.env.NodeEnvironment + * @see NodeEnvironment */ public abstract class ShardLock implements Closeable { @@ -56,13 +56,6 @@ public abstract class ShardLock implements Closeable { protected abstract void closeInternal(); - /** - * Returns true if this lock is still open ie. has not been closed yet. - */ - public final boolean isOpen() { - return closed.get() == false; - } - @Override public String toString() { return "ShardLock{" + diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index d7a283f3158..86b9c5a65c5 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -158,16 +158,8 @@ public final class AnalysisRegistry implements Closeable { public Map buildTokenFilterFactories(IndexSettings indexSettings) throws IOException { final Map tokenFiltersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_FILTER); - Map> tokenFilters = new HashMap<>(this.tokenFilters); - /* - * synonym and synonym_graph are different than everything else since they need access to the tokenizer factories for the index. - * instead of building the infrastructure for plugins we rather make it a real exception to not pollute the general interface and - * hide internal data-structures as much as possible. 
- */ - tokenFilters.put("synonym", requiresAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings))); - tokenFilters.put("synonym_graph", requiresAnalysisSettings((is, env, name, settings) -> new SynonymGraphTokenFilterFactory(is, env, this, name, settings))); - - return buildMapping(Component.FILTER, indexSettings, tokenFiltersSettings, Collections.unmodifiableMap(tokenFilters), prebuiltAnalysis.preConfiguredTokenFilters); + return buildMapping(Component.FILTER, indexSettings, tokenFiltersSettings, + Collections.unmodifiableMap(this.tokenFilters), prebuiltAnalysis.preConfiguredTokenFilters); } public Map buildTokenizerFactories(IndexSettings indexSettings) throws IOException { @@ -222,18 +214,7 @@ public final class AnalysisRegistry implements Closeable { if (tokenFilterSettings.containsKey(tokenFilter)) { Settings currentSettings = tokenFilterSettings.get(tokenFilter); String typeName = currentSettings.get("type"); - /* - * synonym and synonym_graph are different than everything else since they need access to the tokenizer factories for the index. - * instead of building the infrastructure for plugins we rather make it a real exception to not pollute the general interface and - * hide internal data-structures as much as possible. - */ - if ("synonym".equals(typeName)) { - return requiresAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings)); - } else if ("synonym_graph".equals(typeName)) { - return requiresAnalysisSettings((is, env, name, settings) -> new SynonymGraphTokenFilterFactory(is, env, this, name, settings)); - } else { - return getAnalysisProvider(Component.FILTER, tokenFilters, tokenFilter, typeName); - } + return getAnalysisProvider(Component.FILTER, tokenFilters, tokenFilter, typeName); } else { return getTokenFilterProvider(tokenFilter); } @@ -257,19 +238,6 @@ public final class AnalysisRegistry implements Closeable { } } - private static AnalysisModule.AnalysisProvider requiresAnalysisSettings(AnalysisModule.AnalysisProvider provider) { - return new AnalysisModule.AnalysisProvider() { - @Override - public T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException { - return provider.get(indexSettings, environment, name, settings); - } - @Override - public boolean requiresAnalysisSettings() { - return true; - } - }; - } - enum Component { ANALYZER { @Override diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index 09171d4df84..460501c8b52 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -43,8 +43,10 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; +import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.SetOnce; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.CheckedRunnable; import org.elasticsearch.common.FieldMemoryStats; import org.elasticsearch.common.Nullable; @@ -96,6 +98,7 @@ import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; import 
java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; @@ -108,6 +111,7 @@ public abstract class Engine implements Closeable { public static final String SYNC_COMMIT_ID = "sync_id"; public static final String HISTORY_UUID_KEY = "history_uuid"; public static final String MIN_RETAINED_SEQNO = "min_retained_seq_no"; + public static final String MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID = "max_unsafe_auto_id_timestamp"; protected final ShardId shardId; protected final String allocationId; @@ -135,6 +139,16 @@ public abstract class Engine implements Closeable { */ protected volatile long lastWriteNanos = System.nanoTime(); + /* + * This marker tracks the max seq_no of either update operations or delete operations have been processed in this engine. + * An index request is considered as an update if it overwrites existing documents with the same docId in the Lucene index. + * This marker is started uninitialized (-2), and the optimization using seq_no will be disabled if this marker is uninitialized. + * The value of this marker never goes backwards, and is updated/changed differently on primary and replica: + * 1. A primary initializes this marker once using the max_seq_no from its history, then advances when processing an update or delete. + * 2. A replica never advances this marker by itself but only inherits from its primary (via advanceMaxSeqNoOfUpdatesOrDeletes). + */ + private final AtomicLong maxSeqNoOfUpdatesOrDeletes = new AtomicLong(SequenceNumbers.UNASSIGNED_SEQ_NO); + protected Engine(EngineConfig engineConfig) { Objects.requireNonNull(engineConfig.getStore(), "Store must be provided to the engine"); @@ -650,7 +664,15 @@ public abstract class Engine implements Closeable { } Releasable releasable = store::decRef; try { - EngineSearcher engineSearcher = new EngineSearcher(source, getReferenceManager(scope), store, logger); + ReferenceManager referenceManager = getReferenceManager(scope); + Searcher engineSearcher = new Searcher(source, referenceManager.acquire(), + s -> { + try { + referenceManager.release(s); + } finally { + store.decRef(); + } + }, logger); releasable = null; // success - hand over the reference to the engine searcher return engineSearcher; } catch (AlreadyClosedException ex) { @@ -1154,40 +1176,67 @@ public abstract class Engine implements Closeable { } public static class Searcher implements Releasable { - private final String source; private final IndexSearcher searcher; + private final AtomicBoolean released = new AtomicBoolean(false); + private final Logger logger; + private final IOUtils.IOConsumer onClose; - public Searcher(String source, IndexSearcher searcher) { + public Searcher(String source, IndexSearcher searcher, Logger logger) { + this(source, searcher, s -> s.getIndexReader().close(), logger); + } + + public Searcher(String source, IndexSearcher searcher, IOUtils.IOConsumer onClose, Logger logger) { this.source = source; this.searcher = searcher; + this.onClose = onClose; + this.logger = logger; } /** * The source that caused this searcher to be acquired. 
*/ - public String source() { + public final String source() { return source; } - public IndexReader reader() { + public final IndexReader reader() { return searcher.getIndexReader(); } - public DirectoryReader getDirectoryReader() { + public final DirectoryReader getDirectoryReader() { if (reader() instanceof DirectoryReader) { return (DirectoryReader) reader(); } throw new IllegalStateException("Can't use " + reader().getClass() + " as a directory reader"); } - public IndexSearcher searcher() { + public final IndexSearcher searcher() { return searcher; } @Override public void close() { - // Nothing to close here + if (released.compareAndSet(false, true) == false) { + /* In general, searchers should never be released twice or this would break reference counting. There is one rare case + * when it might happen though: when the request and the Reaper thread would both try to release it in a very short amount + * of time, this is why we only log a warning instead of throwing an exception. + */ + logger.warn("Searcher was released twice", new IllegalStateException("Double release")); + return; + } + try { + onClose.accept(searcher()); + } catch (IOException e) { + throw new IllegalStateException("Cannot close", e); + } catch (AlreadyClosedException e) { + // This means there's a bug somewhere: don't suppress it + throw new AssertionError(e); + } + } + + public final Logger getLogger() { + return logger; } } @@ -1720,12 +1769,12 @@ public abstract class Engine implements Closeable { public abstract void deactivateThrottling(); /** - * Marks operations in the translog as completed. This is used to restore the state of the local checkpoint tracker on primary - * promotion. + * This method replays translog to restore the Lucene index which might be reverted previously. + * This ensures that all acknowledged writes are restored correctly when this engine is promoted. * - * @throws IOException if an I/O exception occurred reading the translog + * @return the number of translog operations have been recovered */ - public abstract void restoreLocalCheckpointFromTranslog() throws IOException; + public abstract int restoreLocalHistoryFromTranslog(TranslogRecoveryRunner translogRecoveryRunner) throws IOException; /** * Fills up the local checkpoints history with no-ops until the local checkpoint @@ -1761,8 +1810,77 @@ public abstract class Engine implements Closeable { */ public abstract void maybePruneDeletes(); + /** + * Returns the maximum auto_id_timestamp of all append-only index requests have been processed by this engine + * or the auto_id_timestamp received from its primary shard via {@link #updateMaxUnsafeAutoIdTimestamp(long)}. + * Notes this method returns the auto_id_timestamp of all append-only requests, not max_unsafe_auto_id_timestamp. + */ + public long getMaxSeenAutoIdTimestamp() { + return IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP; + } + + /** + * Forces this engine to advance its max_unsafe_auto_id_timestamp marker to at least the given timestamp. + * The engine will disable optimization for all append-only whose timestamp at most {@code newTimestamp}. + */ + public abstract void updateMaxUnsafeAutoIdTimestamp(long newTimestamp); + @FunctionalInterface public interface TranslogRecoveryRunner { int run(Engine engine, Translog.Snapshot snapshot) throws IOException; } + + /** + * Returns the maximum sequence number of either update or delete operations have been processed in this engine + * or the sequence number from {@link #advanceMaxSeqNoOfUpdatesOrDeletes(long)}. 
An index request is considered + * as an update operation if it overwrites the existing documents in Lucene index with the same document id. + * <p> + * A note on the optimization using max_seq_no_of_updates_or_deletes: + * For each operation O, the key invariants are: + * <ol> + *     <li> I1: There is no operation on docID(O) with seqno that is {@literal > MSU(O) and < seqno(O)} </li> + *     <li> I2: If {@literal MSU(O) < seqno(O)} then docID(O) did not exist when O was applied; more precisely, if there is any O' + *          with {@literal seqno(O') < seqno(O) and docID(O') = docID(O)} then the one with the greatest seqno is a delete. </li> + * </ol> + * <p>
+ * When a receiving shard (either a replica or a follower) receives an operation O, it must first ensure its own MSU is at least MSU(O), + * and then compare its MSU to its local checkpoint (LCP). If {@literal LCP < MSU} then there's a gap: there may be some operations + * that act on docID(O) about which we do not yet know, so we cannot perform an add. Note this also covers the case where a future + * operation O' with {@literal seqNo(O') > seqNo(O) and docId(O') = docID(O)} is processed before O. In that case MSU(O') is at least + * seqno(O') and this means {@literal MSU >= seqNo(O') > seqNo(O) > LCP} (because O wasn't processed yet). + * <p>
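+ * For example, if this shard's MSU is 12 but its LCP is only 9, then the operations with seqnos 10 through 12 are not all known + * here yet, and any of them could be a delete of docID(O); O therefore cannot be applied as a plain add and must take the regular + * update path. (The numbers are purely illustrative.) + * <p>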
+ * However, if {@literal MSU <= LCP} then there is no gap: we have processed every {@literal operation <= LCP}, and no operation O' + * with {@literal seqno(O') > LCP and seqno(O') < seqno(O) also has docID(O') = docID(O)}, because such an operation would have + * {@literal seqno(O') > LCP >= MSU >= MSU(O)}, which contradicts the first invariant. Furthermore, in this case we immediately know + * that docID(O) has been deleted (or never existed) without needing to check Lucene, for the following reason. If there's no earlier + * operation on docID(O) then this is clear, so suppose instead that the preceding operation on docID(O) is O': + * 1. The first invariant above tells us that {@literal seqno(O') <= MSU(O) <= LCP} so we have already applied O' to Lucene. + * 2. Also {@literal MSU(O) <= MSU <= LCP < seqno(O)} (we discard O if {@literal seqno(O) <= LCP}) so the second invariant applies, + * meaning that O' was a delete. + * <p>
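+ * For example, with MSU = 8 and LCP = 9, an operation O with {@literal seqno(O) = 10 and MSU(O) = 5} can be applied directly: + * everything up to seqno 9 has already been processed, I1 rules out any other operation on docID(O) between seqnos 5 and 10, and + * I2 says the latest operation on docID(O) at or below seqno 5, if one exists, was a delete. (Again, illustrative numbers only.) + * <p>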
+ * Therefore, if {@literal MSU <= LCP < seqno(O)} we know that O can safely be optimized with and added to lucene with addDocument. + * Moreover, operations that are optimized using the MSU optimization must not be processed twice as this will create duplicates + * in Lucene. To avoid this we check the local checkpoint tracker to see if an operation was already processed. + * + * @see #initializeMaxSeqNoOfUpdatesOrDeletes() + * @see #advanceMaxSeqNoOfUpdatesOrDeletes(long) + */ + public final long getMaxSeqNoOfUpdatesOrDeletes() { + return maxSeqNoOfUpdatesOrDeletes.get(); + } + + /** + * A primary shard calls this method once to initialize the max_seq_no_of_updates marker using the + * max_seq_no from Lucene index and translog before replaying the local translog in its local recovery. + */ + public abstract void initializeMaxSeqNoOfUpdatesOrDeletes(); + + /** + * A replica shard receives a new max_seq_no_of_updates from its primary shard, then calls this method + * to advance this marker to at least the given sequence number. + */ + public final void advanceMaxSeqNoOfUpdatesOrDeletes(long seqNo) { + maxSeqNoOfUpdatesOrDeletes.updateAndGet(curr -> Math.max(curr, seqNo)); + assert maxSeqNoOfUpdatesOrDeletes.get() >= seqNo : maxSeqNoOfUpdatesOrDeletes.get() + " < " + seqNo; + } } diff --git a/server/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java b/server/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java deleted file mode 100644 index 7fd0fe6cc39..00000000000 --- a/server/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.engine; - -import org.apache.logging.log4j.Logger; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.ReferenceManager; -import org.apache.lucene.store.AlreadyClosedException; -import org.elasticsearch.index.store.Store; - -import java.io.IOException; -import java.util.concurrent.atomic.AtomicBoolean; - -/** - * Searcher for an Engine - */ -final class EngineSearcher extends Engine.Searcher { - private final AtomicBoolean released = new AtomicBoolean(false); - private final Store store; - private final Logger logger; - private final ReferenceManager referenceManager; - - EngineSearcher(String source, ReferenceManager searcherReferenceManager, Store store, Logger logger) throws IOException { - super(source, searcherReferenceManager.acquire()); - this.store = store; - this.logger = logger; - this.referenceManager = searcherReferenceManager; - } - - @Override - public void close() { - if (!released.compareAndSet(false, true)) { - /* In general, searchers should never be released twice or this would break reference counting. 
There is one rare case - * when it might happen though: when the request and the Reaper thread would both try to release it in a very short amount - * of time, this is why we only log a warning instead of throwing an exception. - */ - logger.warn("Searcher was released twice", new IllegalStateException("Double release")); - return; - } - try { - referenceManager.release(searcher()); - } catch (IOException e) { - throw new IllegalStateException("Cannot close", e); - } catch (AlreadyClosedException e) { - // This means there's a bug somewhere: don't suppress it - throw new AssertionError(e); - } finally { - store.decRef(); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 52dd4d3fcd0..187b0eb1359 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -47,6 +47,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.InfoStream; import org.elasticsearch.Assertions; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.SuppressForbidden; @@ -138,8 +139,8 @@ public class InternalEngine extends Engine { // incoming indexing ops to a single thread: private final AtomicInteger throttleRequestCount = new AtomicInteger(); private final AtomicBoolean pendingTranslogRecovery = new AtomicBoolean(false); - public static final String MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID = "max_unsafe_auto_id_timestamp"; private final AtomicLong maxUnsafeAutoIdTimestamp = new AtomicLong(-1); + private final AtomicLong maxSeenAutoIdTimestamp = new AtomicLong(-1); private final AtomicLong maxSeqNoOfNonAppendOnlyOperations = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); private final CounterMetric numVersionLookups = new CounterMetric(); private final CounterMetric numIndexVersionsLookups = new CounterMetric(); @@ -166,7 +167,7 @@ public class InternalEngine extends Engine { final BiFunction localCheckpointTrackerSupplier) { super(engineConfig); if (engineConfig.isAutoGeneratedIDsOptimizationEnabled() == false) { - maxUnsafeAutoIdTimestamp.set(Long.MAX_VALUE); + updateAutoIdTimestamp(Long.MAX_VALUE, true); } final TranslogDeletionPolicy translogDeletionPolicy = new TranslogDeletionPolicy( engineConfig.getIndexSettings().getTranslogRetentionSize().getBytes(), @@ -332,17 +333,12 @@ public class InternalEngine extends Engine { } @Override - public void restoreLocalCheckpointFromTranslog() throws IOException { - try (ReleasableLock ignored = writeLock.acquire()) { + public int restoreLocalHistoryFromTranslog(TranslogRecoveryRunner translogRecoveryRunner) throws IOException { + try (ReleasableLock ignored = readLock.acquire()) { ensureOpen(); final long localCheckpoint = localCheckpointTracker.getCheckpoint(); try (Translog.Snapshot snapshot = getTranslog().newSnapshotFromMinSeqNo(localCheckpoint + 1)) { - Translog.Operation operation; - while ((operation = snapshot.next()) != null) { - if (operation.seqNo() > localCheckpoint) { - localCheckpointTracker.markSeqNoAsCompleted(operation.seqNo()); - } - } + return translogRecoveryRunner.run(this, snapshot); } } } @@ -374,7 +370,7 @@ public class InternalEngine extends Engine { if (key.equals(MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID)) { assert maxUnsafeAutoIdTimestamp.get() == -1 : 
"max unsafe timestamp was assigned already [" + maxUnsafeAutoIdTimestamp.get() + "]"; - maxUnsafeAutoIdTimestamp.set(Long.parseLong(entry.getValue())); + updateAutoIdTimestamp(Long.parseLong(entry.getValue()), true); } if (key.equals(SequenceNumbers.MAX_SEQ_NO)) { assert maxSeqNoOfNonAppendOnlyOperations.get() == -1 : @@ -389,6 +385,7 @@ public class InternalEngine extends Engine { flushLock.lock(); try (ReleasableLock lock = readLock.acquire()) { ensureOpen(); + assert getMaxSeqNoOfUpdatesOrDeletes() != SequenceNumbers.UNASSIGNED_SEQ_NO : "max_seq_no_of_updates is uninitialized"; if (pendingTranslogRecovery.get() == false) { throw new IllegalStateException("Engine has already been recovered"); } @@ -609,7 +606,7 @@ public class InternalEngine extends Engine { // in the case of a already pruned translog generation we might get null here - yet very unlikely TranslogLeafReader reader = new TranslogLeafReader((Translog.Index) operation, engineConfig .getIndexSettings().getIndexVersionCreated()); - return new GetResult(new Searcher("realtime_get", new IndexSearcher(reader)), + return new GetResult(new Searcher("realtime_get", new IndexSearcher(reader), logger), new VersionsAndSeqNoResolver.DocIdAndVersion(0, ((Translog.Index) operation).version(), reader, 0)); } } catch (IOException e) { @@ -878,7 +875,7 @@ public class InternalEngine extends Engine { * requests, we can assert the replica have not seen the document of that append-only request, thus we can apply optimization. */ assert index.version() == 1L : "can optimize on replicas but incoming version is [" + index.version() + "]"; - plan = IndexingStrategy.optimizedAppendOnly(index.seqNo()); + plan = IndexingStrategy.optimizedAppendOnly(index.seqNo(), 1L); } else { if (appendOnlyRequest == false) { maxSeqNoOfNonAppendOnlyOperations.updateAndGet(curr -> Math.max(index.seqNo(), curr)); @@ -922,6 +919,7 @@ public class InternalEngine extends Engine { protected final IndexingStrategy planIndexingAsPrimary(Index index) throws IOException { assert index.origin() == Operation.Origin.PRIMARY : "planing as primary but origin isn't. 
got " + index.origin(); + assert getMaxSeqNoOfUpdatesOrDeletes() != SequenceNumbers.UNASSIGNED_SEQ_NO : "max_seq_no_of_updates is not initialized"; final IndexingStrategy plan; // resolve an external operation into an internal one which is safe to replay if (canOptimizeAddDocument(index)) { @@ -929,7 +927,7 @@ public class InternalEngine extends Engine { plan = IndexingStrategy.overrideExistingAsIfNotThere(generateSeqNoForOperation(index), 1L); versionMap.enforceSafeAccess(); } else { - plan = IndexingStrategy.optimizedAppendOnly(generateSeqNoForOperation(index)); + plan = IndexingStrategy.optimizedAppendOnly(generateSeqNoForOperation(index), 1L); } } else { versionMap.enforceSafeAccess(); @@ -956,6 +954,10 @@ public class InternalEngine extends Engine { ); } } + final boolean toAppend = plan.indexIntoLucene && plan.useLuceneUpdateDocument == false; + if (toAppend == false) { + advanceMaxSeqNoOfUpdatesOrDeletes(plan.seqNoForIndexing); + } return plan; } @@ -974,6 +976,7 @@ public class InternalEngine extends Engine { if (plan.addStaleOpToLucene) { addStaleDocs(index.docs(), indexWriter); } else if (plan.useLuceneUpdateDocument) { + assert assertMaxSeqNoOfUpdatesIsAdvanced(index.uid(), plan.seqNoForIndexing, true, true); updateDocs(index.uid(), index.docs(), indexWriter); } else { // document does not exists, we can optimize for create, but double check if assertions are running @@ -1014,11 +1017,12 @@ public class InternalEngine extends Engine { final boolean mayHaveBeenIndexBefore; if (index.isRetry()) { mayHaveBeenIndexBefore = true; - maxUnsafeAutoIdTimestamp.updateAndGet(curr -> Math.max(index.getAutoGeneratedIdTimestamp(), curr)); + updateAutoIdTimestamp(index.getAutoGeneratedIdTimestamp(), true); assert maxUnsafeAutoIdTimestamp.get() >= index.getAutoGeneratedIdTimestamp(); } else { // in this case we force mayHaveBeenIndexBefore = maxUnsafeAutoIdTimestamp.get() >= index.getAutoGeneratedIdTimestamp(); + updateAutoIdTimestamp(index.getAutoGeneratedIdTimestamp(), false); } return mayHaveBeenIndexBefore; } @@ -1078,8 +1082,8 @@ public class InternalEngine extends Engine { Optional.of(earlyResultOnPreFlightError); } - static IndexingStrategy optimizedAppendOnly(long seqNoForIndexing) { - return new IndexingStrategy(true, false, true, false, seqNoForIndexing, 1, null); + public static IndexingStrategy optimizedAppendOnly(long seqNoForIndexing, long versionForIndexing) { + return new IndexingStrategy(true, false, true, false, seqNoForIndexing, versionForIndexing, null); } static IndexingStrategy skipDueToVersionConflict( @@ -1100,7 +1104,8 @@ public class InternalEngine extends Engine { return new IndexingStrategy(true, true, true, false, seqNoForIndexing, versionForIndexing, null); } - static IndexingStrategy processButSkipLucene(boolean currentNotFoundOrDeleted, long seqNoForIndexing, long versionForIndexing) { + public static IndexingStrategy processButSkipLucene(boolean currentNotFoundOrDeleted, long seqNoForIndexing, + long versionForIndexing) { return new IndexingStrategy(currentNotFoundOrDeleted, false, false, false, seqNoForIndexing, versionForIndexing, null); } @@ -1245,6 +1250,7 @@ public class InternalEngine extends Engine { protected final DeletionStrategy planDeletionAsPrimary(Delete delete) throws IOException { assert delete.origin() == Operation.Origin.PRIMARY : "planing as primary but got " + delete.origin(); + assert getMaxSeqNoOfUpdatesOrDeletes() != SequenceNumbers.UNASSIGNED_SEQ_NO : "max_seq_no_of_updates is not initialized"; // resolve operation from external to 
internal final VersionValue versionValue = resolveDocVersion(delete); assert incrementVersionLookup(); @@ -1266,12 +1272,13 @@ public class InternalEngine extends Engine { currentlyDeleted, generateSeqNoForOperation(delete), delete.versionType().updateVersion(currentVersion, delete.version())); + advanceMaxSeqNoOfUpdatesOrDeletes(plan.seqNoOfDeletion); } return plan; } - private DeleteResult deleteInLucene(Delete delete, DeletionStrategy plan) - throws IOException { + private DeleteResult deleteInLucene(Delete delete, DeletionStrategy plan) throws IOException { + assert assertMaxSeqNoOfUpdatesIsAdvanced(delete.uid(), plan.seqNoOfDeletion, false, false); try { if (softDeleteEnabled) { final ParsedDocument tombstone = engineConfig.getTombstoneDocSupplier().newDeleteTombstoneDoc(delete.type(), delete.id()); @@ -2079,7 +2086,7 @@ public class InternalEngine extends Engine { if (warmer != null) { try { assert searcher.getIndexReader() instanceof ElasticsearchDirectoryReader : "this class needs an ElasticsearchDirectoryReader but got: " + searcher.getIndexReader().getClass(); - warmer.warm(new Searcher("top_reader_warming", searcher)); + warmer.warm(new Searcher("top_reader_warming", searcher, s -> {}, logger)); } catch (Exception e) { if (isEngineClosed.get() == false) { logger.warn("failed to prepare/warm", e); @@ -2292,7 +2299,7 @@ public class InternalEngine extends Engine { // this is an anti-viral settings you can only opt out for the entire index // only if a shard starts up again due to relocation or if the index is closed // the setting will be re-interpreted if it's set to true - this.maxUnsafeAutoIdTimestamp.set(Long.MAX_VALUE); + updateAutoIdTimestamp(Long.MAX_VALUE, true); } final TranslogDeletionPolicy translogDeletionPolicy = translog.getDeletionPolicy(); final IndexSettings indexSettings = engineConfig.getIndexSettings(); @@ -2325,6 +2332,16 @@ public class InternalEngine extends Engine { localCheckpointTracker.waitForOpsToComplete(seqNo); } + /** + * Checks if the given operation has been processed in this engine or not. + * @return true if the given operation was processed; otherwise false. 
+ */ + protected final boolean hasBeenProcessedBefore(Operation op) { + assert op.seqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO : "operation is not assigned seq_no"; + assert versionMap.assertKeyedLockHeldByCurrentThread(op.uid().bytes()); + return localCheckpointTracker.contains(op.seqNo()); + } + @Override public SeqNoStats getSeqNoStats(long globalCheckpoint) { return localCheckpointTracker.getStats(globalCheckpoint); } @@ -2531,4 +2548,55 @@ public class InternalEngine extends Engine { assert refreshedCheckpoint.get() >= checkpoint : refreshedCheckpoint.get() + " < " + checkpoint; } } + + @Override + public final long getMaxSeenAutoIdTimestamp() { + return maxSeenAutoIdTimestamp.get(); + } + + @Override + public final void updateMaxUnsafeAutoIdTimestamp(long newTimestamp) { + updateAutoIdTimestamp(newTimestamp, true); + } + + private void updateAutoIdTimestamp(long newTimestamp, boolean unsafe) { + assert newTimestamp >= -1 : "invalid timestamp [" + newTimestamp + "]"; + maxSeenAutoIdTimestamp.updateAndGet(curr -> Math.max(curr, newTimestamp)); + if (unsafe) { + maxUnsafeAutoIdTimestamp.updateAndGet(curr -> Math.max(curr, newTimestamp)); + } + assert maxUnsafeAutoIdTimestamp.get() <= maxSeenAutoIdTimestamp.get(); + } + + private boolean assertMaxSeqNoOfUpdatesIsAdvanced(Term id, long seqNo, boolean allowDeleted, boolean relaxIfGapInSeqNo) { + final long maxSeqNoOfUpdates = getMaxSeqNoOfUpdatesOrDeletes(); + // If the primary is on an old version which does not replicate the msu, we need to relax this assertion. + if (maxSeqNoOfUpdates == SequenceNumbers.UNASSIGNED_SEQ_NO) { + assert config().getIndexSettings().getIndexVersionCreated().before(Version.V_6_5_0); + return true; + } + // A delete is indexed into the tombstones on replicas as a regular document via updateDocument (not addDocument), + // so a deleted doc is allowed here. + if (allowDeleted) { + final VersionValue versionValue = versionMap.getVersionForAssert(id.bytes()); + if (versionValue != null && versionValue.isDelete()) { + return true; + } + } + // Operations can be processed on a replica in a different order than on the primary. If the order on the primary is index-1, + // delete-2, index-3, and the order on a replica is index-1, index-3, delete-2, then the msu of index-3 on the replica is 2 + // even though it is an update (overwrites index-1). We should relax this assertion if there is a pending gap in the seq_no.
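The `hasBeenProcessedBefore` check above relies on the local checkpoint tracker being able to answer "was this seq_no already processed?" so that replayed (e.g. MSU-optimized) operations are not applied to Lucene twice. A deliberately simplified stand-in (the real tracker uses fixed-size bit arrays and is far more memory-conscious) that demonstrates the same `contains`/checkpoint behaviour:

--------------------------------------------------
import java.util.BitSet;

// Toy illustration only: records processed sequence numbers and advances the
// checkpoint over the contiguous prefix. Demonstrates the contains() contract
// used by hasBeenProcessedBefore(...) above; int-sized seq_nos for simplicity.
public final class ToyCheckpointTracker {
    private final BitSet processed = new BitSet();
    private long checkpoint = -1; // every seq_no <= checkpoint has been processed

    public synchronized void markSeqNoAsCompleted(long seqNo) {
        processed.set(Math.toIntExact(seqNo));
        while (processed.get(Math.toIntExact(checkpoint + 1))) {
            checkpoint++; // advance over the contiguous prefix
        }
    }

    public synchronized boolean contains(long seqNo) {
        return seqNo <= checkpoint || processed.get(Math.toIntExact(seqNo));
    }

    public synchronized long getCheckpoint() {
        return checkpoint;
    }
}
--------------------------------------------------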
+ if (relaxIfGapInSeqNo && getLocalCheckpoint() < maxSeqNoOfUpdates) { + return true; + } + assert seqNo <= maxSeqNoOfUpdates : "id=" + id + " seq_no=" + seqNo + " msu=" + maxSeqNoOfUpdates; + return true; + } + + @Override + public void initializeMaxSeqNoOfUpdatesOrDeletes() { + assert getMaxSeqNoOfUpdatesOrDeletes() == SequenceNumbers.UNASSIGNED_SEQ_NO : + "max_seq_no_of_updates is already initialized [" + getMaxSeqNoOfUpdatesOrDeletes() + "]"; + final long maxSeqNo = SequenceNumbers.max(localCheckpointTracker.getMaxSeqNo(), translog.getMaxSeqNo()); + advanceMaxSeqNoOfUpdatesOrDeletes(maxSeqNo); + } } diff --git a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java index d0dd9466b60..6d6340dd337 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java @@ -470,7 +470,7 @@ final class LiveVersionMap implements ReferenceManager.RefreshListener, Accounta return keyedLock.acquire(uid); } - private boolean assertKeyedLockHeldByCurrentThread(BytesRef uid) { + boolean assertKeyedLockHeldByCurrentThread(BytesRef uid) { assert keyedLock.isHeldByCurrentThread(uid) : "Thread [" + Thread.currentThread().getName() + "], uid [" + uid.utf8ToString() + "]"; return true; } diff --git a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java index 80b65393929..26ef259a1e1 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.engine; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SoftDeletesDirectoryReaderWrapper; import org.apache.lucene.search.IndexSearcher; @@ -95,7 +96,7 @@ public final class ReadOnlyEngine extends Engine { this.lastCommittedSegmentInfos = Lucene.readSegmentInfos(directory); this.translogStats = translogStats == null ? new TranslogStats(0, 0, 0, 0, 0) : translogStats; this.seqNoStats = seqNoStats == null ? 
buildSeqNoStats(lastCommittedSegmentInfos) : seqNoStats; - reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), config.getShardId()); + reader = ElasticsearchDirectoryReader.wrap(open(directory), config.getShardId()); if (config.getIndexSettings().isSoftDeleteEnabled()) { reader = new SoftDeletesDirectoryReaderWrapper(reader, Lucene.SOFT_DELETES_FIELD); } @@ -103,7 +104,7 @@ public final class ReadOnlyEngine extends Engine { this.indexCommit = reader.getIndexCommit(); this.searcherManager = new SearcherManager(reader, new RamAccountingSearcherFactory(engineConfig.getCircuitBreakerService())); - this.docsStats = docsStats(reader); + this.docsStats = docsStats(lastCommittedSegmentInfos); this.indexWriterLock = indexWriterLock; success = true; } finally { @@ -116,6 +117,28 @@ public final class ReadOnlyEngine extends Engine { } } + protected DirectoryReader open(final Directory directory) throws IOException { + return DirectoryReader.open(directory); + } + + private DocsStats docsStats(final SegmentInfos lastCommittedSegmentInfos) { + long numDocs = 0; + long numDeletedDocs = 0; + long sizeInBytes = 0; + if (lastCommittedSegmentInfos != null) { + for (SegmentCommitInfo segmentCommitInfo : lastCommittedSegmentInfos) { + numDocs += segmentCommitInfo.info.maxDoc() - segmentCommitInfo.getDelCount() - segmentCommitInfo.getSoftDelCount(); + numDeletedDocs += segmentCommitInfo.getDelCount() + segmentCommitInfo.getSoftDelCount(); + try { + sizeInBytes += segmentCommitInfo.sizeInBytes(); + } catch (IOException e) { + throw new UncheckedIOException("Failed to get size for [" + segmentCommitInfo.info.name + "]", e); + } + } + } + return new DocsStats(numDocs, numDeletedDocs, sizeInBytes); + } + @Override protected void closeNoLock(String reason, CountDownLatch closedLatch) { if (isClosed.compareAndSet(false, true)) { @@ -344,7 +367,8 @@ public final class ReadOnlyEngine extends Engine { } @Override - public void restoreLocalCheckpointFromTranslog() { + public int restoreLocalHistoryFromTranslog(TranslogRecoveryRunner translogRecoveryRunner) { + return 0; } @Override @@ -373,4 +397,14 @@ public final class ReadOnlyEngine extends Engine { public DocsStats docStats() { return docsStats; } + + @Override + public void updateMaxUnsafeAutoIdTimestamp(long newTimestamp) { + + } + + @Override + public void initializeMaxSeqNoOfUpdatesOrDeletes() { + advanceMaxSeqNoOfUpdatesOrDeletes(seqNoStats.getMaxSeqNo()); + } } diff --git a/server/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java b/server/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java index aa9f5c8c6de..a67485d4f4a 100644 --- a/server/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java +++ b/server/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java @@ -19,42 +19,79 @@ package org.elasticsearch.index.fieldvisitor; import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.StoredFieldVisitor; import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.TypeFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Uid; +import org.apache.lucene.util.BytesRef; -import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.List; -public class SingleFieldsVisitor extends FieldsVisitor { +/** + * {@linkplain StoredFieldVisitor} that loads a single field value. 
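The new `docsStats(SegmentInfos)` above computes document statistics from commit metadata alone, so the read-only engine never needs to consult a searcher for stats. The per-segment arithmetic is worth stating on its own: live docs are `maxDoc` minus hard and soft deletes, and both kinds of deletes count as deleted docs. A sketch with hypothetical types (records used for brevity; not the Elasticsearch classes):

--------------------------------------------------
import java.util.List;

// Sketch of the per-segment accounting performed by docsStats(SegmentInfos) above,
// using hypothetical record types in place of SegmentCommitInfo and DocsStats.
record Segment(int maxDoc, int delCount, int softDelCount, long sizeInBytes) {}
record Stats(long numDocs, long numDeletedDocs, long sizeInBytes) {}

final class CommitDocsStats {
    static Stats compute(List<Segment> segments) {
        long numDocs = 0, numDeletedDocs = 0, sizeInBytes = 0;
        for (Segment s : segments) {
            numDocs += s.maxDoc() - s.delCount() - s.softDelCount();
            numDeletedDocs += s.delCount() + s.softDelCount();
            sizeInBytes += s.sizeInBytes();
        }
        return new Stats(numDocs, numDeletedDocs, sizeInBytes);
    }
}
--------------------------------------------------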
+ */ +public final class SingleFieldsVisitor extends StoredFieldVisitor { + private final MappedFieldType field; + private final List<Object> destination; - private String field; - - public SingleFieldsVisitor(String field) { - super(false); + /** + * Build the field visitor. + * @param field the name of the field to load + * @param destination where to put the field's values + */ + public SingleFieldsVisitor(MappedFieldType field, List<Object> destination) { this.field = field; + this.destination = destination; } @Override - public Status needsField(FieldInfo fieldInfo) throws IOException { - if (fieldInfo.name.equals(field)) { + public Status needsField(FieldInfo fieldInfo) { + if (fieldInfo.name.equals(field.name())) { return Status.YES; } + /* + * We can't return Status.STOP here because we could be loading + * multi-valued fields. + */ return Status.NO; } - public void reset(String field) { - this.field = field; - super.reset(); + private void addValue(Object value) { + destination.add(field.valueForDisplay(value)); } @Override - public void postProcess(MapperService mapperService) { - super.postProcess(mapperService); - if (id != null) { - addValue(IdFieldMapper.NAME, id); - } - if (type != null) { - addValue(TypeFieldMapper.NAME, type); + public void binaryField(FieldInfo fieldInfo, byte[] value) { + if (IdFieldMapper.NAME.equals(fieldInfo.name)) { + addValue(Uid.decodeId(value)); + } else { + addValue(new BytesRef(value)); } } + + @Override + public void stringField(FieldInfo fieldInfo, byte[] bytes) { + addValue(new String(bytes, StandardCharsets.UTF_8)); + } + + @Override + public void intField(FieldInfo fieldInfo, int value) { + addValue(value); + } + + @Override + public void longField(FieldInfo fieldInfo, long value) { + addValue(value); + } + + @Override + public void floatField(FieldInfo fieldInfo, float value) { + addValue(value); + } + + @Override + public void doubleField(FieldInfo fieldInfo, double value) { + addValue(value); + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index cb44e777f87..9e0b9f62acb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -30,8 +30,6 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -55,7 +53,6 @@ import static org.elasticsearch.index.mapper.TypeParsers.parseField; * A field mapper for boolean fields.
*/ public class BooleanFieldMapper extends FieldMapper { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(Loggers.getLogger(BooleanFieldMapper.class)); public static final String CONTENT_TYPE = "boolean"; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java index db04e64b164..0635cdd0661 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java @@ -436,8 +436,9 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp Token token = parser.currentToken(); Map<String, CompletionInputMetaData> inputMap = new HashMap<>(1); - // ignore null values - if (token == Token.VALUE_NULL) { + if (context.externalValueSet()) { + inputMap = getInputMapFromExternalValue(context); + } else if (token == Token.VALUE_NULL) { // ignore null values return; } else if (token == Token.START_ARRAY) { while ((token = parser.nextToken()) != Token.END_ARRAY) { @@ -471,12 +472,33 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp context.doc().add(new SuggestField(fieldType().name(), input, metaData.weight)); } } + List<IndexableField> fields = new ArrayList<>(1); createFieldNamesField(context, fields); for (IndexableField field : fields) { context.doc().add(field); } - multiFields.parse(this, context); + + for (CompletionInputMetaData metaData : inputMap.values()) { + ParseContext externalValueContext = context.createExternalValueContext(metaData); + multiFields.parse(this, externalValueContext); + } + } + + private Map<String, CompletionInputMetaData> getInputMapFromExternalValue(ParseContext context) { + Map<String, CompletionInputMetaData> inputMap; + if (isExternalValueOfClass(context, CompletionInputMetaData.class)) { + CompletionInputMetaData inputAndMeta = (CompletionInputMetaData) context.externalValue(); + inputMap = Collections.singletonMap(inputAndMeta.input, inputAndMeta); + } else { + String fieldName = context.externalValue().toString(); + inputMap = Collections.singletonMap(fieldName, new CompletionInputMetaData(fieldName, Collections.emptyMap(), 1)); + } + return inputMap; + } + + private boolean isExternalValueOfClass(ParseContext context, Class<?> clazz) { + return context.externalValue().getClass().equals(clazz); } /** @@ -487,7 +509,7 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp private void parse(ParseContext parseContext, Token token, XContentParser parser, Map<String, CompletionInputMetaData> inputMap) throws IOException { String currentFieldName = null; if (token == Token.VALUE_STRING) { - inputMap.put(parser.text(), new CompletionInputMetaData(Collections.<String, Set<CharSequence>>emptyMap(), 1)); + inputMap.put(parser.text(), new CompletionInputMetaData(parser.text(), Collections.emptyMap(), 1)); } else if (token == Token.START_OBJECT) { Set<String> inputs = new HashSet<>(); int weight = 1; @@ -561,7 +583,7 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp } for (String input : inputs) { if (inputMap.containsKey(input) == false || inputMap.get(input).weight < weight) { - inputMap.put(input, new CompletionInputMetaData(input, contextsMap, weight)); } } } else { @@ -570,13 +592,20 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp } static class CompletionInputMetaData { + public final String input; public final Map<String, Set<CharSequence>> contexts; public final int weight; - CompletionInputMetaData(Map<String, Set<CharSequence>> contexts, int weight) { + CompletionInputMetaData(String input, Map<String, Set<CharSequence>> contexts, int weight) { + this.input = input; this.contexts = contexts; this.weight = weight; } + + @Override + public String toString() { + return input; + } } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index c8360e468d7..0de2731ffd1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -36,10 +36,11 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateMathParser; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.util.LocaleUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -231,7 +232,7 @@ public class DateFieldMapper extends FieldMapper { public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { checkIfFrozen(); this.dateTimeFormatter = dateTimeFormatter; - this.dateMathParser = new DateMathParser(dateTimeFormatter); + this.dateMathParser = dateTimeFormatter.toDateMathParser(); } protected DateMathParser dateMathParser() { @@ -262,7 +263,7 @@ @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, ShapeRelation relation, - @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) { + @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) { failIfNotIndexed(); if (relation == ShapeRelation.DISJOINT) { throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + @@ -296,8 +297,8 @@ return query; } - public long parseToMilliseconds(Object value, boolean roundUp, - @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) { + public long parseToMilliseconds(Object value, boolean roundUp, @Nullable DateTimeZone zone, + @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) { DateMathParser dateParser = dateMathParser(); if (forcedDateParser != null) { dateParser = forcedDateParser; @@ -309,13 +310,13 @@ } else { strValue = value.toString(); } - return dateParser.parse(strValue, context::nowInMillis, roundUp, zone); + return dateParser.parse(strValue, context::nowInMillis, roundUp, DateUtils.dateTimeZoneToZoneId(zone)); } @Override - public Relation isFieldWithinQuery(IndexReader reader, - Object from, Object to, boolean includeLower, boolean includeUpper, - DateTimeZone timeZone, DateMathParser dateParser, QueryRewriteContext context) throws IOException { + public Relation isFieldWithinQuery(IndexReader reader, Object from, Object to, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateParser, + QueryRewriteContext context) throws
IOException { if (dateParser == null) { dateParser = this.dateMathParser; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 3f8a7cd62dd..8d785cdfea5 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -882,7 +882,8 @@ final class DocumentParser { builder = new ObjectMapper.Builder(paths[i]).enabled(true); } Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings().getSettings(), - context.path()); mapper = (ObjectMapper) builder.build(builderContext); + context.path()); + mapper = (ObjectMapper) builder.build(builderContext); if (mapper.nested() != ObjectMapper.Nested.NO) { throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().pathAsText(paths[i]) + "]) through `copy_to` or dots in field names"); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java index 350dc27c615..587e9abd50b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java @@ -306,12 +306,11 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper // its an array of other possible values if (token == XContentParser.Token.VALUE_NUMBER) { double lon = context.parser().doubleValue(); - token = context.parser().nextToken(); + context.parser().nextToken(); double lat = context.parser().doubleValue(); token = context.parser().nextToken(); - Double alt = Double.NaN; if (token == XContentParser.Token.VALUE_NUMBER) { - alt = GeoPoint.assertZValue(ignoreZValue.value(), context.parser().doubleValue()); + GeoPoint.assertZValue(ignoreZValue.value(), context.parser().doubleValue()); } else if (token != XContentParser.Token.END_ARRAY) { throw new ElasticsearchParseException("[{}] field type does not accept > 3 dimensions", CONTENT_TYPE); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredFieldMapper.java index 7a3a9a8f2ae..dc6aee5b9dc 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredFieldMapper.java @@ -85,7 +85,7 @@ public final class IgnoredFieldMapper extends MetadataFieldMapper { } } - public static final class IgnoredFieldType extends TermBasedFieldType { + public static final class IgnoredFieldType extends StringFieldType { public IgnoredFieldType() { } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java index bb048ab9afa..7e8ac563cac 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; @@ 
-37,6 +38,7 @@ import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.regex.Pattern; public class IndexFieldMapper extends MetadataFieldMapper { @@ -150,8 +152,48 @@ public class IndexFieldMapper extends MetadataFieldMapper { + " vs. " + values); } + @Override + public Query prefixQuery(String value, + @Nullable MultiTermQuery.RewriteMethod method, + QueryShardContext context) { + String indexName = context.getFullyQualifiedIndex().getName(); + if (indexName.startsWith(value)) { + return Queries.newMatchAllQuery(); + } else { + return Queries.newMatchNoDocsQuery("The index [" + indexName + + "] doesn't match the provided prefix [" + value + "]."); + } + } + + @Override + public Query regexpQuery(String value, int flags, int maxDeterminizedStates, + MultiTermQuery.RewriteMethod method, QueryShardContext context) { + String indexName = context.getFullyQualifiedIndex().getName(); + Pattern pattern = Regex.compile(value, Regex.flagsToString(flags)); + + if (pattern.matcher(indexName).matches()) { + return Queries.newMatchAllQuery(); + } else { + return Queries.newMatchNoDocsQuery("The index [" + indexName + + "] doesn't match the provided pattern [" + value + "]."); + } + } + + @Override + public Query wildcardQuery(String value, + @Nullable MultiTermQuery.RewriteMethod method, + QueryShardContext context) { + String indexName = context.getFullyQualifiedIndex().getName(); + if (isSameIndex(value, indexName)) { + return Queries.newMatchAllQuery(); + } else { + return Queries.newMatchNoDocsQuery("The index [" + indexName + + "] doesn't match the provided pattern [" + value + "]."); + } + } + private boolean isSameIndex(Object value, String indexName) { - String pattern = value instanceof BytesRef ? pattern = ((BytesRef) value).utf8ToString() : value.toString(); + String pattern = value instanceof BytesRef ? 
((BytesRef) value).utf8ToString() : value.toString(); return Regex.simpleMatch(pattern, indexName); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 82a601de05e..45bb5ed395d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -38,7 +38,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.common.joda.DateMathParser; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -345,6 +345,12 @@ public abstract class MappedFieldType extends FieldType { throw new QueryShardException(context, "Can only use prefix queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]"); } + public Query wildcardQuery(String value, + @Nullable MultiTermQuery.RewriteMethod method, + QueryShardContext context) { + throw new QueryShardException(context, "Can only use wildcard queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]"); + } + public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, QueryShardContext context) { throw new QueryShardException(context, "Can only use regexp queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]"); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 99ad3936700..792ac4ba03e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -458,6 +458,8 @@ public class ObjectMapper extends Mapper implements Cloneable { for (Mapper mergeWithMapper : mergeWith) { Mapper mergeIntoMapper = mappers.get(mergeWithMapper.simpleName()); + checkEnabledFieldChange(mergeWith, mergeWithMapper, mergeIntoMapper); + Mapper merged; if (mergeIntoMapper == null) { // no mapping, simply add it @@ -470,6 +472,18 @@ public class ObjectMapper extends Mapper implements Cloneable { } } + private static void checkEnabledFieldChange(ObjectMapper mergeWith, Mapper mergeWithMapper, Mapper mergeIntoMapper) { + if (mergeIntoMapper instanceof ObjectMapper && mergeWithMapper instanceof ObjectMapper) { + final ObjectMapper mergeIntoObjectMapper = (ObjectMapper) mergeIntoMapper; + final ObjectMapper mergeWithObjectMapper = (ObjectMapper) mergeWithMapper; + + if (mergeIntoObjectMapper.isEnabled() != mergeWithObjectMapper.isEnabled()) { + final String path = mergeWith.fullPath() + "." 
+ mergeWithObjectMapper.simpleName() + ".enabled"; + throw new MapperException("Can't update attribute for type [" + path + "] in index mapping"); + } + } + } + @Override public ObjectMapper updateFieldType(Map fullNameToFieldType) { List updatedMappers = null; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java index 4c356c3a559..0deb6e8afa0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java @@ -44,11 +44,12 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateMathParser; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.util.LocaleUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -60,6 +61,7 @@ import org.joda.time.DateTimeZone; import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; +import java.time.ZoneId; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; @@ -257,7 +259,7 @@ public class RangeFieldMapper extends FieldMapper { public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { checkIfFrozen(); this.dateTimeFormatter = dateTimeFormatter; - this.dateMathParser = new DateMathParser(dateTimeFormatter); + this.dateMathParser = dateTimeFormatter.toDateMathParser(); } protected DateMathParser dateMathParser() { @@ -587,15 +589,16 @@ public class RangeFieldMapper extends FieldMapper { public Query rangeQuery(String field, boolean hasDocValues, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, ShapeRelation relation, @Nullable DateTimeZone timeZone, @Nullable DateMathParser parser, QueryShardContext context) { - DateTimeZone zone = (timeZone == null) ? DateTimeZone.UTC : timeZone; + DateTimeZone zone = (timeZone == null) ? DateTimeZone.UTC : timeZone; + ZoneId zoneId = DateUtils.dateTimeZoneToZoneId(zone); DateMathParser dateMathParser = (parser == null) ? - new DateMathParser(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER) : parser; + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser() : parser; Long low = lowerTerm == null ? Long.MIN_VALUE : dateMathParser.parse(lowerTerm instanceof BytesRef ? ((BytesRef) lowerTerm).utf8ToString() : lowerTerm.toString(), - context::nowInMillis, false, zone); + context::nowInMillis, false, zoneId); Long high = upperTerm == null ? Long.MAX_VALUE : dateMathParser.parse(upperTerm instanceof BytesRef ? 
((BytesRef) upperTerm).utf8ToString() : upperTerm.toString(), - context::nowInMillis, false, zone); + context::nowInMillis, false, zoneId); return super.rangeQuery(field, hasDocValues, low, high, includeLower, includeUpper, relation, zone, dateMathParser, context); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java index 5411c4604ac..6a171b767c8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java @@ -108,7 +108,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper { } } - static final class RoutingFieldType extends TermBasedFieldType { + static final class RoutingFieldType extends StringFieldType { RoutingFieldType() { } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java index b91be82cd6b..3d3b1607870 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java @@ -21,7 +21,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.search.Query; import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.common.joda.DateMathParser; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.index.query.QueryShardContext; import org.joda.time.DateTimeZone; @@ -40,7 +40,7 @@ public abstract class SimpleMappedFieldType extends MappedFieldType { @Override public final Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, - ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser, QueryShardContext context) { + ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser, QueryShardContext context) { if (relation == ShapeRelation.DISJOINT) { throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support DISJOINT ranges"); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java index 37834b93a1e..cde8e392dab 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java @@ -19,20 +19,24 @@ package org.elasticsearch.index.mapper; -import java.util.List; - import org.apache.lucene.index.Term; -import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; +import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermRangeQuery; +import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.support.QueryParsers; + +import java.util.List; /** Base class for {@link MappedFieldType} implementations that use the same * representation for 
internal index terms as the external representation so @@ -74,6 +78,19 @@ public abstract class StringFieldType extends TermBasedFieldType { return query; } + @Override + public Query wildcardQuery(String value, MultiTermQuery.RewriteMethod method, QueryShardContext context) { + Query termQuery = termQuery(value, context); + if (termQuery instanceof MatchNoDocsQuery || termQuery instanceof MatchAllDocsQuery) { + return termQuery; + } + Term term = MappedFieldType.extractTerm(termQuery); + + WildcardQuery query = new WildcardQuery(term); + QueryParsers.setRewriteMethod(query, method); + return query; + } + @Override public Query regexpQuery(String value, int flags, int maxDeterminizedStates, MultiTermQuery.RewriteMethod method, QueryShardContext context) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index f7bcab21d72..7851bb1655a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.mapper; -import org.apache.logging.log4j.Logger; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.AnalyzerWrapper; import org.apache.lucene.analysis.CachingTokenFilter; @@ -42,7 +41,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.elasticsearch.Version; import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -64,8 +62,6 @@ import static org.elasticsearch.index.mapper.TypeParsers.parseTextField; /** A {@link FieldMapper} for full-text fields. */ public class TextFieldMapper extends FieldMapper { - private static final Logger logger = ESLoggerFactory.getLogger(TextFieldMapper.class); - public static final String CONTENT_TYPE = "text"; private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1; @@ -481,6 +477,7 @@ public class TextFieldMapper extends FieldMapper { } } + @Override public TextFieldType clone() { return new TextFieldType(this); } diff --git a/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java index 379f583ef60..93528bb9520 100644 --- a/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java @@ -181,6 +181,7 @@ public class FuzzyQueryBuilder extends AbstractQueryBuilder i out.writeOptionalString(this.rewrite); } + @Override public String fieldName() { return this.fieldName; } diff --git a/server/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java index be9abfc5e44..ee9fa5b114b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java @@ -19,5 +19,8 @@ package org.elasticsearch.index.query; public interface MultiTermQueryBuilder extends QueryBuilder { - + /** + * Get the field name for this query. 
+ */ + String fieldName(); } diff --git a/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java index c1cd99d712a..eacb2be100c 100644 --- a/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java @@ -87,6 +87,7 @@ public class PrefixQueryBuilder extends AbstractQueryBuilder out.writeOptionalString(rewrite); } + @Override public String fieldName() { return this.fieldName; } diff --git a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index b297036f2f3..756c6456a9f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -29,10 +29,10 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; @@ -146,6 +146,7 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i /** * Get the field name for this query. */ + @Override public String fieldName() { return this.fieldName; } @@ -302,7 +303,7 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i DateMathParser getForceDateParser() { // pkg private for testing if (this.format != null) { - return new DateMathParser(this.format); + return this.format.toDateMathParser(); } return null; } diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java index 39d7c1e2cf0..472c1014874 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java @@ -104,6 +104,7 @@ public class RegexpQueryBuilder extends AbstractQueryBuilder } /** Returns the field name used in this query. 
*/ + @Override public String fieldName() { return this.fieldName; } diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java index 6ea068176b4..22fca7d1d0b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java @@ -21,9 +21,11 @@ package org.elasticsearch.index.query; import org.apache.lucene.index.Term; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.TermStates; +import org.apache.lucene.queries.SpanMatchNoDocsQuery; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; @@ -190,9 +192,14 @@ public class SpanMultiTermQueryBuilder extends AbstractQueryBuilder clauses = new ArrayList<>(); + int[] articulationPoints = graph.articulationPoints(); + int lastState = 0; + int maxBooleanClause = BooleanQuery.getMaxClauseCount(); + for (int i = 0; i <= articulationPoints.length; i++) { + int start = lastState; + int end = -1; + if (i < articulationPoints.length) { + end = articulationPoints[i]; + } + lastState = end; + final SpanQuery queryPos; + if (graph.hasSidePath(start)) { + List queries = new ArrayList<>(); + Iterator it = graph.getFiniteStrings(start, end); + while (it.hasNext()) { + TokenStream ts = it.next(); + SpanQuery q = createSpanQuery(ts, field); + if (q != null) { + if (queries.size() >= maxBooleanClause) { + throw new BooleanQuery.TooManyClauses(); + } + queries.add(q); + } + } + if (queries.size() > 0) { + queryPos = new SpanOrQuery(queries.toArray(new SpanQuery[0])); + } else { + queryPos = null; + } + } else { + Term[] terms = graph.getTerms(field, start); + assert terms.length > 0; + if (terms.length >= maxBooleanClause) { + throw new BooleanQuery.TooManyClauses(); + } + if (terms.length == 1) { + queryPos = new SpanTermQuery(terms[0]); + } else { + SpanTermQuery[] orClauses = new SpanTermQuery[terms.length]; + for (int idx = 0; idx < terms.length; idx++) { + orClauses[idx] = new SpanTermQuery(terms[idx]); + } + + queryPos = new SpanOrQuery(orClauses); + } + } + + if (queryPos != null) { + if (clauses.size() >= maxBooleanClause) { + throw new BooleanQuery.TooManyClauses(); + } + clauses.add(queryPos); + } + } + + if (clauses.isEmpty()) { + return null; + } else if (clauses.size() == 1) { + return clauses.get(0); + } else { + return new SpanNearQuery(clauses.toArray(new SpanQuery[0]), phraseSlop, true); + } + } } /** diff --git a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java index 89cebf38a40..63e88ae19a9 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java +++ b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java @@ -67,7 +67,9 @@ public class MultiMatchQuery extends MatchQuery { } public Query parse(MultiMatchQueryBuilder.Type type, Map fieldNames, Object value, String minimumShouldMatch) throws IOException { - Query result; + final Query result; + // reset query builder + queryBuilder = null; if (fieldNames.size() == 1) { Map.Entry fieldBoost = fieldNames.entrySet().iterator().next(); 
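Several hunks above, `IndexFieldMapper` gains `prefixQuery`, `regexpQuery` and `wildcardQuery` overrides that resolve the pattern against the concrete index name up front, returning match-all or match-no-docs without ever touching the index. A toy illustration of that decision (the `simpleMatch` helper below is a hypothetical stand-in for Elasticsearch's `Regex.simpleMatch`, not the real implementation):

--------------------------------------------------
// Illustrative sketch only: a wildcard query on _index can be answered
// by matching the pattern against the index name and rewriting to a
// match-all or match-no-docs query.
public final class IndexNameWildcard {
    /** Very small '*'-only wildcard matcher (assumed helper, not ES code). */
    static boolean simpleMatch(String pattern, String value) {
        String[] parts = pattern.split("\\*", -1);
        StringBuilder regex = new StringBuilder();
        for (int i = 0; i < parts.length; i++) {
            if (i > 0) {
                regex.append(".*");
            }
            regex.append(java.util.regex.Pattern.quote(parts[i]));
        }
        return value.matches(regex.toString());
    }

    public static void main(String[] args) {
        String indexName = "logs-2018-09";
        System.out.println(simpleMatch("logs-*", indexName));    // true  -> rewrite to match-all
        System.out.println(simpleMatch("metrics-*", indexName)); // false -> rewrite to match-none
    }
}
--------------------------------------------------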
Float boostValue = fieldBoost.getValue(); diff --git a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java index f5a5c8143bc..1c17fa0cb93 100644 --- a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java +++ b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.search; +import org.apache.lucene.index.PrefixCodedTerms; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; @@ -28,6 +29,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.elasticsearch.index.mapper.MapperService; @@ -54,9 +56,18 @@ public final class NestedHelper { } else if (query instanceof MatchNoDocsQuery) { return false; } else if (query instanceof TermQuery) { - // We only handle term queries and range queries, which should already + // We only handle term(s) queries and range queries, which should already // cover a high majority of use-cases return mightMatchNestedDocs(((TermQuery) query).getTerm().field()); + } else if (query instanceof TermInSetQuery) { + PrefixCodedTerms terms = ((TermInSetQuery) query).getTermData(); + if (terms.size() > 0) { + PrefixCodedTerms.TermIterator it = terms.iterator(); + it.next(); + return mightMatchNestedDocs(it.field()); + } else { + return false; + } } else if (query instanceof PointRangeQuery) { return mightMatchNestedDocs(((PointRangeQuery) query).getField()); } else if (query instanceof IndexOrDocValuesQuery) { @@ -118,6 +129,15 @@ public final class NestedHelper { return false; } else if (query instanceof TermQuery) { return mightMatchNonNestedDocs(((TermQuery) query).getTerm().field(), nestedPath); + } else if (query instanceof TermInSetQuery) { + PrefixCodedTerms terms = ((TermInSetQuery) query).getTermData(); + if (terms.size() > 0) { + PrefixCodedTerms.TermIterator it = terms.iterator(); + it.next(); + return mightMatchNonNestedDocs(it.field(), nestedPath); + } else { + return false; + } } else if (query instanceof PointRangeQuery) { return mightMatchNonNestedDocs(((PointRangeQuery) query).getField(), nestedPath); } else if (query instanceof IndexOrDocValuesQuery) { diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java b/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java index a6949c05597..bc62f4067b9 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java @@ -24,8 +24,8 @@ import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; import org.apache.lucene.search.IndexSearcher; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.engine.Engine; import java.io.IOException; @@ -97,21 +97,10 @@ public class IndexSearcherWrapper { if (reader == nonClosingReaderWrapper && indexSearcher == innerIndexSearcher) { return 
engineSearcher; } else { - return new Engine.Searcher(engineSearcher.source(), indexSearcher) { - @Override - public void close() throws ElasticsearchException { - try { - reader().close(); - // we close the reader to make sure wrappers can release resources if needed.... - // our NonClosingReaderWrapper makes sure that our reader is not closed - } catch (IOException e) { - throw new ElasticsearchException("failed to close reader", e); - } finally { - engineSearcher.close(); - } - - } - }; + // we close the reader to make sure wrappers can release resources if needed.... + // our NonClosingReaderWrapper makes sure that our reader is not closed + return new Engine.Searcher(engineSearcher.source(), indexSearcher, s -> IOUtils.close(s.getIndexReader(), engineSearcher), + engineSearcher.getLogger()); } } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 5d947b0cf40..11d8f44bef1 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -79,7 +79,6 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.index.engine.EngineException; import org.elasticsearch.index.engine.EngineFactory; -import org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.engine.RefreshFailedEngineException; import org.elasticsearch.index.engine.Segment; import org.elasticsearch.index.engine.SegmentsStats; @@ -494,17 +493,22 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl try { replicationTracker.activatePrimaryMode(getLocalCheckpoint()); /* - * If this shard was serving as a replica shard when another shard was promoted to primary then the state of - * its local checkpoint tracker was reset during the primary term transition. In particular, the local - * checkpoint on this shard was thrown back to the global checkpoint and the state of the local checkpoint - * tracker above the local checkpoint was destroyed. If the other shard that was promoted to primary - * subsequently fails before the primary/replica re-sync completes successfully and we are now being - * promoted, the local checkpoint tracker here could be left in a state where it would re-issue sequence - * numbers. To ensure that this is not the case, we restore the state of the local checkpoint tracker by - * replaying the translog and marking any operations there are completed. + * If this shard was serving as a replica shard when another shard was promoted to primary then + * its Lucene index was reset during the primary term transition. In particular, the Lucene index + * on this shard was reset to the global checkpoint and the operations above the local checkpoint + * were reverted. If the other shard that was promoted to primary subsequently fails before the + * primary/replica re-sync completes successfully and we are now being promoted, we have to restore + * the reverted operations on this shard by replaying the translog to avoid losing acknowledged writes. */ final Engine engine = getEngine(); - engine.restoreLocalCheckpointFromTranslog(); + if (getMaxSeqNoOfUpdatesOrDeletes() == SequenceNumbers.UNASSIGNED_SEQ_NO) { + // If the old primary was on an old version that did not replicate the msu, + // we need to bootstrap it manually from its local history. 
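+                // Using the max seq_no of this shard's own history as the bootstrap value is a conservative but safe
+                // over-approximation: the max_seq_no_of_updates can never exceed the max seq_no of all processed operations.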
+ assert indexSettings.getIndexVersionCreated().before(Version.V_6_5_0); + engine.advanceMaxSeqNoOfUpdatesOrDeletes(seqNoStats().getMaxSeqNo()); + } + engine.restoreLocalHistoryFromTranslog((resettingEngine, snapshot) -> + runTranslogRecovery(resettingEngine, snapshot, Engine.Operation.Origin.LOCAL_RESET, () -> {})); /* Rolling the translog generation is not strictly needed here (as we will never have collisions between * sequence numbers in a translog generation in a new primary as it takes the last known sequence number * as a starting point), but it simplifies reasoning about the relationship between primary terms and @@ -1220,6 +1224,29 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl getEngine().trimOperationsFromTranslog(operationPrimaryTerm, aboveSeqNo); } + /** + * Returns the maximum auto_id_timestamp of all append-only requests that have been processed by this shard, or the auto_id_timestamp + * received from the primary via {@link #updateMaxUnsafeAutoIdTimestamp(long)} at the beginning of a peer-recovery or a primary-replica + * resync. + * + * @see #updateMaxUnsafeAutoIdTimestamp(long) + */ + public long getMaxSeenAutoIdTimestamp() { + return getEngine().getMaxSeenAutoIdTimestamp(); + } + + /** + * Since operations stored in soft-deletes do not have max_auto_id_timestamp, the primary has to propagate its max_auto_id_timestamp + * (via {@link #getMaxSeenAutoIdTimestamp()}) of all processed append-only requests to replicas at the beginning of a peer-recovery + * or a primary-replica resync, to force a replica to disable the optimization for all append-only requests that are replicated via + * replication while their retry variants are replicated via recovery without auto_id_timestamp. + *
<p>
+ * Without this force-update, a replica can generate duplicate documents (for the same id) if it first receives + * a retry append-only (without timestamp) via recovery, then an original append-only (with timestamp) via replication. + */ + public void updateMaxUnsafeAutoIdTimestamp(long maxSeenAutoIdTimestampFromPrimary) { + getEngine().updateMaxUnsafeAutoIdTimestamp(maxSeenAutoIdTimestampFromPrimary); + } + public Engine.Result applyTranslogOperation(Translog.Operation operation, Engine.Operation.Origin origin) throws IOException { // If a translog op is replayed on the primary (eg. ccr), we need to use external instead of null for its version type. final VersionType versionType = (origin == Engine.Operation.Origin.PRIMARY) ? VersionType.EXTERNAL : null; @@ -1299,7 +1326,9 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl translogRecoveryStats::incrementRecoveredOperations); }; innerOpenEngineAndTranslog(); - getEngine().recoverFromTranslog(translogRecoveryRunner, Long.MAX_VALUE); + final Engine engine = getEngine(); + engine.initializeMaxSeqNoOfUpdatesOrDeletes(); + engine.recoverFromTranslog(translogRecoveryRunner, Long.MAX_VALUE); } /** @@ -1369,8 +1398,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl private boolean assertMaxUnsafeAutoIdInCommit() throws IOException { final Map userData = SegmentInfos.readLatestCommit(store.directory()).getUserData(); - assert userData.containsKey(InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID) : - "opening index which was created post 5.5.0 but " + InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID + assert userData.containsKey(Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID) : + "opening index which was created post 5.5.0 but " + Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID + " is not found in commit"; return true; } @@ -1452,9 +1481,11 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } else { if (origin == Engine.Operation.Origin.PRIMARY) { assert assertPrimaryMode(); - } else { - assert origin == Engine.Operation.Origin.REPLICA || origin == Engine.Operation.Origin.LOCAL_RESET; + } else if (origin == Engine.Operation.Origin.REPLICA) { assert assertReplicationTarget(); + } else { + assert origin == Engine.Operation.Origin.LOCAL_RESET; + assert getActiveOperationsCount() == 0 : "Ongoing writes [" + getActiveOperations() + "]"; } if (writeAllowedStates.contains(state) == false) { throw new IllegalIndexShardStateException(shardId, state, "operation only allowed when shard state is one of " + writeAllowedStates + ", origin [" + origin + "]"); @@ -1923,6 +1954,12 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl getLocalCheckpoint() == primaryContext.getCheckpointStates().get(routingEntry().allocationId().getId()).getLocalCheckpoint(); synchronized (mutex) { replicationTracker.activateWithPrimaryContext(primaryContext); // make changes to primaryMode flag only under mutex + if (getMaxSeqNoOfUpdatesOrDeletes() == SequenceNumbers.UNASSIGNED_SEQ_NO) { + // If the old primary was on an old version that did not replicate the msu, + // we need to bootstrap it manually from its local history. + assert indexSettings.getIndexVersionCreated().before(Version.V_6_5_0); + getEngine().advanceMaxSeqNoOfUpdatesOrDeletes(seqNoStats().getMaxSeqNo()); + } } } @@ -2277,15 +2314,17 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * {@link IllegalStateException}. 
If permit acquisition is delayed, the listener will be invoked on the executor with the specified * name. * - * @param opPrimaryTerm the operation primary term - * @param globalCheckpoint the global checkpoint associated with the request - * @param onPermitAcquired the listener for permit acquisition - * @param executorOnDelay the name of the executor to invoke the listener on if permit acquisition is delayed - * @param debugInfo an extra information that can be useful when tracing an unreleased permit. When assertions are enabled - * the tracing will capture the supplied object's {@link Object#toString()} value. Otherwise the object - * isn't used + * @param opPrimaryTerm the operation primary term + * @param globalCheckpoint the global checkpoint associated with the request + * @param maxSeqNoOfUpdatesOrDeletes the max seq_no of updates (index operations that overwrite Lucene) or deletes captured on the primary + * after this replication request was executed on it (see {@link #getMaxSeqNoOfUpdatesOrDeletes()}) + * @param onPermitAcquired the listener for permit acquisition + * @param executorOnDelay the name of the executor to invoke the listener on if permit acquisition is delayed + * @param debugInfo extra information that can be useful when tracing an unreleased permit. When assertions are + * enabled the tracing will capture the supplied object's {@link Object#toString()} value. + * Otherwise the object isn't used */ - public void acquireReplicaOperationPermit(final long opPrimaryTerm, final long globalCheckpoint, + public void acquireReplicaOperationPermit(final long opPrimaryTerm, final long globalCheckpoint, final long maxSeqNoOfUpdatesOrDeletes, final ActionListener onPermitAcquired, final String executorOnDelay, final Object debugInfo) { verifyNotClosed(); @@ -2339,6 +2378,7 @@ assert assertReplicationTarget(); try { updateGlobalCheckpointOnReplica(globalCheckpoint, "operation"); + advanceMaxSeqNoOfUpdatesOrDeletes(maxSeqNoOfUpdatesOrDeletes); } catch (Exception e) { releasable.close(); onPermitAcquired.onFailure(e); @@ -2690,10 +2730,47 @@ newEngine = createNewEngine(newEngineConfig()); active.set(true); } + newEngine.advanceMaxSeqNoOfUpdatesOrDeletes(globalCheckpoint); final Engine.TranslogRecoveryRunner translogRunner = (engine, snapshot) -> runTranslogRecovery( engine, snapshot, Engine.Operation.Origin.LOCAL_RESET, () -> { // TODO: add dedicated recovery stats for the reset translog }); newEngine.recoverFromTranslog(translogRunner, globalCheckpoint); } + + /** + * Returns the maximum sequence number of update or delete operations that have been processed in this shard, + * or the sequence number from {@link #advanceMaxSeqNoOfUpdatesOrDeletes(long)}. An index request is considered + * an update operation if it overwrites an existing document in the Lucene index with the same document id. + *
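<p> + * For example, an append-only index request with an auto-generated id never advances this marker, whereas a delete or an + * index request that overwrites an existing id does. + *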
<p>
+ * The primary captures this value after executing a replication request, then transfers it to a replica before + * executing that replication request on the replica. + */ + public long getMaxSeqNoOfUpdatesOrDeletes() { + return getEngine().getMaxSeqNoOfUpdatesOrDeletes(); + } + + /** + * A replica calls this method to advance the max_seq_no_of_updates marker of its engine to at least the max_seq_no_of_updates + * value (piggybacked in a replication request) that it receives from its primary before executing that replication request. + * The received value is at least as high as the max_seq_no_of_updates on the primary was when any of the operations of that + * replication request were processed on it. + *
<p>
+ * A replica shard also calls this method to bootstrap the max_seq_no_of_updates marker with the value that it received from + * the primary in peer-recovery, before it replays remote translog operations from the primary. The received value is at least + * as high as the max_seq_no_of_updates on the primary was when any of these operations were processed on it. + *
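<p> + * For example, if the max_seq_no_of_updates on the primary is 7 when it sends a batch of operations to a replica, the + * replica first advances its own marker to at least 7 and only then processes the operations in that batch. + *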
<p>
+ * These transfers guarantee that every index/delete operation executing on a replica engine will observe a value of this marker + * that is at least the value of the max_seq_no_of_updates marker on the primary after that operation was executed on the primary. + * + * @see #acquireReplicaOperationPermit(long, long, long, ActionListener, String, Object) + * @see org.elasticsearch.indices.recovery.RecoveryTarget#indexTranslogOperations(List, int, long, long) + */ + public void advanceMaxSeqNoOfUpdatesOrDeletes(long seqNo) { + assert seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO + || getMaxSeqNoOfUpdatesOrDeletes() == SequenceNumbers.UNASSIGNED_SEQ_NO : + "replica has max_seq_no_of_updates=" + getMaxSeqNoOfUpdatesOrDeletes() + " but primary does not"; + getEngine().advanceMaxSeqNoOfUpdatesOrDeletes(seqNo); + assert seqNo <= getMaxSeqNoOfUpdatesOrDeletes() : getMaxSeqNoOfUpdatesOrDeletes() + " < " + seqNo; + } } diff --git a/server/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java b/server/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java index 09391c9bc96..cc9ac40c274 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java @@ -28,7 +28,6 @@ import org.apache.lucene.store.NoLockFactory; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.index.Index; import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.store.Store; import java.io.Closeable; @@ -66,7 +65,7 @@ final class LocalShardSnapshot implements Closeable { } long maxUnsafeAutoIdTimestamp() { - return Long.parseLong(shard.getEngine().commitStats().getUserData().get(InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID)); + return Long.parseLong(shard.getEngine().commitStats().getUserData().get(Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID)); } Directory getSnapshotDirectory() { diff --git a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java index 016a8afff69..3530fe5ae5d 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java +++ b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java @@ -135,9 +135,11 @@ public class PrimaryReplicaSyncer extends AbstractComponent { } } }; - + // We must capture the timestamp after taking the snapshot of operations to make sure + // that the auto_id_timestamp of every operation in the snapshot is at most this value.
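+ // (If the timestamp were read before taking the snapshot, a concurrent append-only request could still add an
+ // operation with a higher auto_id_timestamp to the snapshot than the value we hand to the replica.)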
+ final long maxSeenAutoIdTimestamp = indexShard.getMaxSeenAutoIdTimestamp(); resync(shardId, indexShard.routingEntry().allocationId().getId(), indexShard.getPendingPrimaryTerm(), wrappedSnapshot, - startingSeqNo, maxSeqNo, resyncListener); + startingSeqNo, maxSeqNo, maxSeenAutoIdTimestamp, resyncListener); } catch (Exception e) { try { IOUtils.close(snapshot); @@ -150,7 +152,7 @@ public class PrimaryReplicaSyncer extends AbstractComponent { } private void resync(final ShardId shardId, final String primaryAllocationId, final long primaryTerm, final Translog.Snapshot snapshot, - long startingSeqNo, long maxSeqNo, ActionListener listener) { + long startingSeqNo, long maxSeqNo, long maxSeenAutoIdTimestamp, ActionListener listener) { ResyncRequest request = new ResyncRequest(shardId, primaryAllocationId); ResyncTask resyncTask = (ResyncTask) taskManager.register("transport", "resync", request); // it's not transport :-) ActionListener wrappedListener = new ActionListener() { @@ -170,7 +172,7 @@ public class PrimaryReplicaSyncer extends AbstractComponent { }; try { new SnapshotSender(logger, syncAction, resyncTask, shardId, primaryAllocationId, primaryTerm, snapshot, chunkSize.bytesAsInt(), - startingSeqNo, maxSeqNo, wrappedListener).run(); + startingSeqNo, maxSeqNo, maxSeenAutoIdTimestamp, wrappedListener).run(); } catch (Exception e) { wrappedListener.onFailure(e); } @@ -191,6 +193,7 @@ public class PrimaryReplicaSyncer extends AbstractComponent { private final Translog.Snapshot snapshot; private final long startingSeqNo; private final long maxSeqNo; + private final long maxSeenAutoIdTimestamp; private final int chunkSizeInBytes; private final ActionListener listener; private final AtomicBoolean firstMessage = new AtomicBoolean(true); @@ -199,7 +202,8 @@ public class PrimaryReplicaSyncer extends AbstractComponent { private AtomicBoolean closed = new AtomicBoolean(); SnapshotSender(Logger logger, SyncAction syncAction, ResyncTask task, ShardId shardId, String primaryAllocationId, long primaryTerm, - Translog.Snapshot snapshot, int chunkSizeInBytes, long startingSeqNo, long maxSeqNo, ActionListener listener) { + Translog.Snapshot snapshot, int chunkSizeInBytes, long startingSeqNo, long maxSeqNo, + long maxSeenAutoIdTimestamp, ActionListener listener) { this.logger = logger; this.syncAction = syncAction; this.task = task; @@ -210,6 +214,7 @@ public class PrimaryReplicaSyncer extends AbstractComponent { this.chunkSizeInBytes = chunkSizeInBytes; this.startingSeqNo = startingSeqNo; this.maxSeqNo = maxSeqNo; + this.maxSeenAutoIdTimestamp = maxSeenAutoIdTimestamp; this.listener = listener; task.setTotalOperations(snapshot.totalOperations()); } @@ -260,7 +265,7 @@ public class PrimaryReplicaSyncer extends AbstractComponent { if (!operations.isEmpty() || trimmedAboveSeqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) { task.setPhase("sending_ops"); ResyncReplicationRequest request = - new ResyncReplicationRequest(shardId, trimmedAboveSeqNo, operations.toArray(EMPTY_ARRAY)); + new ResyncReplicationRequest(shardId, trimmedAboveSeqNo, maxSeenAutoIdTimestamp, operations.toArray(EMPTY_ARRAY)); logger.trace("{} sending batch of [{}][{}] (total sent: [{}], skipped: [{}])", shardId, operations.size(), new ByteSizeValue(size), totalSentOps.get(), totalSkippedOps.get()); firstMessage.set(false); diff --git a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java index de22903efb3..54c1dd7c1db 100644 
--- a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java +++ b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java @@ -85,10 +85,6 @@ public class RemoveCorruptedShardDataCommand extends EnvironmentAwareCommand { private final NamedXContentRegistry namedXContentRegistry; public RemoveCorruptedShardDataCommand() { - this(false); - } - - public RemoveCorruptedShardDataCommand(boolean translogOnly) { super("Removes corrupted shard files"); folderOption = parser.acceptsAll(Arrays.asList("d", "dir"), @@ -104,18 +100,13 @@ public class RemoveCorruptedShardDataCommand extends EnvironmentAwareCommand { namedXContentRegistry = new NamedXContentRegistry(ClusterModule.getNamedXWriteables()); - removeCorruptedLuceneSegmentsAction = translogOnly ? null : new RemoveCorruptedLuceneSegmentsAction(); + removeCorruptedLuceneSegmentsAction = new RemoveCorruptedLuceneSegmentsAction(); truncateTranslogAction = new TruncateTranslogAction(namedXContentRegistry); } @Override protected void printAdditionalHelp(Terminal terminal) { - if (removeCorruptedLuceneSegmentsAction == null) { - // that's only for 6.x branch for bwc with elasticsearch-translog - terminal.println("This tool truncates the translog and translog checkpoint files to create a new translog"); - } else { - terminal.println("This tool attempts to detect and remove unrecoverable corrupted data in a shard."); - } + terminal.println("This tool attempts to detect and remove unrecoverable corrupted data in a shard."); } // Visible for testing @@ -277,12 +268,6 @@ public class RemoveCorruptedShardDataCommand extends EnvironmentAwareCommand { terminal.println(""); terminal.println(" WARNING: Elasticsearch MUST be stopped before running this tool."); terminal.println(""); - // that's only for 6.x branch for bwc with elasticsearch-translog - if (removeCorruptedLuceneSegmentsAction == null) { - terminal.println(" This tool is deprecated and will be completely removed in 7.0."); - terminal.println(" It is replaced by the elasticsearch-shard tool. 
"); - terminal.println(""); - } terminal.println(" Please make a complete backup of your index before using this tool."); terminal.println(""); terminal.println("-----------------------------------------------------------------------"); @@ -318,26 +303,21 @@ public class RemoveCorruptedShardDataCommand extends EnvironmentAwareCommand { // keep the index lock to block any runs of older versions of this tool try (Lock writeIndexLock = indexDir.obtainLock(IndexWriter.WRITE_LOCK_NAME)) { ////////// Index - // that's only for 6.x branch for bwc with elasticsearch-translog - if (removeCorruptedLuceneSegmentsAction != null) { - terminal.println(""); - terminal.println("Opening Lucene index at " + indexPath); - terminal.println(""); - try { - indexCleanStatus = removeCorruptedLuceneSegmentsAction.getCleanStatus(shardPath, indexDir, - writeIndexLock, printStream, verbose); - } catch (Exception e) { - terminal.println(e.getMessage()); - throw e; - } - - terminal.println(""); - terminal.println(" >> Lucene index is " + indexCleanStatus.v1().getMessage() + " at " + indexPath); - terminal.println(""); - } else { - indexCleanStatus = Tuple.tuple(CleanStatus.CLEAN, null); + terminal.println(""); + terminal.println("Opening Lucene index at " + indexPath); + terminal.println(""); + try { + indexCleanStatus = removeCorruptedLuceneSegmentsAction.getCleanStatus(shardPath, indexDir, + writeIndexLock, printStream, verbose); + } catch (Exception e) { + terminal.println(e.getMessage()); + throw e; } + terminal.println(""); + terminal.println(" >> Lucene index is " + indexCleanStatus.v1().getMessage() + " at " + indexPath); + terminal.println(""); + ////////// Translog // as translog relies on data stored in an index commit - we have to have non unrecoverable index to truncate translog if (indexCleanStatus.v1() != CleanStatus.UNRECOVERABLE) { diff --git a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java index c4b971e470d..4d8b63fc719 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java @@ -40,8 +40,8 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; +import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; -import org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.snapshots.IndexShardRestoreFailedException; @@ -181,7 +181,7 @@ final class StoreRecovery { final HashMap liveCommitData = new HashMap<>(3); liveCommitData.put(SequenceNumbers.MAX_SEQ_NO, Long.toString(maxSeqNo)); liveCommitData.put(SequenceNumbers.LOCAL_CHECKPOINT_KEY, Long.toString(maxSeqNo)); - liveCommitData.put(InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID, Long.toString(maxUnsafeAutoIdTimestamp)); + liveCommitData.put(Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID, Long.toString(maxUnsafeAutoIdTimestamp)); return liveCommitData.entrySet().iterator(); }); writer.commit(); diff --git a/server/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/server/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java index 06a476e64ec..552ef3c4aae 100644 --- 
a/server/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java +++ b/server/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java @@ -206,7 +206,7 @@ public final class SimilarityService extends AbstractIndexComponent { CollectionStatistics collectionStats = new CollectionStatistics("some_field", 1200, 1100, 3000, 2000); TermStatistics termStats = new TermStatistics(new BytesRef("some_value"), 100, 130); SimScorer scorer = similarity.scorer(2f, collectionStats, termStats); - FieldInvertState state = new FieldInvertState(indexCreatedVersion.major, "some_field", + FieldInvertState state = new FieldInvertState(indexCreatedVersion.luceneVersion.major, "some_field", IndexOptions.DOCS_AND_FREQS, 20, 20, 0, 50, 10, 3); // length = 20, no overlap final long norm = similarity.computeNorm(state); for (int freq = 1; freq <= 10; ++freq) { @@ -214,6 +214,7 @@ if (score < 0) { fail(indexCreatedVersion, "Similarities should not return negative scores:\n" + scorer.explain(Explanation.match(freq, "term freq"), norm)); + break; } } } @@ -222,7 +223,7 @@ CollectionStatistics collectionStats = new CollectionStatistics("some_field", 1200, 1100, 3000, 2000); TermStatistics termStats = new TermStatistics(new BytesRef("some_value"), 100, 130); SimScorer scorer = similarity.scorer(2f, collectionStats, termStats); - FieldInvertState state = new FieldInvertState(indexCreatedVersion.major, "some_field", + FieldInvertState state = new FieldInvertState(indexCreatedVersion.luceneVersion.major, "some_field", IndexOptions.DOCS_AND_FREQS, 20, 20, 0, 50, 10, 3); // length = 20, no overlap final long norm = similarity.computeNorm(state); float previousScore = 0; @@ -232,6 +233,7 @@ fail(indexCreatedVersion, "Similarity scores should not decrease when term frequency increases:\n" + scorer.explain(Explanation.match(freq - 1, "term freq"), norm) + "\n" + scorer.explain(Explanation.match(freq, "term freq"), norm)); + break; } previousScore = score; } @@ -245,7 +247,7 @@ long previousNorm = 0; float previousScore = Float.MAX_VALUE; for (int length = 1; length <= 10; ++length) { - FieldInvertState state = new FieldInvertState(indexCreatedVersion.major, "some_field", + FieldInvertState state = new FieldInvertState(indexCreatedVersion.luceneVersion.major, "some_field", + IndexOptions.DOCS_AND_FREQS, length, length, 0, 50, 10, 3); // variable length (1..10), no overlap final long norm = similarity.computeNorm(state); if (Long.compareUnsigned(previousNorm, norm) > 0) { @@ -257,6 +259,7 @@ fail(indexCreatedVersion, "Similarity scores should not increase when norm increases:\n" + scorer.explain(Explanation.match(1, "term freq"), norm - 1) + "\n" + scorer.explain(Explanation.match(1, "term freq"), norm)); + break; } previousScore = score; previousNorm = norm; diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index 8e57caad3b4..13499dfd60c 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -75,7 +75,6 @@ import org.elasticsearch.env.ShardLockObtainFailedException; import
org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.CombinedDeletionPolicy; import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.IndexShard; @@ -185,7 +184,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref failIfCorrupted(); try { return readSegmentsInfo(null, directory()); - } catch (CorruptIndexException ex) { + } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) { markStoreCorrupted(ex); throw ex; } @@ -1425,7 +1424,7 @@ map.put(Engine.HISTORY_UUID_KEY, UUIDs.randomBase64UUID()); map.put(SequenceNumbers.LOCAL_CHECKPOINT_KEY, Long.toString(SequenceNumbers.NO_OPS_PERFORMED)); map.put(SequenceNumbers.MAX_SEQ_NO, Long.toString(SequenceNumbers.NO_OPS_PERFORMED)); - map.put(InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID, "-1"); + map.put(Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID, "-1"); updateCommitData(writer, map); } finally { metadataLock.writeLock().unlock(); diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java index f17acac3789..0b91de81932 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -60,6 +60,7 @@ import java.util.Iterator; import java.util.List; import java.util.Objects; import java.util.Optional; +import java.util.OptionalLong; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; @@ -1825,6 +1826,19 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return translogUUID; } + /** + * Returns the max seq_no of translog operations found in this translog. Since this value is calculated based on the current + * existing readers, it is not necessarily the max seq_no of all operations that have been stored in this translog.
+ */ + public long getMaxSeqNo() { + try (ReleasableLock ignored = readLock.acquire()) { + ensureOpen(); + final OptionalLong maxSeqNo = Stream.concat(readers.stream(), Stream.of(current)) + .mapToLong(reader -> reader.getCheckpoint().maxSeqNo).max(); + assert maxSeqNo.isPresent() : "must have at least one translog generation"; + return maxSeqNo.getAsLong(); + } + } TranslogWriter getCurrent() { return current; diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index aaa4697e5cb..f60994a4bce 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -455,7 +455,8 @@ public class PeerRecoveryTargetService extends AbstractComponent implements Inde final ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, threadPool.getThreadContext()); final RecoveryTarget recoveryTarget = recoveryRef.target(); try { - recoveryTarget.indexTranslogOperations(request.operations(), request.totalTranslogOps()); + recoveryTarget.indexTranslogOperations(request.operations(), request.totalTranslogOps(), + request.maxSeenAutoIdTimestampOnPrimary(), request.maxSeqNoOfUpdatesOrDeletesOnPrimary()); channel.sendResponse(new RecoveryTranslogOperationsResponse(recoveryTarget.indexShard().getLocalCheckpoint())); } catch (MapperException exception) { // in very rare cases a translog replay from primary is processed before a mapping update on this node diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 220abf43124..46f98275740 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -215,7 +215,12 @@ public class RecoverySourceHandler { } final long targetLocalCheckpoint; try (Translog.Snapshot snapshot = shard.getHistoryOperations("peer-recovery", startingSeqNo)) { - targetLocalCheckpoint = phase2(startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot); + // we have to capture the max_seen_auto_id_timestamp and the max_seq_no_of_updates to make sure that these values + // are at least as high as the corresponding values on the primary when any of these operations were executed on it. + final long maxSeenAutoIdTimestamp = shard.getMaxSeenAutoIdTimestamp(); + final long maxSeqNoOfUpdatesOrDeletes = shard.getMaxSeqNoOfUpdatesOrDeletes(); + targetLocalCheckpoint = phase2(startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot, + maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes); } catch (Exception e) { throw new RecoveryEngineException(shard.shardId(), 2, "phase2 failed", e); } @@ -442,14 +447,17 @@ public class RecoverySourceHandler { * point-in-time view of the translog). It then sends each translog operation to the target node so it can be replayed into the new * shard. 
* - * @param startingSeqNo the sequence number to start recovery from, or {@link SequenceNumbers#UNASSIGNED_SEQ_NO} if all - * ops should be sent - * @param requiredSeqNoRangeStart the lower sequence number of the required range (ending with endingSeqNo) - * @param endingSeqNo the highest sequence number that should be sent - * @param snapshot a snapshot of the translog + * @param startingSeqNo the sequence number to start recovery from, or {@link SequenceNumbers#UNASSIGNED_SEQ_NO} if all + * ops should be sent + * @param requiredSeqNoRangeStart the lower sequence number of the required range (ending with endingSeqNo) + * @param endingSeqNo the highest sequence number that should be sent + * @param snapshot a snapshot of the translog + * @param maxSeenAutoIdTimestamp the max auto_id_timestamp of append-only requests on the primary + * @param maxSeqNoOfUpdatesOrDeletes the max seq_no of updates or deletes on the primary after these operations were executed on it. * @return the local checkpoint on the target */ - long phase2(final long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, final Translog.Snapshot snapshot) + long phase2(final long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, final Translog.Snapshot snapshot, + final long maxSeenAutoIdTimestamp, final long maxSeqNoOfUpdatesOrDeletes) throws IOException { if (shard.state() == IndexShardState.CLOSED) { throw new IndexShardClosedException(request.shardId()); @@ -462,7 +470,8 @@ public class RecoverySourceHandler { "required [" + requiredSeqNoRangeStart + ":" + endingSeqNo + "]"); // send all the snapshot's translog operations to the target - final SendSnapshotResult result = sendSnapshot(startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot); + final SendSnapshotResult result = sendSnapshot( + startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes); stopWatch.stop(); logger.trace("recovery [phase2]: took [{}]", stopWatch.totalTime()); @@ -525,15 +534,18 @@ public class RecoverySourceHandler { *
<p>
* Operations are bulked into a single request depending on an operation count limit or size-in-bytes limit. * - * @param startingSeqNo the sequence number for which only operations with a sequence number greater than this will be sent - * @param requiredSeqNoRangeStart the lower sequence number of the required range - * @param endingSeqNo the upper bound of the sequence number range to be sent (inclusive) - * @param snapshot the translog snapshot to replay operations from @return the local checkpoint on the target and the - * total number of operations sent + * @param startingSeqNo the sequence number for which only operations with a sequence number greater than this will be sent + * @param requiredSeqNoRangeStart the lower sequence number of the required range + * @param endingSeqNo the upper bound of the sequence number range to be sent (inclusive) + * @param snapshot the translog snapshot to replay operations from + * @param maxSeenAutoIdTimestamp the max auto_id_timestamp of append-only requests on the primary + * @param maxSeqNoOfUpdatesOrDeletes the max seq_no of updates or deletes on the primary after these operations were executed on it + * @return the local checkpoint on the target and the total number of operations sent * @throws IOException if an I/O exception occurred reading the translog snapshot */ protected SendSnapshotResult sendSnapshot(final long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, - final Translog.Snapshot snapshot) throws IOException { + final Translog.Snapshot snapshot, final long maxSeenAutoIdTimestamp, + final long maxSeqNoOfUpdatesOrDeletes) throws IOException { assert requiredSeqNoRangeStart <= endingSeqNo + 1: "requiredSeqNoRangeStart " + requiredSeqNoRangeStart + " is larger than endingSeqNo " + endingSeqNo; assert startingSeqNo <= requiredSeqNoRangeStart : @@ -551,8 +563,11 @@ logger.trace("no translog operations to send"); } - final CancellableThreads.IOInterruptable sendBatch = - () -> targetLocalCheckpoint.set(recoveryTarget.indexTranslogOperations(operations, expectedTotalOps)); + final CancellableThreads.IOInterruptable sendBatch = () -> { + final long targetCheckpoint = recoveryTarget.indexTranslogOperations( + operations, expectedTotalOps, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes); + targetLocalCheckpoint.set(targetCheckpoint); + }; // send operations in batches Translog.Operation operation; diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java index e28b01c8a61..3a3a78941b1 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java @@ -386,13 +386,26 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget } @Override - public long indexTranslogOperations(List operations, int totalTranslogOps) throws IOException { + public long indexTranslogOperations(List operations, int totalTranslogOps, long maxSeenAutoIdTimestampOnPrimary, + long maxSeqNoOfDeletesOrUpdatesOnPrimary) throws IOException { final RecoveryState.Translog translog = state().getTranslog(); translog.totalOperations(totalTranslogOps); assert indexShard().recoveryState() == state(); if (indexShard().state() != IndexShardState.RECOVERING) { throw new IndexShardNotRecoveringException(shardId, indexShard().state()); } + /* + * The maxSeenAutoIdTimestampOnPrimary received from
the primary is at least the highest auto_id_timestamp of any operation + * that will be replayed. Bootstrapping this timestamp here will disable the optimization for original append-only requests + * (the source of these operations) replicated via replication. Without this step, we may have duplicate documents if we + * replay these operations first (without timestamp), then optimize append-only requests (with timestamp). + */ + indexShard().updateMaxUnsafeAutoIdTimestamp(maxSeenAutoIdTimestampOnPrimary); + /* + * Bootstrap the max_seq_no_of_updates from the primary to make sure that the max_seq_no_of_updates on this replica when + * replaying any of these operations will be at least the max_seq_no_of_updates on the primary when that operation was executed on it. + */ + indexShard().advanceMaxSeqNoOfUpdatesOrDeletes(maxSeqNoOfDeletesOrUpdatesOnPrimary); for (Translog.Operation operation : operations) { Engine.Result result = indexShard().applyTranslogOperation(operation, Engine.Operation.Origin.PEER_RECOVERY); if (result.getResultType() == Engine.Result.Type.MAPPING_UPDATE_REQUIRED) { diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java index 4e728a72b30..b7c3de97b4e 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java @@ -59,12 +59,17 @@ public interface RecoveryTargetHandler { /** * Index a set of translog operations on the target - * @param operations operations to index - * @param totalTranslogOps current number of total operations expected to be indexed * + * @param operations operations to index + * @param totalTranslogOps current number of total operations expected to be indexed + * @param maxSeenAutoIdTimestampOnPrimary the maximum auto_id_timestamp of all append-only requests processed by the primary shard + * @param maxSeqNoOfUpdatesOrDeletesOnPrimary the max seq_no of update operations (index operations that overwrite Lucene) or delete ops on + * the primary shard when capturing these operations. This value is at least as high as the + * max_seq_no_of_updates on the primary was when any of these ops were processed on it.
* @return the local checkpoint on the target shard */ - long indexTranslogOperations(List operations, int totalTranslogOps) throws IOException; + long indexTranslogOperations(List operations, int totalTranslogOps, + long maxSeenAutoIdTimestampOnPrimary, long maxSeqNoOfUpdatesOrDeletesOnPrimary) throws IOException; /** * Notifies the target of the files it is going to receive diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java index be399e0f81f..0ae5d507eb3 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java @@ -19,8 +19,11 @@ package org.elasticsearch.indices.recovery; +import org.elasticsearch.Version; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.transport.TransportRequest; @@ -34,15 +37,20 @@ public class RecoveryTranslogOperationsRequest extends TransportRequest { private ShardId shardId; private List operations; private int totalTranslogOps = RecoveryState.Translog.UNKNOWN; + private long maxSeenAutoIdTimestampOnPrimary; + private long maxSeqNoOfUpdatesOrDeletesOnPrimary; public RecoveryTranslogOperationsRequest() { } - RecoveryTranslogOperationsRequest(long recoveryId, ShardId shardId, List operations, int totalTranslogOps) { + RecoveryTranslogOperationsRequest(long recoveryId, ShardId shardId, List operations, int totalTranslogOps, + long maxSeenAutoIdTimestampOnPrimary, long maxSeqNoOfUpdatesOrDeletesOnPrimary) { this.recoveryId = recoveryId; this.shardId = shardId; this.operations = operations; this.totalTranslogOps = totalTranslogOps; + this.maxSeenAutoIdTimestampOnPrimary = maxSeenAutoIdTimestampOnPrimary; + this.maxSeqNoOfUpdatesOrDeletesOnPrimary = maxSeqNoOfUpdatesOrDeletesOnPrimary; } public long recoveryId() { @@ -61,6 +69,14 @@ public class RecoveryTranslogOperationsRequest extends TransportRequest { return totalTranslogOps; } + public long maxSeenAutoIdTimestampOnPrimary() { + return maxSeenAutoIdTimestampOnPrimary; + } + + public long maxSeqNoOfUpdatesOrDeletesOnPrimary() { + return maxSeqNoOfUpdatesOrDeletesOnPrimary; + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -68,6 +84,17 @@ public class RecoveryTranslogOperationsRequest extends TransportRequest { shardId = ShardId.readShardId(in); operations = Translog.readOperations(in, "recovery"); totalTranslogOps = in.readVInt(); + if (in.getVersion().onOrAfter(Version.V_6_5_0)) { + maxSeenAutoIdTimestampOnPrimary = in.readZLong(); + } else { + maxSeenAutoIdTimestampOnPrimary = IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP; + } + if (in.getVersion().onOrAfter(Version.V_6_5_0)) { + maxSeqNoOfUpdatesOrDeletesOnPrimary = in.readZLong(); + } else { + // UNASSIGNED_SEQ_NO means uninitialized and replica won't enable optimization using seq_no + maxSeqNoOfUpdatesOrDeletesOnPrimary = SequenceNumbers.UNASSIGNED_SEQ_NO; + } } @Override @@ -77,5 +104,11 @@ public class RecoveryTranslogOperationsRequest extends TransportRequest { shardId.writeTo(out); Translog.writeOperations(out, operations); 
out.writeVInt(totalTranslogOps); + if (out.getVersion().onOrAfter(Version.V_6_5_0)) { + out.writeZLong(maxSeenAutoIdTimestampOnPrimary); + } + if (out.getVersion().onOrAfter(Version.V_6_5_0)) { + out.writeZLong(maxSeqNoOfUpdatesOrDeletesOnPrimary); + } } } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java index edf17595350..b37fefee7da 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java @@ -110,9 +110,10 @@ public class RemoteRecoveryTargetHandler implements RecoveryTargetHandler { } @Override - public long indexTranslogOperations(List operations, int totalTranslogOps) { - final RecoveryTranslogOperationsRequest translogOperationsRequest = - new RecoveryTranslogOperationsRequest(recoveryId, shardId, operations, totalTranslogOps); + public long indexTranslogOperations(List operations, int totalTranslogOps, + long maxSeenAutoIdTimestampOnPrimary, long maxSeqNoOfDeletesOrUpdatesOnPrimary) { + final RecoveryTranslogOperationsRequest translogOperationsRequest = new RecoveryTranslogOperationsRequest( + recoveryId, shardId, operations, totalTranslogOps, maxSeenAutoIdTimestampOnPrimary, maxSeqNoOfDeletesOrUpdatesOnPrimary); final TransportFuture future = transportService.submitRequest( targetNode, PeerRecoveryTargetService.Actions.TRANSLOG_OPS, diff --git a/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index 373edfc3b46..62f5dba9825 100644 --- a/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -254,9 +254,6 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction { - NodesStoreFilesMetaData() { - } - public NodesStoreFilesMetaData(ClusterName clusterName, List nodes, List failures) { super(clusterName, nodes, failures); } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DropProcessor.java b/server/src/main/java/org/elasticsearch/ingest/DropProcessor.java similarity index 90% rename from modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DropProcessor.java rename to server/src/main/java/org/elasticsearch/ingest/DropProcessor.java index a0eabe38979..d4ccf7ad635 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DropProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/DropProcessor.java @@ -17,12 +17,9 @@ * under the License. 
*/ -package org.elasticsearch.ingest.common; +package org.elasticsearch.ingest; import java.util.Map; -import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Processor; /** * Drop processor only returns {@code null} for the execution result to indicate that any document diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 10cb2fd17fe..719558edbf7 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -647,7 +647,7 @@ public final class IngestDocument { public IngestDocument executePipeline(Pipeline pipeline) throws Exception { try { if (this.executedPipelines.add(pipeline) == false) { - throw new IllegalStateException("Recursive invocation of pipeline [" + pipeline.getId() + "] detected."); + throw new IllegalStateException("Cycle detected for pipeline: " + pipeline.getId()); } return pipeline.execute(this); } finally { diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestMetric.java b/server/src/main/java/org/elasticsearch/ingest/IngestMetric.java new file mode 100644 index 00000000000..4e809aa5b44 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/ingest/IngestMetric.java @@ -0,0 +1,95 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.common.metrics.CounterMetric; +import org.elasticsearch.common.metrics.MeanMetric; + +/** + *
<p>
Metrics to measure ingest actions. + *
<p>
These metrics measure document counts and timings for a given scope. + * The scope is determined by the calling code. For example, you can use this class to count all documents across all pipelines, + * or you can use this class to count documents for a given pipeline or a specific processor. + * This class does not make assumptions about its given scope. + */ +class IngestMetric { + + /** + * The time it takes to complete the measured item. + */ + private final MeanMetric ingestTime = new MeanMetric(); + /** + * The current count of things being measured. Should usually be 0 or 1. + * Useful when aggregating multiple metrics to see how many things are in flight. + */ + private final CounterMetric ingestCurrent = new CounterMetric(); + /** + * The ever-increasing count of things being measured. + */ + private final CounterMetric ingestCount = new CounterMetric(); + /** + * The ever-increasing count of failures. + */ + private final CounterMetric ingestFailed = new CounterMetric(); + + /** + * Call this prior to the ingest action. + */ + void preIngest() { + ingestCurrent.inc(); + } + + /** + * Call this after performing the ingest action, even if the action failed. + * @param ingestTimeInMillis The time it took to perform the action. + */ + void postIngest(long ingestTimeInMillis) { + ingestCurrent.dec(); + ingestTime.inc(ingestTimeInMillis); + ingestCount.inc(); + } + + /** + * Call this if the ingest action failed. + */ + void ingestFailed() { + ingestFailed.inc(); + } + + /** + *
<p>
Add two sets of metrics together. + *
<p>
Note - this method does not add the current count values. + * The current count value is ephemeral and requires paired increase/decrease operations to keep the value correct. + * + * @param metrics The metric to add. + */ + void add(IngestMetric metrics) { + ingestCount.inc(metrics.ingestCount.count()); + ingestTime.inc(metrics.ingestTime.sum()); + ingestFailed.inc(metrics.ingestFailed.count()); + } + + /** + * Creates a serializable representation for these metrics. + */ + IngestStats.Stats createStats() { + return new IngestStats.Stats(ingestCount.count(), ingestTime.sum(), ingestCurrent.count(), ingestFailed.count()); + } +} diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 5623cf30f36..6c46a9b2354 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -23,15 +23,15 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.stream.Collectors; + import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; @@ -49,8 +49,6 @@ import org.elasticsearch.cluster.ClusterStateApplier; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.metrics.CounterMetric; -import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -79,8 +77,7 @@ public class IngestService implements ClusterStateApplier { // are loaded, so in the cluster state we just save the pipeline config and here we keep the actual pipelines around.
private volatile Map pipelines = new HashMap<>(); private final ThreadPool threadPool; - private final StatsHolder totalStats = new StatsHolder(); - private volatile Map statsHolderPerPipeline = Collections.emptyMap(); + private final IngestMetric totalMetrics = new IngestMetric(); public IngestService(ClusterService clusterService, ThreadPool threadPool, Environment env, ScriptService scriptService, AnalysisRegistry analysisRegistry, @@ -257,10 +254,16 @@ public class IngestService implements ClusterStateApplier { @Override public void applyClusterState(final ClusterChangedEvent event) { ClusterState state = event.state(); + Map originalPipelines = pipelines; innerUpdatePipelines(event.previousState(), state); - IngestMetadata ingestMetadata = state.getMetaData().custom(IngestMetadata.TYPE); - if (ingestMetadata != null) { - updatePipelineStats(ingestMetadata); + //pipelines changed, so add the old metrics to the new metrics + if (originalPipelines != pipelines) { + pipelines.forEach((id, pipeline) -> { + Pipeline originalPipeline = originalPipelines.get(id); + if (originalPipeline != null) { + pipeline.getMetrics().add(originalPipeline.getMetrics()); + } + }); } } @@ -325,6 +328,7 @@ public class IngestService implements ClusterStateApplier { public void executeBulkRequest(Iterable> actionRequests, BiConsumer itemFailureHandler, Consumer completionHandler, Consumer itemDroppedHandler) { + threadPool.executor(ThreadPool.Names.WRITE).execute(new AbstractRunnable() { @Override @@ -367,37 +371,11 @@ public class IngestService implements ClusterStateApplier { } public IngestStats stats() { - Map statsHolderPerPipeline = this.statsHolderPerPipeline; - Map statsPerPipeline = new HashMap<>(statsHolderPerPipeline.size()); - for (Map.Entry entry : statsHolderPerPipeline.entrySet()) { - statsPerPipeline.put(entry.getKey(), entry.getValue().createStats()); - } + Map statsPerPipeline = + pipelines.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, v -> v.getValue().getMetrics().createStats())); - return new IngestStats(totalStats.createStats(), statsPerPipeline); - } - - void updatePipelineStats(IngestMetadata ingestMetadata) { - boolean changed = false; - Map newStatsPerPipeline = new HashMap<>(statsHolderPerPipeline); - Iterator iterator = newStatsPerPipeline.keySet().iterator(); - while (iterator.hasNext()) { - String pipeline = iterator.next(); - if (ingestMetadata.getPipelines().containsKey(pipeline) == false) { - iterator.remove(); - changed = true; - } - } - for (String pipeline : ingestMetadata.getPipelines().keySet()) { - if (newStatsPerPipeline.containsKey(pipeline) == false) { - newStatsPerPipeline.put(pipeline, new StatsHolder()); - changed = true; - } - } - - if (changed) { - statsHolderPerPipeline = Collections.unmodifiableMap(newStatsPerPipeline); - } + return new IngestStats(totalMetrics.createStats(), statsPerPipeline); } private void innerExecute(IndexRequest indexRequest, Pipeline pipeline, Consumer itemDroppedHandler) throws Exception { @@ -408,10 +386,8 @@ public class IngestService implements ClusterStateApplier { long startTimeInNanos = System.nanoTime(); // the pipeline specific stat holder may not exist and that is fine: // (e.g. 
the pipeline may have been removed while we're ingesting a document - Optional pipelineStats = Optional.ofNullable(statsHolderPerPipeline.get(pipeline.getId())); try { - totalStats.preIngest(); - pipelineStats.ifPresent(StatsHolder::preIngest); + totalMetrics.preIngest(); String index = indexRequest.index(); String type = indexRequest.type(); String id = indexRequest.id(); @@ -437,13 +413,11 @@ public class IngestService implements ClusterStateApplier { indexRequest.source(ingestDocument.getSourceAndMetadata()); } } catch (Exception e) { - totalStats.ingestFailed(); - pipelineStats.ifPresent(StatsHolder::ingestFailed); + totalMetrics.ingestFailed(); throw e; } finally { long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeInNanos); - totalStats.postIngest(ingestTimeInMillis); - pipelineStats.ifPresent(statsHolder -> statsHolder.postIngest(ingestTimeInMillis)); + totalMetrics.postIngest(ingestTimeInMillis); } } @@ -480,27 +454,4 @@ public class IngestService implements ClusterStateApplier { ExceptionsHelper.rethrowAndSuppress(exceptions); } - private static class StatsHolder { - - private final MeanMetric ingestMetric = new MeanMetric(); - private final CounterMetric ingestCurrent = new CounterMetric(); - private final CounterMetric ingestFailed = new CounterMetric(); - - void preIngest() { - ingestCurrent.inc(); - } - - void postIngest(long ingestTimeInMillis) { - ingestCurrent.dec(); - ingestMetric.inc(ingestTimeInMillis); - } - - void ingestFailed() { - ingestFailed.inc(); - } - - IngestStats.Stats createStats() { - return new IngestStats.Stats(ingestMetric.count(), ingestMetric.sum(), ingestCurrent.count(), ingestFailed.count()); - } - } } diff --git a/server/src/main/java/org/elasticsearch/ingest/Pipeline.java b/server/src/main/java/org/elasticsearch/ingest/Pipeline.java index 9f13cb1280a..8d5f6d6ff7c 100644 --- a/server/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/server/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -22,10 +22,12 @@ package org.elasticsearch.ingest; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; +import java.time.Clock; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; + import org.elasticsearch.script.ScriptService; /** @@ -44,12 +46,21 @@ public final class Pipeline { @Nullable private final Integer version; private final CompoundProcessor compoundProcessor; + private final IngestMetric metrics; + private final Clock clock; public Pipeline(String id, @Nullable String description, @Nullable Integer version, CompoundProcessor compoundProcessor) { + this(id, description, version, compoundProcessor, Clock.systemUTC()); + } + + //package private for testing + Pipeline(String id, @Nullable String description, @Nullable Integer version, CompoundProcessor compoundProcessor, Clock clock) { this.id = id; this.description = description; this.compoundProcessor = compoundProcessor; this.version = version; + this.metrics = new IngestMetric(); + this.clock = clock; } public static Pipeline create(String id, Map config, @@ -78,7 +89,17 @@ public final class Pipeline { * Modifies the data of a document to be indexed based on the processor this pipeline holds */ public IngestDocument execute(IngestDocument ingestDocument) throws Exception { - return compoundProcessor.execute(ingestDocument); + long startTimeInMillis = clock.millis(); + try { + metrics.preIngest(); + return compoundProcessor.execute(ingestDocument); + } catch 
(Exception e) { + metrics.ingestFailed(); + throw e; + } finally { + long ingestTimeInMillis = clock.millis() - startTimeInMillis; + metrics.postIngest(ingestTimeInMillis); + } } /** @@ -136,4 +157,10 @@ public final class Pipeline { return compoundProcessor.flattenProcessors(); } + /** + * The metrics associated with this pipeline. + */ + public IngestMetric getMetrics() { + return metrics; + } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/PipelineProcessor.java b/server/src/main/java/org/elasticsearch/ingest/PipelineProcessor.java similarity index 87% rename from modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/PipelineProcessor.java rename to server/src/main/java/org/elasticsearch/ingest/PipelineProcessor.java index 1958a3e5232..918ff6b8aef 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/PipelineProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineProcessor.java @@ -17,15 +17,9 @@ * under the License. */ -package org.elasticsearch.ingest.common; +package org.elasticsearch.ingest; import java.util.Map; -import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.IngestService; -import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.Processor; public class PipelineProcessor extends AbstractProcessor { @@ -50,6 +44,10 @@ public class PipelineProcessor extends AbstractProcessor { return ingestDocument.executePipeline(pipeline); } + Pipeline getPipeline(){ + return ingestService.getPipeline(pipelineName); + } + @Override public String getType() { return TYPE; diff --git a/server/src/main/java/org/elasticsearch/action/ingest/TrackingResultProcessor.java b/server/src/main/java/org/elasticsearch/ingest/TrackingResultProcessor.java similarity index 65% rename from server/src/main/java/org/elasticsearch/action/ingest/TrackingResultProcessor.java rename to server/src/main/java/org/elasticsearch/ingest/TrackingResultProcessor.java index 04c0fe7ca49..41a984be5ad 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/TrackingResultProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/TrackingResultProcessor.java @@ -17,14 +17,13 @@ * under the License. */ -package org.elasticsearch.action.ingest; +package org.elasticsearch.ingest; -import org.elasticsearch.ingest.CompoundProcessor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Processor; +import org.elasticsearch.action.ingest.SimulateProcessorResult; import java.util.ArrayList; import java.util.List; +import java.util.Set; /** * Processor to be used within Simulate API to keep track of processors executed in pipeline. 
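The `decorate()` changes below have to guard against a pipeline processor that reaches its own pipeline again, directly or through intermediaries. The `pipelinesSeen` set implements the usual depth-first back-edge check: add on descent, fail if already present, remove on backtrack. A minimal sketch of that guard under hypothetical names (`Node` standing in for a pipeline, `visit` for `decorate`):

----------------------------------------------
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Hypothetical model: a Node is a pipeline, `references` are the pipelines
// reachable through its pipeline processors.
final class Node {
    final String id;
    final List<Node> references;

    Node(String id, List<Node> references) {
        this.id = id;
        this.references = references;
    }
}

final class CycleGuard {
    // Depth-first walk; `seen` holds only the current path, so shared
    // sub-pipelines (diamonds) are fine while a back-edge to an ancestor throws.
    static void visit(Node node, Set<Node> seen) {
        if (seen.add(node) == false) {
            throw new IllegalStateException("Cycle detected for pipeline: " + node.id);
        }
        for (Node ref : node.references) {
            visit(ref, seen);
        }
        seen.remove(node); // backtrack, like pipelinesSeen.remove(pipelineProcessor)
    }
}
----------------------------------------------

`CycleGuard.visit(root, new HashSet<>())` returns normally for any acyclic graph and throws before the simulation could recurse without bound.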
@@ -35,7 +34,7 @@ public final class TrackingResultProcessor implements Processor { private final List processorResultList; private final boolean ignoreFailure; - public TrackingResultProcessor(boolean ignoreFailure, Processor actualProcessor, List processorResultList) { + TrackingResultProcessor(boolean ignoreFailure, Processor actualProcessor, List processorResultList) { this.ignoreFailure = ignoreFailure; this.processorResultList = processorResultList; this.actualProcessor = actualProcessor; @@ -67,19 +66,35 @@ public final class TrackingResultProcessor implements Processor { return actualProcessor.getTag(); } - public static CompoundProcessor decorate(CompoundProcessor compoundProcessor, List processorResultList) { + public static CompoundProcessor decorate(CompoundProcessor compoundProcessor, List processorResultList, + Set pipelinesSeen) { List processors = new ArrayList<>(compoundProcessor.getProcessors().size()); for (Processor processor : compoundProcessor.getProcessors()) { - if (processor instanceof CompoundProcessor) { - processors.add(decorate((CompoundProcessor) processor, processorResultList)); + if (processor instanceof PipelineProcessor) { + PipelineProcessor pipelineProcessor = ((PipelineProcessor) processor); + if (pipelinesSeen.add(pipelineProcessor) == false) { + throw new IllegalStateException("Cycle detected for pipeline: " + pipelineProcessor.getPipeline().getId()); + } + processors.add(decorate(pipelineProcessor.getPipeline().getCompoundProcessor(), processorResultList, pipelinesSeen)); + pipelinesSeen.remove(pipelineProcessor); + } else if (processor instanceof CompoundProcessor) { + processors.add(decorate((CompoundProcessor) processor, processorResultList, pipelinesSeen)); } else { processors.add(new TrackingResultProcessor(compoundProcessor.isIgnoreFailure(), processor, processorResultList)); } } List onFailureProcessors = new ArrayList<>(compoundProcessor.getProcessors().size()); for (Processor processor : compoundProcessor.getOnFailureProcessors()) { - if (processor instanceof CompoundProcessor) { - onFailureProcessors.add(decorate((CompoundProcessor) processor, processorResultList)); + if (processor instanceof PipelineProcessor) { + PipelineProcessor pipelineProcessor = ((PipelineProcessor) processor); + if (pipelinesSeen.add(pipelineProcessor) == false) { + throw new IllegalStateException("Cycle detected for pipeline: " + pipelineProcessor.getPipeline().getId()); + } + onFailureProcessors.add(decorate(pipelineProcessor.getPipeline().getCompoundProcessor(), processorResultList, + pipelinesSeen)); + pipelinesSeen.remove(pipelineProcessor); + } else if (processor instanceof CompoundProcessor) { + onFailureProcessors.add(decorate((CompoundProcessor) processor, processorResultList, pipelinesSeen)); } else { onFailureProcessors.add(new TrackingResultProcessor(compoundProcessor.isIgnoreFailure(), processor, processorResultList)); } diff --git a/server/src/main/java/org/elasticsearch/rest/BytesRestResponse.java b/server/src/main/java/org/elasticsearch/rest/BytesRestResponse.java index f8575b4a012..d6ed68bcafa 100644 --- a/server/src/main/java/org/elasticsearch/rest/BytesRestResponse.java +++ b/server/src/main/java/org/elasticsearch/rest/BytesRestResponse.java @@ -20,6 +20,7 @@ package org.elasticsearch.rest; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchException; @@ -27,7 +28,6 @@ 
import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -117,7 +117,7 @@ public class BytesRestResponse extends RestResponse { return this.status; } - private static final Logger SUPPRESSED_ERROR_LOGGER = ESLoggerFactory.getLogger("rest.suppressed"); + private static final Logger SUPPRESSED_ERROR_LOGGER = LogManager.getLogger("rest.suppressed"); private static XContentBuilder build(RestChannel channel, RestStatus status, Exception e) throws IOException { ToXContent.Params params = channel.request(); diff --git a/server/src/main/java/org/elasticsearch/script/ParameterMap.java b/server/src/main/java/org/elasticsearch/script/ParameterMap.java index b4fd24b059b..b59d057d66e 100644 --- a/server/src/main/java/org/elasticsearch/script/ParameterMap.java +++ b/server/src/main/java/org/elasticsearch/script/ParameterMap.java @@ -19,11 +19,12 @@ package org.elasticsearch.script; +import org.apache.logging.log4j.LogManager; +import org.elasticsearch.common.logging.DeprecationLogger; + import java.util.Collection; import java.util.Map; import java.util.Set; -import org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.logging.DeprecationLogger; public final class ParameterMap implements Map { @@ -34,7 +35,7 @@ public final class ParameterMap implements Map { private final Map deprecations; - ParameterMap(Map params, Map deprecations) { + public ParameterMap(Map params, Map deprecations) { this.params = params; this.deprecations = deprecations; } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index 968bc143ba8..d98ed62f602 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -45,7 +45,6 @@ public class ScriptModule { ScoreScript.CONTEXT, SearchScript.SCRIPT_SORT_CONTEXT, TermsSetQueryScript.CONTEXT, - ExecutableScript.CONTEXT, UpdateScript.CONTEXT, BucketAggregationScript.CONTEXT, BucketAggregationSelectorScript.CONTEXT, diff --git a/server/src/main/java/org/elasticsearch/script/SearchScript.java b/server/src/main/java/org/elasticsearch/script/SearchScript.java index cdf5c98ec62..693d0dd8a3d 100644 --- a/server/src/main/java/org/elasticsearch/script/SearchScript.java +++ b/server/src/main/java/org/elasticsearch/script/SearchScript.java @@ -41,7 +41,7 @@ import java.util.Map; *

 *   <li>Call one of the {@code run} methods: {@link #run()}, {@link #runAsDouble()}, or {@link #runAsLong()}</li>
 * </ol>
 */
-public abstract class SearchScript implements ScorerAware, ExecutableScript {
+public abstract class SearchScript implements ScorerAware {

     /** The generic runtime parameters for the script. */
     private final Map<String, Object> params;
@@ -112,7 +112,6 @@ public abstract class SearchScript implements ScorerAware, ExecutableScript {
         setNextVar("_value", value);
     }

-    @Override
     public void setNextVar(String field, Object value) {}

     /** Return the result as a long. This is used by aggregation scripts over long fields. */
@@ -120,7 +119,6 @@ public abstract class SearchScript implements ScorerAware, ExecutableScript {
         throw new UnsupportedOperationException("runAsLong is not implemented");
     }

-    @Override
     public Object run() {
         return runAsDouble();
     }
diff --git a/server/src/main/java/org/elasticsearch/script/UpdateScript.java b/server/src/main/java/org/elasticsearch/script/UpdateScript.java
index c6a1d5dd9ea..e1eaf14bcb9 100644
--- a/server/src/main/java/org/elasticsearch/script/UpdateScript.java
+++ b/server/src/main/java/org/elasticsearch/script/UpdateScript.java
@@ -20,6 +20,8 @@
 package org.elasticsearch.script;

+import java.util.Collections;
+import java.util.HashMap;
 import java.util.Map;

 /**
@@ -27,7 +29,18 @@ import java.util.Map;
  */
 public abstract class UpdateScript {

-    public static final String[] PARAMETERS = { "ctx" };
+    public static final String[] PARAMETERS = { };
+
+    private static final Map<String, String> DEPRECATIONS;
+    static {
+        Map<String, String> deprecations = new HashMap<>();
+        deprecations.put(
+            "ctx",
+            "Accessing variable [ctx] via [params.ctx] from within an update script " +
+                "is deprecated in favor of directly accessing [ctx]."
+        );
+        DEPRECATIONS = Collections.unmodifiableMap(deprecations);
+    }

     /** The context used to compile {@link UpdateScript} factories. */
     public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("update", Factory.class);
@@ -35,8 +48,14 @@ public abstract class UpdateScript {
     /** The generic runtime parameters for the script. */
     private final Map<String, Object> params;

-    public UpdateScript(Map<String, Object> params) {
-        this.params = params;
+    /** The update context for the script. */
+    private final Map<String, Object> ctx;
+
+    public UpdateScript(Map<String, Object> params, Map<String, Object> ctx) {
+        Map<String, Object> paramsWithCtx = new HashMap<>(params);
+        paramsWithCtx.put("ctx", ctx);
+        this.params = new ParameterMap(paramsWithCtx, DEPRECATIONS);
+        this.ctx = ctx;
     }

     /** Return the parameters for this script. */
@@ -44,9 +63,14 @@
         return params;
     }

-    public abstract void execute(Map<String, Object> ctx);
+    /** Return the update context for this script.
*/ + public Map getCtx() { + return ctx; + } + + public abstract void execute(); public interface Factory { - UpdateScript newInstance(Map params); + UpdateScript newInstance(Map params, Map ctx); } } diff --git a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java index 3a3b1c680ab..e5ece1afa33 100644 --- a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java +++ b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java @@ -25,11 +25,12 @@ import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.time.DateMathParser; +import org.elasticsearch.common.time.DateUtils; import org.joda.time.DateTimeZone; import java.io.IOException; @@ -171,13 +172,14 @@ public interface DocValueFormat extends NamedWriteable { public static final String NAME = "date_time"; final FormatDateTimeFormatter formatter; + // TODO: change this to ZoneId, but will require careful change to serialization final DateTimeZone timeZone; private final DateMathParser parser; public DateTime(FormatDateTimeFormatter formatter, DateTimeZone timeZone) { this.formatter = Objects.requireNonNull(formatter); this.timeZone = Objects.requireNonNull(timeZone); - this.parser = new DateMathParser(formatter); + this.parser = formatter.toDateMathParser(); } public DateTime(StreamInput in) throws IOException { @@ -212,7 +214,7 @@ public interface DocValueFormat extends NamedWriteable { @Override public long parseLong(String value, boolean roundUp, LongSupplier now) { - return parser.parse(value, now, roundUp, timeZone); + return parser.parse(value, now, roundUp, DateUtils.dateTimeZoneToZoneId(timeZone)); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java index 7a3d3fee8ac..359c8dd571e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java @@ -28,7 +28,7 @@ import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; -import java.util.List; +import java.util.Collection; import java.util.Map; /** @@ -79,12 +79,12 @@ public abstract class AggregationBuilder public abstract AggregationBuilder subAggregation(PipelineAggregationBuilder aggregation); /** Return the configured set of subaggregations **/ - public List getSubAggregations() { + public Collection getSubAggregations() { return factoriesBuilder.getAggregatorFactories(); } /** Return the configured set of pipeline aggregations **/ - public List getPipelineAggregations() { + public Collection getPipelineAggregations() { return factoriesBuilder.getPipelineAggregatorFactories(); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java index 
de4f0aab676..5bfb575bee8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java @@ -38,9 +38,11 @@ import org.elasticsearch.search.profile.aggregation.ProfilingAggregator; import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -159,10 +161,9 @@ public class AggregatorFactories { } } - public static final AggregatorFactories EMPTY = new AggregatorFactories(null, new AggregatorFactory[0], + public static final AggregatorFactories EMPTY = new AggregatorFactories(new AggregatorFactory[0], new ArrayList()); - private AggregatorFactory parent; private AggregatorFactory[] factories; private List pipelineAggregatorFactories; @@ -170,9 +171,7 @@ public class AggregatorFactories { return new Builder(); } - private AggregatorFactories(AggregatorFactory parent, AggregatorFactory[] factories, - List pipelineAggregators) { - this.parent = parent; + private AggregatorFactories(AggregatorFactory[] factories, List pipelineAggregators) { this.factories = factories; this.pipelineAggregatorFactories = pipelineAggregators; } @@ -240,8 +239,11 @@ public class AggregatorFactories { public static class Builder implements Writeable, ToXContentObject { private final Set names = new HashSet<>(); - private final List aggregationBuilders = new ArrayList<>(); - private final List pipelineAggregatorBuilders = new ArrayList<>(); + + // Using LinkedHashSets to preserve the order of insertion, that makes the results + // ordered nicely, although technically order does not matter + private final Collection aggregationBuilders = new LinkedHashSet<>(); + private final Collection pipelineAggregatorBuilders = new LinkedHashSet<>(); private boolean skipResolveOrder; /** @@ -325,29 +327,32 @@ public class AggregatorFactories { parent); } AggregatorFactory[] aggFactories = new AggregatorFactory[aggregationBuilders.size()]; - for (int i = 0; i < aggregationBuilders.size(); i++) { - aggFactories[i] = aggregationBuilders.get(i).build(context, parent); + + int i = 0; + for (AggregationBuilder agg : aggregationBuilders) { + aggFactories[i] = agg.build(context, parent); + ++i; } - return new AggregatorFactories(parent, aggFactories, orderedpipelineAggregators); + return new AggregatorFactories(aggFactories, orderedpipelineAggregators); } private List resolvePipelineAggregatorOrder( - List pipelineAggregatorBuilders, List aggBuilders, + Collection pipelineAggregatorBuilders, Collection aggregationBuilders, AggregatorFactory parent) { Map pipelineAggregatorBuildersMap = new HashMap<>(); for (PipelineAggregationBuilder builder : pipelineAggregatorBuilders) { pipelineAggregatorBuildersMap.put(builder.getName(), builder); } Map aggBuildersMap = new HashMap<>(); - for (AggregationBuilder aggBuilder : aggBuilders) { + for (AggregationBuilder aggBuilder : aggregationBuilders) { aggBuildersMap.put(aggBuilder.name, aggBuilder); } List orderedPipelineAggregatorrs = new LinkedList<>(); List unmarkedBuilders = new ArrayList<>(pipelineAggregatorBuilders); - Set temporarilyMarked = new HashSet<>(); + Collection temporarilyMarked = new HashSet<>(); while (!unmarkedBuilders.isEmpty()) { PipelineAggregationBuilder builder = unmarkedBuilders.get(0); - builder.validate(parent, aggBuilders, 
pipelineAggregatorBuilders); + builder.validate(parent, aggregationBuilders, pipelineAggregatorBuilders); resolvePipelineAggregatorOrder(aggBuildersMap, pipelineAggregatorBuildersMap, orderedPipelineAggregatorrs, unmarkedBuilders, temporarilyMarked, builder); } @@ -357,7 +362,7 @@ public class AggregatorFactories { private void resolvePipelineAggregatorOrder(Map aggBuildersMap, Map pipelineAggregatorBuildersMap, List orderedPipelineAggregators, List unmarkedBuilders, - Set temporarilyMarked, PipelineAggregationBuilder builder) { + Collection temporarilyMarked, PipelineAggregationBuilder builder) { if (temporarilyMarked.contains(builder)) { throw new IllegalArgumentException("Cyclical dependency found with pipeline aggregator [" + builder.getName() + "]"); } else if (unmarkedBuilders.contains(builder)) { @@ -378,7 +383,7 @@ public class AggregatorFactories { } else { // Check the non-pipeline sub-aggregator // factories - List subBuilders = aggBuilder.factoriesBuilder.aggregationBuilders; + Collection subBuilders = aggBuilder.factoriesBuilder.aggregationBuilders; boolean foundSubBuilder = false; for (AggregationBuilder subBuilder : subBuilders) { if (aggName.equals(subBuilder.name)) { @@ -389,7 +394,7 @@ public class AggregatorFactories { } // Check the pipeline sub-aggregator factories if (!foundSubBuilder && (i == bucketsPathElements.size() - 1)) { - List subPipelineBuilders = aggBuilder.factoriesBuilder.pipelineAggregatorBuilders; + Collection subPipelineBuilders = aggBuilder.factoriesBuilder.pipelineAggregatorBuilders; for (PipelineAggregationBuilder subFactory : subPipelineBuilders) { if (aggName.equals(subFactory.getName())) { foundSubBuilder = true; @@ -420,12 +425,12 @@ public class AggregatorFactories { } } - public List getAggregatorFactories() { - return Collections.unmodifiableList(aggregationBuilders); + public Collection getAggregatorFactories() { + return Collections.unmodifiableCollection(aggregationBuilders); } - public List getPipelineAggregatorFactories() { - return Collections.unmodifiableList(pipelineAggregatorBuilders); + public Collection getPipelineAggregatorFactories() { + return Collections.unmodifiableCollection(pipelineAggregatorBuilders); } public int count() { @@ -466,6 +471,7 @@ public class AggregatorFactories { if (getClass() != obj.getClass()) return false; Builder other = (Builder) obj; + if (!Objects.equals(aggregationBuilders, other.aggregationBuilders)) return false; if (!Objects.equals(pipelineAggregatorBuilders, other.pipelineAggregatorBuilders)) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java index da328edd7aa..eafdbe10977 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java @@ -143,6 +143,14 @@ public abstract class InternalAggregation implements Aggregation, NamedWriteable public abstract InternalAggregation doReduce(List aggregations, ReduceContext reduceContext); + /** + * Return true if this aggregation is mapped, and can lead a reduction. If this agg returns + * false, it should return itself if asked to lead a reduction + */ + public boolean isMapped() { + return true; + } + /** * Get the value of specified path in the aggregation. 
* diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java index 57170e2f8ab..95140b50d2b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java @@ -25,6 +25,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import java.io.IOException; import java.util.ArrayList; +import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -37,6 +38,15 @@ import static java.util.Collections.emptyMap; public final class InternalAggregations extends Aggregations implements Streamable { public static final InternalAggregations EMPTY = new InternalAggregations(); + private static final Comparator INTERNAL_AGG_COMPARATOR = (agg1, agg2) -> { + if (agg1.isMapped() == agg2.isMapped()) { + return 0; + } else if (agg1.isMapped() && agg2.isMapped() == false) { + return -1; + } else { + return 1; + } + }; private InternalAggregations() { } @@ -73,6 +83,9 @@ public final class InternalAggregations extends Aggregations implements Streamab List reducedAggregations = new ArrayList<>(); for (Map.Entry> entry : aggByName.entrySet()) { List aggregations = entry.getValue(); + // Sort aggregations so that unmapped aggs come last in the list + // If all aggs are unmapped, the agg that leads the reduction will just return itself + aggregations.sort(INTERNAL_AGG_COMPARATOR); InternalAggregation first = aggregations.get(0); // the list can't be empty as it's created on demand reducedAggregations.add(first.reduce(aggregations, context)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java index fafcb5c37f7..9312f9720cd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java @@ -502,10 +502,18 @@ public class InternalOrder extends BucketOrder { // convert the new order IDs to the old histogram order IDs. 
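        // Legacy wire format used a single byte per histogram order:
        // 1 = KEY_ASC, 2 = KEY_DESC, 3 = COUNT_ASC, 4 = COUNT_DESC (see the switch below);
        // the new BucketOrder ids are re-encoded into that scheme so the old format can still be read.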
byte id; switch (order.id()) { - case COUNT_DESC_ID: id = 4; break; - case COUNT_ASC_ID: id = 3; break; - case KEY_DESC_ID: id = 2; break; - case KEY_ASC_ID: id = 1; break; + case COUNT_DESC_ID: + id = 4; + break; + case COUNT_ASC_ID: + id = 3; + break; + case KEY_DESC_ID: + id = 2; + break; + case KEY_ASC_ID: + id = 1; + break; default: throw new RuntimeException("unknown order id [" + order.id() + "]"); } out.writeByte(id); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java index 1b751d8c684..8b66738848f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java @@ -25,7 +25,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; -import java.util.List; +import java.util.Collection; import java.util.Map; /** @@ -68,8 +68,8 @@ public abstract class PipelineAggregationBuilder implements NamedWriteable, Base * Internal: Validates the state of this factory (makes sure the factory is properly * configured) */ - protected abstract void validate(AggregatorFactory parent, List factories, - List pipelineAggregatorFactories); + protected abstract void validate(AggregatorFactory parent, Collection aggregationBuilders, + Collection pipelineAggregatorBuilders); /** * Creates the pipeline aggregator diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java index 482b8be1c45..251dc7e4283 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java @@ -144,6 +144,13 @@ public class CompositeAggregationBuilder extends AbstractAggregationBuilder doBuild(SearchContext context, AggregatorFactory parent, AggregatorFactories.Builder subfactoriesBuilder) throws IOException { @@ -193,11 +200,7 @@ public class CompositeAggregationBuilder extends AbstractAggregationBuilder source: sources) { - builder.startObject(); - builder.startObject(source.name()); - source.toXContent(builder, params); - builder.endObject(); - builder.endObject(); + CompositeValuesSourceParserHelper.toXContent(source, builder, params); } builder.endArray(); if (after != null) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceParserHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceParserHelper.java index 713ecfc6628..d773a09d645 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceParserHelper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceParserHelper.java @@ -25,7 +25,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.AbstractObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; 
import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.support.ValueType; @@ -33,7 +35,7 @@ import java.io.IOException; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -class CompositeValuesSourceParserHelper { +public class CompositeValuesSourceParserHelper { static , T> void declareValuesSourceFields(AbstractObjectParser objectParser, ValueType targetValueType) { objectParser.declareField(VB::field, XContentParser::text, @@ -57,7 +59,7 @@ class CompositeValuesSourceParserHelper { objectParser.declareField(VB::order, XContentParser::text, new ParseField("order"), ObjectParser.ValueType.STRING); } - static void writeTo(CompositeValuesSourceBuilder builder, StreamOutput out) throws IOException { + public static void writeTo(CompositeValuesSourceBuilder builder, StreamOutput out) throws IOException { final byte code; if (builder.getClass() == TermsValuesSourceBuilder.class) { code = 0; @@ -72,7 +74,7 @@ class CompositeValuesSourceParserHelper { builder.writeTo(out); } - static CompositeValuesSourceBuilder readFrom(StreamInput in) throws IOException { + public static CompositeValuesSourceBuilder readFrom(StreamInput in) throws IOException { int code = in.readByte(); switch(code) { case 0: @@ -86,7 +88,7 @@ class CompositeValuesSourceParserHelper { } } - static CompositeValuesSourceBuilder fromXContent(XContentParser parser) throws IOException { + public static CompositeValuesSourceBuilder fromXContent(XContentParser parser) throws IOException { XContentParser.Token token = parser.currentToken(); ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation); token = parser.nextToken(); @@ -117,4 +119,14 @@ class CompositeValuesSourceParserHelper { parser.nextToken(); return builder; } + + public static XContentBuilder toXContent(CompositeValuesSourceBuilder source, XContentBuilder builder, Params params) + throws IOException { + builder.startObject(); + builder.startObject(source.name()); + source.toXContent(builder, params); + builder.endObject(); + builder.endObject(); + return builder; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java index bb785efde48..dba7fbb34fb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java @@ -25,10 +25,10 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.rounding.DateTimeUnit; import org.elasticsearch.common.rounding.Rounding; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -72,7 +72,7 @@ import static java.util.Collections.unmodifiableMap; public class DateHistogramAggregationBuilder extends 
ValuesSourceAggregationBuilder implements MultiBucketAggregationBuilder { public static final String NAME = "date_histogram"; - private static DateMathParser EPOCH_MILLIS_PARSER = new DateMathParser(Joda.forPattern("epoch_millis", Locale.ROOT)); + private static DateMathParser EPOCH_MILLIS_PARSER = Joda.forPattern("epoch_millis", Locale.ROOT).toDateMathParser(); public static final Map DATE_FIELD_UNITS; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/UnmappedSampler.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/UnmappedSampler.java index 3459e110d7e..5f5f557ffd5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/UnmappedSampler.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/UnmappedSampler.java @@ -50,12 +50,12 @@ public class UnmappedSampler extends InternalSampler { @Override public InternalAggregation doReduce(List aggregations, ReduceContext reduceContext) { - for (InternalAggregation agg : aggregations) { - if (!(agg instanceof UnmappedSampler)) { - return agg.reduce(aggregations, reduceContext); - } - } - return this; + return new UnmappedSampler(name, pipelineAggregators(), metaData); + } + + @Override + public boolean isMapped() { + return false; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/UnmappedSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/UnmappedSignificantTerms.java index 66fc171bbe3..f2c9f8b29ad 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/UnmappedSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/UnmappedSignificantTerms.java @@ -51,13 +51,13 @@ public class UnmappedSignificantTerms extends InternalSignificantTerms { private Bucket(BytesRef term, long subsetDf, long subsetSize, long supersetDf, long supersetSize, InternalAggregations aggregations, - DocValueFormat format) { + DocValueFormat format) { super(subsetDf, subsetSize, supersetDf, supersetSize, aggregations, format); } } public UnmappedSignificantTerms(String name, int requiredSize, long minDocCount, List pipelineAggregators, - Map metaData) { + Map metaData) { super(name, requiredSize, minDocCount, pipelineAggregators, metaData); } @@ -100,12 +100,12 @@ public class UnmappedSignificantTerms extends InternalSignificantTerms aggregations, ReduceContext reduceContext) { - for (InternalAggregation aggregation : aggregations) { - if (!(aggregation instanceof UnmappedSignificantTerms)) { - return aggregation.reduce(aggregations, reduceContext); - } - } - return this; + return new UnmappedSignificantTerms(name, requiredSize, minDocCount, pipelineAggregators(), metaData); + } + + @Override + public boolean isMapped() { + return false; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java index 595991dac06..17a3e603b6f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java @@ -22,10 +22,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.BucketOrder; import java.io.IOException; import java.util.Collections; @@ -95,12 +95,12 @@ public class UnmappedTerms extends InternalTerms aggregations, ReduceContext reduceContext) { - for (InternalAggregation agg : aggregations) { - if (!(agg instanceof UnmappedTerms)) { - return agg.reduce(aggregations, reduceContext); - } - } - return this; + return new UnmappedTerms(name, order, requiredSize, minDocCount, pipelineAggregators(), metaData); + } + + @Override + public boolean isMapped() { + return false; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java index 38b783e6b95..c2add245058 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java @@ -721,7 +721,6 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder fieldDataFields = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { FieldAndFormat ff = FieldAndFormat.fromXContent(parser); factory.docValueField(ff.field, ff.format); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java index 33c68aff26d..cbe9cc2b895 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java @@ -28,7 +28,7 @@ import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import java.io.IOException; import java.util.Arrays; -import java.util.List; +import java.util.Collection; import java.util.Map; import java.util.Objects; @@ -81,8 +81,8 @@ public abstract class AbstractPipelineAggregationBuilder parent, List factories, - List pipelineAggregatorFactories) { + public final void validate(AggregatorFactory parent, Collection factories, + Collection pipelineAggregatorFactories) { doValidate(parent, factories, pipelineAggregatorFactories); } @@ -99,8 +99,8 @@ public abstract class AbstractPipelineAggregationBuilder parent, List factories, - List pipelineAggregatorFactories) { + public void doValidate(AggregatorFactory parent, Collection factories, + Collection pipelineAggregatorFactories) { } @SuppressWarnings("unchecked") diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsPipelineAggregationBuilder.java index 56db4310c94..c77922eff2a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsPipelineAggregationBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsPipelineAggregationBuilder.java @@ -32,7 +32,7 @@ import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; -import java.util.List; +import java.util.Collection; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -109,8 +109,8 @@ public abstract class BucketMetricsPipelineAggregationBuilder metaData) throws IOException; @Override - public void doValidate(AggregatorFactory parent, List aggBuilders, - List pipelineAggregatorFactories) { + public void doValidate(AggregatorFactory parent, Collection aggBuilders, + Collection pipelineAggregatorFactories) { if (bucketsPaths.length != 1) { throw new IllegalStateException(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " must contain a single entry for aggregation [" + name + "]"); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregationBuilder.java index 0870ef0e187..56dd0d3e786 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregationBuilder.java @@ -35,7 +35,7 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetric import java.io.IOException; import java.util.Arrays; -import java.util.List; +import java.util.Collection; import java.util.Map; import java.util.Objects; @@ -95,8 +95,8 @@ public class PercentilesBucketPipelineAggregationBuilder } @Override - public void doValidate(AggregatorFactory parent, List aggFactories, - List pipelineAggregatorFactories) { + public void doValidate(AggregatorFactory parent, Collection aggFactories, + Collection pipelineAggregatorFactories) { super.doValidate(parent, aggFactories, pipelineAggregatorFactories); for (Double p : percents) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketPipelineAggregationBuilder.java index 00db3fabaa6..84dcb03fbe9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketPipelineAggregationBuilder.java @@ -29,7 +29,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregationBuilder; import java.io.IOException; -import java.util.List; +import java.util.Collection; import java.util.Map; import java.util.Objects; @@ -82,8 +82,8 @@ public class ExtendedStatsBucketPipelineAggregationBuilder } @Override - public void doValidate(AggregatorFactory parent, List aggBuilders, - List pipelineAggregatorFactories) { + public void doValidate(AggregatorFactory parent, Collection aggBuilders, + 
Collection pipelineAggregatorFactories) { super.doValidate(parent, aggBuilders, pipelineAggregatorFactories); if (sigma < 0.0 ) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregationBuilder.java index 7f148c8e365..15c37061cd9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregationBuilder.java @@ -38,6 +38,7 @@ import org.elasticsearch.search.sort.SortBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Locale; @@ -145,8 +146,8 @@ public class BucketSortPipelineAggregationBuilder extends AbstractPipelineAggreg } @Override - public void doValidate(AggregatorFactory parent, List aggFactories, - List pipelineAggregatoractories) { + public void doValidate(AggregatorFactory parent, Collection aggFactories, + Collection pipelineAggregatoractories) { if (sorts.isEmpty() && size == null && from == 0) { throw new IllegalStateException("[" + name + "] is configured to perform nothing. Please set either of " + Arrays.asList(SearchSourceBuilder.SORT_FIELD.getPreferredName(), SIZE.getPreferredName(), FROM.getPreferredName()) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregationBuilder.java index dbbb7fa534a..209af3c03a7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregationBuilder.java @@ -36,6 +36,7 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetric import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Objects; @@ -97,8 +98,8 @@ public class CumulativeSumPipelineAggregationBuilder extends AbstractPipelineAgg } @Override - public void doValidate(AggregatorFactory parent, List aggFactories, - List pipelineAggregatorFactories) { + public void doValidate(AggregatorFactory parent, Collection aggFactories, + Collection pipelineAggregatorFactories) { if (bucketsPaths.length != 1) { throw new IllegalStateException(BUCKETS_PATH.getPreferredName() + " must contain a single entry for aggregation [" + name + "]"); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregationBuilder.java index ba7a2a2c03f..5fac90b0948 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregationBuilder.java @@ -42,6 +42,7 @@ import org.joda.time.DateTimeZone; import java.io.IOException; import java.util.ArrayList; +import 
java.util.Collection; import java.util.List; import java.util.Map; import java.util.Objects; @@ -156,8 +157,8 @@ public class DerivativePipelineAggregationBuilder extends AbstractPipelineAggreg } @Override - public void doValidate(AggregatorFactory parent, List aggFactories, - List pipelineAggregatoractories) { + public void doValidate(AggregatorFactory parent, Collection aggFactories, + Collection pipelineAggregatoractories) { if (bucketsPaths.length != 1) { throw new IllegalStateException(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " must contain a single entry for aggregation [" + name + "]"); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregationBuilder.java index 074ea1c2589..1f36d5395b2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregationBuilder.java @@ -44,6 +44,7 @@ import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; import java.io.IOException; import java.text.ParseException; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Objects; @@ -260,8 +261,8 @@ public class MovAvgPipelineAggregationBuilder extends AbstractPipelineAggregatio } @Override - public void doValidate(AggregatorFactory parent, List aggFactories, - List pipelineAggregatoractories) { + public void doValidate(AggregatorFactory parent, Collection aggFactories, + Collection pipelineAggregatoractories) { if (minimize != null && minimize && !model.canBeMinimized()) { // If the user asks to minimize, but this model doesn't support // it, throw exception diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java index 185e1c63b98..375125dbefc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java @@ -39,7 +39,7 @@ import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; -import java.util.List; +import java.util.Collection; import java.util.Locale; import java.util.Map; import java.util.Objects; @@ -173,8 +173,8 @@ public class MovFnPipelineAggregationBuilder extends AbstractPipelineAggregation } @Override - public void doValidate(AggregatorFactory parent, List aggFactories, - List pipelineAggregatoractories) { + public void doValidate(AggregatorFactory parent, Collection aggFactories, + Collection pipelineAggregatoractories) { if (window <= 0) { throw new IllegalArgumentException("[" + WINDOW.getPreferredName() + "] must be a positive, non-zero integer."); } diff --git a/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java b/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java index 9ec20001adc..d98a8585ecf 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java +++ 
b/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java @@ -22,10 +22,13 @@ import org.apache.lucene.index.LeafReader; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.index.fieldvisitor.SingleFieldsVisitor; +import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.TypeFieldMapper; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; @@ -48,13 +51,10 @@ public class LeafFieldsLookup implements Map { private final Map cachedFieldData = new HashMap<>(); - private final SingleFieldsVisitor fieldVisitor; - LeafFieldsLookup(MapperService mapperService, @Nullable String[] types, LeafReader reader) { this.mapperService = mapperService; this.types = types; this.reader = reader; - this.fieldVisitor = new SingleFieldsVisitor(null); } public void setDocument(int docId) { @@ -142,16 +142,23 @@ public class LeafFieldsLookup implements Map { cachedFieldData.put(name, data); } if (data.fields() == null) { - String fieldName = data.fieldType().name(); - fieldVisitor.reset(fieldName); - try { - reader.document(docId, fieldVisitor); - fieldVisitor.postProcess(mapperService); - List storedFields = fieldVisitor.fields().get(data.fieldType().name()); - data.fields(singletonMap(fieldName, storedFields)); - } catch (IOException e) { - throw new ElasticsearchParseException("failed to load field [{}]", e, name); + List values; + if (TypeFieldMapper.NAME.equals(data.fieldType().name())) { + values = new ArrayList<>(1); + final DocumentMapper mapper = mapperService.documentMapper(); + if (mapper != null) { + values.add(mapper.type()); + } + } else { + values = new ArrayList(2); + SingleFieldsVisitor visitor = new SingleFieldsVisitor(data.fieldType(), values); + try { + reader.document(docId, visitor); + } catch (IOException e) { + throw new ElasticsearchParseException("failed to load field [{}]", e, name); + } } + data.fields(singletonMap(data.fieldType().name(), values)); } return data; } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index e4f0aa6898a..56d409ef313 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -232,7 +232,10 @@ public class QueryPhase implements SearchPhase { final Runnable checkCancelled; if (timeoutRunnable != null && cancellationRunnable != null) { - checkCancelled = () -> { timeoutRunnable.run(); cancellationRunnable.run(); }; + checkCancelled = () -> { + timeoutRunnable.run(); + cancellationRunnable.run(); + }; } else if (timeoutRunnable != null) { checkCancelled = timeoutRunnable; } else if (cancellationRunnable != null) { diff --git a/server/src/main/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilder.java b/server/src/main/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilder.java index ec9d95c722d..45f53006ecd 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilder.java +++ b/server/src/main/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilder.java @@ -28,8 +28,10 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.node.Node; import 
java.util.Arrays; +import java.util.Iterator; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.ThreadFactory; @@ -71,14 +73,42 @@ public final class AutoQueueAdjustingExecutorBuilder extends ExecutorBuilder tempMinQueueSizeSetting = Setting.intSetting(minSizeKey, minQueueSize, Setting.Property.NodeScope); this.minQueueSizeSetting = new Setting<>( - minSizeKey, - (s) -> Integer.toString(minQueueSize), - (s) -> Setting.parseInt(s, 0, tempMaxQueueSizeSetting.get(settings), minSizeKey), - Setting.Property.NodeScope); + minSizeKey, + Integer.toString(minQueueSize), + (s) -> Setting.parseInt(s, 0, minSizeKey), + new Setting.Validator() { + @Override + public void validate(Integer value, Map, Integer> settings) { + if (value > settings.get(tempMaxQueueSizeSetting)) { + throw new IllegalArgumentException("Failed to parse value [" + value + "] for setting [" + minSizeKey + + "] must be <= " + settings.get(tempMaxQueueSizeSetting)); + } + } + + @Override + public Iterator> settings() { + return Arrays.asList(tempMaxQueueSizeSetting).iterator(); + } + }, + Setting.Property.NodeScope); this.maxQueueSizeSetting = new Setting<>( maxSizeKey, - (s) -> Integer.toString(maxQueueSize), - (s) -> Setting.parseInt(s, tempMinQueueSizeSetting.get(settings), Integer.MAX_VALUE, maxSizeKey), + Integer.toString(maxQueueSize), + (s) -> Setting.parseInt(s, 0, maxSizeKey), + new Setting.Validator() { + @Override + public void validate(Integer value, Map, Integer> settings) { + if (value < settings.get(tempMinQueueSizeSetting)) { + throw new IllegalArgumentException("Failed to parse value [" + value + "] for setting [" + minSizeKey + + "] must be >= " + settings.get(tempMinQueueSizeSetting)); + } + } + + @Override + public Iterator> settings() { + return Arrays.asList(tempMinQueueSizeSetting).iterator(); + } + }, Setting.Property.NodeScope); this.frameSizeSetting = Setting.intSetting(frameSizeKey, frameSize, 100, Setting.Property.NodeScope); } @@ -141,12 +171,12 @@ public final class AutoQueueAdjustingExecutorBuilder extends ExecutorBuilder connectedNodes = ConcurrentCollections.newConcurrentMap(); private final KeyedLock connectionLock = new KeyedLock<>(); - private final Logger logger; private final Transport transport; private final ThreadPool threadPool; private final TimeValue pingSchedule; @@ -71,7 +71,6 @@ public class ConnectionManager implements Closeable { } public ConnectionManager(Settings settings, Transport transport, ThreadPool threadPool, ConnectionProfile defaultProfile) { - this.logger = Loggers.getLogger(getClass(), settings); this.transport = transport; this.threadPool = threadPool; this.pingSchedule = TcpTransport.PING_SCHEDULE.get(settings); @@ -325,7 +324,7 @@ public class ConnectionManager implements Closeable { } } - static ConnectionProfile buildDefaultConnectionProfile(Settings settings) { + public static ConnectionProfile buildDefaultConnectionProfile(Settings settings) { int connectionsPerNodeRecovery = TransportService.CONNECTIONS_PER_NODE_RECOVERY.get(settings); int connectionsPerNodeBulk = TransportService.CONNECTIONS_PER_NODE_BULK.get(settings); int connectionsPerNodeReg = TransportService.CONNECTIONS_PER_NODE_REG.get(settings); diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java index f75d01a0233..a1776d04d34 100644 --- 
a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java @@ -36,12 +36,16 @@ import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; +import java.util.NavigableSet; import java.util.Set; +import java.util.TreeSet; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -181,12 +185,36 @@ public abstract class RemoteClusterAware extends AbstractComponent { * {@link TransportAddress#META_ADDRESS} and their configured address will be used as the hostname for the generated discovery node. */ protected static Map>>> buildRemoteClustersDynamicConfig(Settings settings) { - Stream>> allConcreteSettings = REMOTE_CLUSTERS_SEEDS.getAllConcreteSettings(settings); + final Map>>> remoteSeeds = + buildRemoteClustersDynamicConfig(settings, REMOTE_CLUSTERS_SEEDS); + final Map>>> searchRemoteSeeds = + buildRemoteClustersDynamicConfig(settings, SEARCH_REMOTE_CLUSTERS_SEEDS); + // sort the intersection for predictable output order + final NavigableSet intersection = + new TreeSet<>(Arrays.asList( + searchRemoteSeeds.keySet().stream().filter(s -> remoteSeeds.keySet().contains(s)).sorted().toArray(String[]::new))); + if (intersection.isEmpty() == false) { + final String message = String.format( + Locale.ROOT, + "found duplicate remote cluster configurations for cluster alias%s [%s]", + intersection.size() == 1 ? "" : "es", + String.join(",", intersection)); + throw new IllegalArgumentException(message); + } + return Stream + .concat(remoteSeeds.entrySet().stream(), searchRemoteSeeds.entrySet().stream()) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + private static Map>>> buildRemoteClustersDynamicConfig( + final Settings settings, final Setting.AffixSetting> seedsSetting) { + final Stream>> allConcreteSettings = seedsSetting.getAllConcreteSettings(settings); return allConcreteSettings.collect( - Collectors.toMap(REMOTE_CLUSTERS_SEEDS::getNamespace, concreteSetting -> { - String clusterName = REMOTE_CLUSTERS_SEEDS.getNamespace(concreteSetting); + Collectors.toMap(seedsSetting::getNamespace, concreteSetting -> { + String clusterName = seedsSetting.getNamespace(concreteSetting); List addresses = concreteSetting.get(settings); - final boolean proxyMode = REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).exists(settings); + final boolean proxyMode = + REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).existsOrFallbackExists(settings); List> nodes = new ArrayList<>(addresses.size()); for (String address : addresses) { nodes.add(() -> buildSeedNode(clusterName, address, proxyMode)); @@ -231,9 +259,10 @@ public abstract class RemoteClusterAware extends AbstractComponent { List clusters = clusterNameResolver.resolveClusterNames(remoteClusterNames, remoteClusterName); if (clusters.isEmpty() == false) { if (indexExists.test(index)) { - // we use : as a separator for remote clusters. might conflict if there is an index that is actually named - // remote_cluster_alias:index_name - for this case we fail the request. the user can easily change the cluster alias - // if that happens + //We use ":" as a separator for remote clusters. 
There may be a conflict if there is an index that is named + //remote_cluster_alias:index_name - for this case we fail the request. The user can easily change the cluster alias + //if that happens. Note that indices and aliases can be created with ":" in their names names up to 6.last, which + //means such names need to be supported until 7.last. It will be possible to remove this check from 8.0 on. throw new IllegalArgumentException("Can not filter indices; index " + index + " exists but there is also a remote cluster named: " + remoteClusterName); } @@ -242,6 +271,9 @@ public abstract class RemoteClusterAware extends AbstractComponent { perClusterIndices.computeIfAbsent(clusterName, k -> new ArrayList<>()).add(indexName); } } else { + //Indices and aliases can be created with ":" in their names up to 6.last (although deprecated), and still be + //around in 7.x. That's why we need to be lenient here and treat the index as local although it contains ":". + //It will be possible to remove such leniency and assume that no local indices contain ":" only from 8.0 on. perClusterIndices.computeIfAbsent(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, k -> new ArrayList<>()).add(index); } } else { diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java index 75891ef820c..dc3bd3a3536 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java @@ -262,14 +262,16 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl Map originalIndicesMap = new HashMap<>(); if (isCrossClusterSearchEnabled()) { final Map> groupedIndices = groupClusterIndices(indices, indexExists); - for (Map.Entry> entry : groupedIndices.entrySet()) { - String clusterAlias = entry.getKey(); - List originalIndices = entry.getValue(); - originalIndicesMap.put(clusterAlias, - new OriginalIndices(originalIndices.toArray(new String[originalIndices.size()]), indicesOptions)); - } - if (originalIndicesMap.containsKey(LOCAL_CLUSTER_GROUP_KEY) == false) { + if (groupedIndices.isEmpty()) { + //search on _all in the local cluster if neither local indices nor remote indices were specified originalIndicesMap.put(LOCAL_CLUSTER_GROUP_KEY, new OriginalIndices(Strings.EMPTY_ARRAY, indicesOptions)); + } else { + for (Map.Entry> entry : groupedIndices.entrySet()) { + String clusterAlias = entry.getKey(); + List originalIndices = entry.getValue(); + originalIndicesMap.put(clusterAlias, + new OriginalIndices(originalIndices.toArray(new String[originalIndices.size()]), indicesOptions)); + } } } else { originalIndicesMap.put(LOCAL_CLUSTER_GROUP_KEY, new OriginalIndices(indices, indicesOptions)); diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index d1abc261ea7..27b4aa7293e 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -222,7 +222,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements this.compress = Transport.TRANSPORT_TCP_COMPRESS.get(settings); this.networkService = networkService; this.transportName = transportName; - this.transportLogger = new TransportLogger(settings); + this.transportLogger = new TransportLogger(); final Settings defaultFeatures = 
DEFAULT_FEATURES_SETTING.get(settings); if (defaultFeatures == null) { @@ -1492,7 +1492,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements } } - protected Version executeHandshake(DiscoveryNode node, TcpChannel channel, TimeValue timeout) + public Version executeHandshake(DiscoveryNode node, TcpChannel channel, TimeValue timeout) throws IOException, InterruptedException { numHandshakes.inc(); final long requestId = responseHandlers.newRequestId(); diff --git a/server/src/main/java/org/elasticsearch/transport/TransportLogger.java b/server/src/main/java/org/elasticsearch/transport/TransportLogger.java index 3120620b053..ea01cc4ddbf 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportLogger.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportLogger.java @@ -19,13 +19,13 @@ package org.elasticsearch.transport; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.compress.NotCompressedException; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.internal.io.IOUtils; @@ -34,13 +34,9 @@ import java.io.IOException; public final class TransportLogger { - private final Logger logger; + private static final Logger logger = LogManager.getLogger(TransportLogger.class); private static final int HEADER_SIZE = TcpHeader.MARKER_BYTES_SIZE + TcpHeader.MESSAGE_LENGTH_SIZE; - TransportLogger(Settings settings) { - logger = Loggers.getLogger(TransportLogger.class, settings); - } - void logInboundMessage(TcpChannel channel, BytesReference message) { if (logger.isTraceEnabled()) { try { diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index 584a7d0df11..db14fd015fd 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -736,7 +736,7 @@ public class TransportService extends AbstractLifecycleComponent implements Tran @Override public String toString() { - return "processing of [" + action + "][" + requestId + "]: " + request; + return "processing of [" + requestId + "][" + action + "]: " + request; } }); } @@ -1057,7 +1057,7 @@ public class TransportService extends AbstractLifecycleComponent implements Tran @Override public String toString() { - return "TimeoutHandler for [" + action + "][" + requestId + "]"; + return "timeout handler for [" + requestId + "][" + action + "]"; } } @@ -1188,13 +1188,13 @@ public class TransportService extends AbstractLifecycleComponent implements Tran } else { threadPool.executor(executor).execute(new Runnable() { @Override - public String toString() { - return "delivery of response to [" + action + "][" + requestId + "]: " + response; + public void run() { + processResponse(handler, response); } @Override - public void run() { - DirectResponseChannel.this.processResponse(handler, response); + public String toString() { + return "delivery of response to [" + requestId + "][" + action + "]: " + response; } }); } @@ -1229,7 +1229,7 @@ public class 
TransportService extends AbstractLifecycleComponent implements Tran
                     @Override
                     public String toString() {
-                        return "delivery of exception response to [" + action + "][" + requestId + "]: " + exception;
+                        return "delivery of failure response to [" + requestId + "][" + action + "]: " + exception;
                     }
                 });
             }
         }
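The new test below exercises SpanMatchNoDocsQuery. The point of a span-specific "matches nothing" query is that SpanNearQuery and SpanOrQuery only accept SpanQuery clauses, so Lucene's plain MatchNoDocsQuery cannot stand in for an unmapped field inside a span tree. A hedged usage sketch (the helper name is illustrative):

--------------------------------------
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.SpanMatchNoDocsQuery;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;

final class SpanNoDocsSketch {
    static SpanQuery nearPair(String field, boolean mapped, String first, String second) {
        SpanQuery left = mapped ? new SpanTermQuery(new Term(field, first))
                                : new SpanMatchNoDocsQuery(field, "unmapped field [" + field + "]");
        SpanQuery right = mapped ? new SpanTermQuery(new Term(field, second))
                                 : new SpanMatchNoDocsQuery(field, "unmapped field [" + field + "]");
        // an in-order near query; with an unmapped field this can never match
        return new SpanNearQuery(new SpanQuery[] { left, right }, 0, true);
    }
}
--------------------------------------

diff --git a/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java b/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java
new file mode 100644
index 00000000000..6187fc1f7f6
--- /dev/null
+++ b/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.lucene.queries;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.QueryUtils;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.spans.SpanNearQuery;
+import org.apache.lucene.search.spans.SpanOrQuery;
+import org.apache.lucene.search.spans.SpanQuery;
+import org.apache.lucene.search.spans.SpanTermQuery;
+import org.apache.lucene.store.Directory;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+
+public class SpanMatchNoDocsQueryTests extends ESTestCase {
+    public void testSimple() throws Exception {
+        SpanMatchNoDocsQuery query = new SpanMatchNoDocsQuery("field", "a good reason");
+        assertEquals(query.toString(), "SpanMatchNoDocsQuery(\"a good reason\")");
+        Query rewrite = query.rewrite(null);
+        assertTrue(rewrite instanceof SpanMatchNoDocsQuery);
+        assertEquals(rewrite.toString(), "SpanMatchNoDocsQuery(\"a good reason\")");
+    }
+
+    public void testQuery() throws Exception {
+        Directory dir = newDirectory();
+        Analyzer analyzer = new MockAnalyzer(random());
+        IndexWriter iw = new IndexWriter(dir,
+            newIndexWriterConfig(analyzer).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
+        addDoc("one", iw);
+        addDoc("two", iw);
+        addDoc("three", iw);
+        IndexReader ir = DirectoryReader.open(iw);
+        IndexSearcher searcher = new IndexSearcher(ir);
+
+        Query query = new SpanMatchNoDocsQuery("unknown", "field not found");
+        assertEquals(searcher.count(query), 0);
+
+        ScoreDoc[] hits;
+        hits = searcher.search(query, 1000).scoreDocs;
+        assertEquals(0, hits.length);
+        assertEquals(query.toString(), "SpanMatchNoDocsQuery(\"field not found\")");
+
+        SpanOrQuery orQuery = new 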
SpanOrQuery( + new SpanMatchNoDocsQuery("unknown", "field not found"), + new SpanTermQuery(new Term("unknown", "one")) + ); + assertEquals(searcher.count(orQuery), 0); + hits = searcher.search(orQuery, 1000).scoreDocs; + assertEquals(0, hits.length); + + orQuery = new SpanOrQuery( + new SpanMatchNoDocsQuery("key", "a good reason"), + new SpanTermQuery(new Term("key", "one")) + ); + assertEquals(searcher.count(orQuery), 1); + hits = searcher.search(orQuery, 1000).scoreDocs; + assertEquals(1, hits.length); + Query rewrite = orQuery.rewrite(ir); + assertEquals(rewrite, orQuery); + + SpanNearQuery nearQuery = new SpanNearQuery( + new SpanQuery[] {new SpanMatchNoDocsQuery("same", ""), new SpanMatchNoDocsQuery("same", "")}, + 0, true); + assertEquals(searcher.count(nearQuery), 0); + hits = searcher.search(nearQuery, 1000).scoreDocs; + assertEquals(0, hits.length); + rewrite = nearQuery.rewrite(ir); + assertEquals(rewrite, nearQuery); + + iw.close(); + ir.close(); + dir.close(); + } + + public void testEquals() { + Query q1 = new SpanMatchNoDocsQuery("key1", "one"); + Query q2 = new SpanMatchNoDocsQuery("key2", "two"); + assertTrue(q1.equals(q2)); + QueryUtils.check(q1); + } + + private void addDoc(String text, IndexWriter iw) throws IOException { + Document doc = new Document(); + Field f = newTextField("key", text, Field.Store.YES); + doc.add(f); + iw.addDocument(doc); + } + +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java index 61629515d4e..feeb9646e40 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java @@ -62,6 +62,7 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.test.junit.annotations.TestLogging; import java.util.Arrays; import java.util.List; @@ -81,8 +82,11 @@ public class ShrinkIndexIT extends ESIntegTestCase { return false; } + @TestLogging("org.elasticsearch.index.store:DEBUG") public void testCreateShrinkIndexToN() { - assumeFalse("Broken on windows - https://github.com/elastic/elasticsearch/issues/33857", Constants.WINDOWS); + + assumeFalse("https://github.com/elastic/elasticsearch/issues/34080", Constants.WINDOWS); + int[][] possibleShardSplits = new int[][] {{8,4,2}, {9, 3, 1}, {4, 2, 1}, {15,5,1}}; int[] shardSplits = randomFrom(possibleShardSplits); assertEquals(shardSplits[0], (shardSplits[0] / shardSplits[1]) * shardSplits[1]); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java index 4510956358f..044f3263b4a 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.search.join.ScoreMode; +import org.apache.lucene.util.Constants; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import 
org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; @@ -91,6 +92,9 @@ public class SplitIndexIT extends ESIntegTestCase { } public void testSplitFromOneToN() { + + assumeFalse("https://github.com/elastic/elasticsearch/issues/34080", Constants.WINDOWS); + splitToN(1, 5, 10); client().admin().indices().prepareDelete("*").get(); int randomSplit = randomIntBetween(2, 6); @@ -485,8 +489,6 @@ public class SplitIndexIT extends ESIntegTestCase { ImmutableOpenMap dataNodes = client().admin().cluster().prepareState().get().getState().nodes() .getDataNodes(); assertTrue("at least 2 nodes but was: " + dataNodes.size(), dataNodes.size() >= 2); - DiscoveryNode[] discoveryNodes = dataNodes.values().toArray(DiscoveryNode.class); - String mergeNode = discoveryNodes[0].getName(); // ensure all shards are allocated otherwise the ensure green below might not succeed since we require the merge node // if we change the setting too quickly we will end up with one replica unassigned which can't be assigned anymore due // to the require._name below. diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java index 91c78418683..beae91df77e 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.indices.mapping.get; import com.carrotsearch.hppc.cursors.ObjectCursor; + import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.xcontent.XContentParser; @@ -119,20 +120,15 @@ public class GetMappingsResponseTests extends AbstractStreamableXContentTestCase private static Map randomFieldMapping() { Map mappings = new HashMap<>(); if (randomBoolean()) { - Map regularMapping = new HashMap<>(); - regularMapping.put("type", randomBoolean() ? "text" : "keyword"); - regularMapping.put("index", "analyzed"); - regularMapping.put("analyzer", "english"); - return regularMapping; + mappings.put("type", randomBoolean() ? 
"text" : "keyword"); + mappings.put("index", "analyzed"); + mappings.put("analyzer", "english"); } else if (randomBoolean()) { - Map numberMapping = new HashMap<>(); - numberMapping.put("type", randomFrom("integer", "float", "long", "double")); - numberMapping.put("index", Objects.toString(randomBoolean())); - return numberMapping; + mappings.put("type", randomFrom("integer", "float", "long", "double")); + mappings.put("index", Objects.toString(randomBoolean())); } else if (randomBoolean()) { - Map objMapping = new HashMap<>(); - objMapping.put("type", "object"); - objMapping.put("dynamic", "strict"); + mappings.put("type", "object"); + mappings.put("dynamic", "strict"); Map properties = new HashMap<>(); Map props1 = new HashMap<>(); props1.put("type", randomFrom("text", "keyword")); @@ -146,12 +142,10 @@ public class GetMappingsResponseTests extends AbstractStreamableXContentTestCase props3.put("index", "false"); prop2properties.put("subsubfield", props3); props2.put("properties", prop2properties); - objMapping.put("properties", properties); - return objMapping; + mappings.put("properties", properties); } else { - Map plainMapping = new HashMap<>(); - plainMapping.put("type", "keyword"); - return plainMapping; + mappings.put("type", "keyword"); } + return mappings; } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/RetryTests.java b/server/src/test/java/org/elasticsearch/action/bulk/RetryTests.java index 320f11ff6d0..f9cdfa092ae 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/RetryTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/RetryTests.java @@ -85,7 +85,7 @@ public class RetryTests extends ESTestCase { BulkRequest bulkRequest = createBulkRequest(); BulkResponse response = new Retry(backoff, bulkClient.threadPool()) - .withBackoff(bulkClient::bulk, bulkRequest, bulkClient.settings()) + .withBackoff(bulkClient::bulk, bulkRequest) .actionGet(); assertFalse(response.hasFailures()); @@ -97,7 +97,7 @@ public class RetryTests extends ESTestCase { BulkRequest bulkRequest = createBulkRequest(); BulkResponse response = new Retry(backoff, bulkClient.threadPool()) - .withBackoff(bulkClient::bulk, bulkRequest, bulkClient.settings()) + .withBackoff(bulkClient::bulk, bulkRequest) .actionGet(); assertTrue(response.hasFailures()); @@ -110,7 +110,7 @@ public class RetryTests extends ESTestCase { BulkRequest bulkRequest = createBulkRequest(); Retry retry = new Retry(backoff, bulkClient.threadPool()); - retry.withBackoff(bulkClient::bulk, bulkRequest, listener, bulkClient.settings()); + retry.withBackoff(bulkClient::bulk, bulkRequest, listener); listener.awaitCallbacksCalled(); listener.assertOnResponseCalled(); @@ -125,7 +125,7 @@ public class RetryTests extends ESTestCase { BulkRequest bulkRequest = createBulkRequest(); Retry retry = new Retry(backoff, bulkClient.threadPool()); - retry.withBackoff(bulkClient::bulk, bulkRequest, listener, bulkClient.settings()); + retry.withBackoff(bulkClient::bulk, bulkRequest, listener); listener.awaitCallbacksCalled(); diff --git a/server/src/test/java/org/elasticsearch/action/ingest/TrackingResultProcessorTests.java b/server/src/test/java/org/elasticsearch/action/ingest/TrackingResultProcessorTests.java deleted file mode 100644 index 3572a04529b..00000000000 --- a/server/src/test/java/org/elasticsearch/action/ingest/TrackingResultProcessorTests.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.ingest; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ingest.TestProcessor; -import org.elasticsearch.ingest.CompoundProcessor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.test.ESTestCase; -import org.junit.Before; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.ingest.CompoundProcessor.ON_FAILURE_MESSAGE_FIELD; -import static org.elasticsearch.ingest.CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD; -import static org.elasticsearch.ingest.CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD; -import static org.elasticsearch.action.ingest.TrackingResultProcessor.decorate; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.Matchers.sameInstance; - -public class TrackingResultProcessorTests extends ESTestCase { - - private IngestDocument ingestDocument; - private List resultList; - - @Before - public void init() { - ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); - resultList = new ArrayList<>(); - } - - public void testActualProcessor() throws Exception { - TestProcessor actualProcessor = new TestProcessor(ingestDocument -> {}); - TrackingResultProcessor trackingProcessor = new TrackingResultProcessor(false, actualProcessor, resultList); - trackingProcessor.execute(ingestDocument); - - SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); - - assertThat(actualProcessor.getInvokedCounter(), equalTo(1)); - assertThat(resultList.size(), equalTo(1)); - - assertThat(resultList.get(0).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); - assertThat(resultList.get(0).getFailure(), nullValue()); - assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedResult.getProcessorTag())); - } - - public void testActualCompoundProcessorWithoutOnFailure() throws Exception { - RuntimeException exception = new RuntimeException("processor failed"); - TestProcessor testProcessor = new TestProcessor(ingestDocument -> { throw exception; }); - CompoundProcessor actualProcessor = new CompoundProcessor(testProcessor); - CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); - - try { - trackingProcessor.execute(ingestDocument); - fail("processor should throw exception"); - } catch (ElasticsearchException e) { - assertThat(e.getRootCause().getMessage(), equalTo(exception.getMessage())); - } - - SimulateProcessorResult expectedFirstResult = new SimulateProcessorResult(testProcessor.getTag(), ingestDocument); - assertThat(testProcessor.getInvokedCounter(), equalTo(1)); - 
assertThat(resultList.size(), equalTo(1)); - assertThat(resultList.get(0).getIngestDocument(), nullValue()); - assertThat(resultList.get(0).getFailure(), equalTo(exception)); - assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedFirstResult.getProcessorTag())); - } - - public void testActualCompoundProcessorWithOnFailure() throws Exception { - RuntimeException exception = new RuntimeException("fail"); - TestProcessor failProcessor = new TestProcessor("fail", "test", ingestDocument -> { throw exception; }); - TestProcessor onFailureProcessor = new TestProcessor("success", "test", ingestDocument -> {}); - CompoundProcessor actualProcessor = new CompoundProcessor(false, - Arrays.asList(new CompoundProcessor(false, - Arrays.asList(failProcessor, onFailureProcessor), - Arrays.asList(onFailureProcessor, failProcessor))), - Arrays.asList(onFailureProcessor)); - CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); - trackingProcessor.execute(ingestDocument); - - SimulateProcessorResult expectedFailResult = new SimulateProcessorResult(failProcessor.getTag(), ingestDocument); - SimulateProcessorResult expectedSuccessResult = new SimulateProcessorResult(onFailureProcessor.getTag(), ingestDocument); - - assertThat(failProcessor.getInvokedCounter(), equalTo(2)); - assertThat(onFailureProcessor.getInvokedCounter(), equalTo(2)); - assertThat(resultList.size(), equalTo(4)); - - assertThat(resultList.get(0).getIngestDocument(), nullValue()); - assertThat(resultList.get(0).getFailure(), equalTo(exception)); - assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedFailResult.getProcessorTag())); - - Map metadata = resultList.get(1).getIngestDocument().getIngestMetadata(); - assertThat(metadata.get(ON_FAILURE_MESSAGE_FIELD), equalTo("fail")); - assertThat(metadata.get(ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("test")); - assertThat(metadata.get(ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("fail")); - assertThat(resultList.get(1).getFailure(), nullValue()); - assertThat(resultList.get(1).getProcessorTag(), equalTo(expectedSuccessResult.getProcessorTag())); - - assertThat(resultList.get(2).getIngestDocument(), nullValue()); - assertThat(resultList.get(2).getFailure(), equalTo(exception)); - assertThat(resultList.get(2).getProcessorTag(), equalTo(expectedFailResult.getProcessorTag())); - - metadata = resultList.get(3).getIngestDocument().getIngestMetadata(); - assertThat(metadata.get(ON_FAILURE_MESSAGE_FIELD), equalTo("fail")); - assertThat(metadata.get(ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("test")); - assertThat(metadata.get(ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("fail")); - assertThat(resultList.get(3).getFailure(), nullValue()); - assertThat(resultList.get(3).getProcessorTag(), equalTo(expectedSuccessResult.getProcessorTag())); - } - - public void testActualCompoundProcessorWithIgnoreFailure() throws Exception { - RuntimeException exception = new RuntimeException("processor failed"); - TestProcessor testProcessor = new TestProcessor(ingestDocument -> { throw exception; }); - CompoundProcessor actualProcessor = new CompoundProcessor(true, Collections.singletonList(testProcessor), - Collections.emptyList()); - CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); - - trackingProcessor.execute(ingestDocument); - - SimulateProcessorResult expectedResult = new SimulateProcessorResult(testProcessor.getTag(), ingestDocument); - assertThat(testProcessor.getInvokedCounter(), equalTo(1)); - assertThat(resultList.size(), equalTo(1)); - 
assertThat(resultList.get(0).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); - assertThat(resultList.get(0).getFailure(), sameInstance(exception)); - assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedResult.getProcessorTag())); - } -} diff --git a/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java b/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java index 15b8e1c99d2..230eccb0578 100644 --- a/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java @@ -38,7 +38,7 @@ public class ResyncReplicationRequestTests extends ESTestCase { final Translog.Index index = new Translog.Index("type", "id", 0, randomNonNegativeLong(), randomNonNegativeLong(), bytes, null, -1); final ShardId shardId = new ShardId(new Index("index", "uuid"), 0); - final ResyncReplicationRequest before = new ResyncReplicationRequest(shardId, 42L, new Translog.Operation[]{index}); + final ResyncReplicationRequest before = new ResyncReplicationRequest(shardId, 42L, 100, new Translog.Operation[]{index}); final BytesStreamOutput out = new BytesStreamOutput(); before.writeTo(out); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index 95282e358e1..d3e8c069601 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -52,9 +52,13 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap; +import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentSet; + public class SearchAsyncActionTests extends ESTestCase { public void testSkipSearchShards() throws InterruptedException { @@ -139,7 +143,7 @@ public class SearchAsyncActionTests extends ESTestCase { protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { return new SearchPhase("test") { @Override - public void run() throws IOException { + public void run() { latch.countDown(); } }; @@ -260,7 +264,6 @@ public class SearchAsyncActionTests extends ESTestCase { SearchRequest request = new SearchRequest(); request.allowPartialSearchResults(true); request.setMaxConcurrentShardRequests(randomIntBetween(1, 100)); - CountDownLatch latch = new CountDownLatch(1); AtomicReference response = new AtomicReference<>(); ActionListener responseListener = new ActionListener() { @Override @@ -277,7 +280,7 @@ public class SearchAsyncActionTests extends ESTestCase { DiscoveryNode primaryNode = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); DiscoveryNode replicaNode = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); - Map> nodeToContextMap = new HashMap<>(); + Map> nodeToContextMap = newConcurrentMap(); AtomicInteger contextIdGenerator = new AtomicInteger(0); GroupShardsIterator shardsIter = getShardsIter("idx", new OriginalIndices(new 
String[]{"idx"}, IndicesOptions.strictExpandOpenAndForbidClosed()), @@ -296,6 +299,8 @@ public class SearchAsyncActionTests extends ESTestCase { lookup.put(replicaNode.getId(), new MockConnection(replicaNode)); Map aliasFilters = Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY)); final ExecutorService executor = Executors.newFixedThreadPool(randomIntBetween(1, Runtime.getRuntime().availableProcessors())); + final CountDownLatch latch = new CountDownLatch(1); + final AtomicBoolean latchTriggered = new AtomicBoolean(); AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction( "test", @@ -326,7 +331,7 @@ public class SearchAsyncActionTests extends ESTestCase { Transport.Connection connection = getConnection(null, shard.currentNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult(contextIdGenerator.incrementAndGet(), connection.getNode()); - Set ids = nodeToContextMap.computeIfAbsent(connection.getNode(), (n) -> new HashSet<>()); + Set ids = nodeToContextMap.computeIfAbsent(connection.getNode(), (n) -> newConcurrentSet()); ids.add(testSearchPhaseResult.getRequestId()); if (randomBoolean()) { listener.onResponse(testSearchPhaseResult); @@ -339,15 +344,15 @@ public class SearchAsyncActionTests extends ESTestCase { protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { return new SearchPhase("test") { @Override - public void run() throws IOException { + public void run() { for (int i = 0; i < results.getNumShards(); i++) { TestSearchPhaseResult result = results.getAtomicArray().get(i); assertEquals(result.node.getId(), result.getSearchShardTarget().getNodeId()); sendReleaseSearchContext(result.getRequestId(), new MockConnection(result.node), OriginalIndices.NONE); } responseListener.onResponse(response); - if (latch.getCount() == 0) { - throw new AssertionError("Running a search phase after the latch has reached 0 !!!!"); + if (latchTriggered.compareAndSet(false, true) == false) { + throw new AssertionError("latch triggered twice"); } latch.countDown(); } diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index 902a8ad97a0..c763709a04e 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -44,7 +44,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.TransportService; -import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -64,7 +63,7 @@ public class TransportSearchActionTests extends ESTestCase { ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); } - public void testMergeShardsIterators() throws IOException { + public void testMergeShardsIterators() { List localShardIterators = new ArrayList<>(); { ShardId shardId = new ShardId("local_index", "local_index_uuid", 0); @@ -146,7 +145,7 @@ public class TransportSearchActionTests extends ESTestCase { } } - public void testProcessRemoteShards() throws IOException { + public void testProcessRemoteShards() { try (TransportService transportService = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { RemoteClusterService service = transportService.getRemoteClusterService(); @@ -241,12 
+240,12 @@ public class TransportSearchActionTests extends ESTestCase { } public void testBuildClusters() { - OriginalIndices localIndices = randomOriginalIndices(); + OriginalIndices localIndices = randomBoolean() ? null : randomOriginalIndices(); Map remoteIndices = new HashMap<>(); Map searchShardsResponses = new HashMap<>(); int numRemoteClusters = randomIntBetween(0, 10); boolean onlySuccessful = randomBoolean(); - int localClusters = localIndices.indices().length == 0 ? 0 : 1; + int localClusters = localIndices == null ? 0 : 1; int total = numRemoteClusters + localClusters; int successful = localClusters; int skipped = 0; diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java index e85c03411f7..8fa10c4ee26 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java @@ -443,6 +443,7 @@ public class ReplicationOperationTests extends ESTestCase { final ShardRouting routing; final long localCheckpoint; final long globalCheckpoint; + final long maxSeqNoOfUpdatesOrDeletes; final Supplier replicationGroupSupplier; final Map knownLocalCheckpoints = new HashMap<>(); final Map knownGlobalCheckpoints = new HashMap<>(); @@ -452,6 +453,7 @@ public class ReplicationOperationTests extends ESTestCase { this.replicationGroupSupplier = replicationGroupSupplier; this.localCheckpoint = random().nextLong(); this.globalCheckpoint = randomNonNegativeLong(); + this.maxSeqNoOfUpdatesOrDeletes = randomNonNegativeLong(); } @Override @@ -515,6 +517,11 @@ public class ReplicationOperationTests extends ESTestCase { return globalCheckpoint; } + @Override + public long maxSeqNoOfUpdatesOrDeletes() { + return maxSeqNoOfUpdatesOrDeletes; + } + @Override public ReplicationGroup getReplicationGroup() { return replicationGroupSupplier.get(); @@ -571,6 +578,7 @@ public class ReplicationOperationTests extends ESTestCase { final ShardRouting replica, final Request request, final long globalCheckpoint, + final long maxSeqNoOfUpdatesOrDeletes, final ActionListener listener) { assertTrue("replica request processed twice on [" + replica + "]", request.processedOnReplicas.add(replica)); if (opFailures.containsKey(replica)) { diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index 96861457dd3..8fd46979ab7 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -172,6 +172,7 @@ public class TransportReplicationActionTests extends ESTestCase { action = new TestAction(Settings.EMPTY, "internal:testAction", transportService, clusterService, shardStateAction, threadPool); } + @Override @After public void tearDown() throws Exception { super.tearDown(); @@ -515,6 +516,7 @@ public class TransportReplicationActionTests extends ESTestCase { ActionListener> actionListener, TransportReplicationAction.PrimaryShardReference primaryShardReference) { return new NoopReplicationOperation(request, actionListener) { + @Override public void execute() throws Exception { assertPhase(task, "primary"); 
assertFalse(executed.getAndSet(true)); @@ -571,6 +573,7 @@ public class TransportReplicationActionTests extends ESTestCase { ActionListener> actionListener, TransportReplicationAction.PrimaryShardReference primaryShardReference) { return new NoopReplicationOperation(request, actionListener) { + @Override public void execute() throws Exception { assertPhase(task, "primary"); assertFalse(executed.getAndSet(true)); @@ -629,6 +632,7 @@ public class TransportReplicationActionTests extends ESTestCase { routingState == ShardRoutingState.RELOCATING ? state.nodes().iterator().next().getId() : null, false, routingState), new Request(), randomNonNegativeLong(), + randomNonNegativeLong(), listener); assertTrue(listener.isDone()); assertListenerThrows("non existent node should throw a NoNodeAvailableException", listener, NoNodeAvailableException.class); @@ -637,7 +641,7 @@ public class TransportReplicationActionTests extends ESTestCase { final ShardRouting replica = randomFrom(shardRoutings.replicaShards().stream() .filter(ShardRouting::assignedToNode).collect(Collectors.toList())); listener = new PlainActionFuture<>(); - proxy.performOn(replica, new Request(), randomNonNegativeLong(), listener); + proxy.performOn(replica, new Request(), randomNonNegativeLong(), randomNonNegativeLong(), listener); assertFalse(listener.isDone()); CapturingTransport.CapturedRequest[] captures = transport.getCapturedRequestsAndClear(); @@ -700,13 +704,6 @@ public class TransportReplicationActionTests extends ESTestCase { return null; }).when(shard).acquirePrimaryOperationPermit(any(), anyString(), anyObject()); - AtomicBoolean closed = new AtomicBoolean(); - Releasable releasable = () -> { - if (closed.compareAndSet(false, true) == false) { - fail("releasable is closed twice"); - } - }; - TestAction action = new TestAction(Settings.EMPTY, "internal:testSeqNoIsSetOnPrimary", transportService, clusterService, shardStateAction, threadPool) { @@ -809,7 +806,7 @@ public class TransportReplicationActionTests extends ESTestCase { replicaOperationTransportHandler.messageReceived( new TransportReplicationAction.ConcreteReplicaRequest<>( new Request().setShardId(shardId), replicaRouting.allocationId().getId(), randomNonNegativeLong(), - randomNonNegativeLong()), + randomNonNegativeLong(), randomNonNegativeLong()), createTransportChannel(new PlainActionFuture<>()), task); } catch (ElasticsearchException e) { assertThat(e.getMessage(), containsString("simulated")); @@ -899,7 +896,7 @@ public class TransportReplicationActionTests extends ESTestCase { Request request = new Request(shardId).timeout("1ms"); action.new ReplicaOperationTransportHandler().messageReceived( new TransportReplicationAction.ConcreteReplicaRequest<>(request, "_not_a_valid_aid_", randomNonNegativeLong(), - randomNonNegativeLong()), + randomNonNegativeLong(), randomNonNegativeLong()), createTransportChannel(listener), maybeTask() ); try { @@ -943,8 +940,10 @@ public class TransportReplicationActionTests extends ESTestCase { final PlainActionFuture listener = new PlainActionFuture<>(); final Request request = new Request().setShardId(shardId); final long checkpoint = randomNonNegativeLong(); + final long maxSeqNoOfUpdatesOrDeletes = randomNonNegativeLong(); replicaOperationTransportHandler.messageReceived( - new TransportReplicationAction.ConcreteReplicaRequest<>(request, replica.allocationId().getId(), primaryTerm, checkpoint), + new TransportReplicationAction.ConcreteReplicaRequest<>(request, replica.allocationId().getId(), + primaryTerm, checkpoint, 
maxSeqNoOfUpdatesOrDeletes), createTransportChannel(listener), task); if (listener.isDone()) { listener.get(); // fail with the exception if there @@ -968,6 +967,8 @@ public class TransportReplicationActionTests extends ESTestCase { assertThat(capturedRequest.request, instanceOf(TransportReplicationAction.ConcreteReplicaRequest.class)); assertThat(((TransportReplicationAction.ConcreteReplicaRequest) capturedRequest.request).getGlobalCheckpoint(), equalTo(checkpoint)); + assertThat(((TransportReplicationAction.ConcreteReplicaRequest) capturedRequest.request).getMaxSeqNoOfUpdatesOrDeletes(), + equalTo(maxSeqNoOfUpdatesOrDeletes)); assertConcreteShardRequest(capturedRequest.request, request, replica.allocationId()); } @@ -1008,8 +1009,10 @@ public class TransportReplicationActionTests extends ESTestCase { final PlainActionFuture listener = new PlainActionFuture<>(); final Request request = new Request().setShardId(shardId); final long checkpoint = randomNonNegativeLong(); + final long maxSeqNoOfUpdates = randomNonNegativeLong(); replicaOperationTransportHandler.messageReceived( - new TransportReplicationAction.ConcreteReplicaRequest<>(request, replica.allocationId().getId(), primaryTerm, checkpoint), + new TransportReplicationAction.ConcreteReplicaRequest<>(request, replica.allocationId().getId(), + primaryTerm, checkpoint, maxSeqNoOfUpdates), createTransportChannel(listener), task); if (listener.isDone()) { listener.get(); // fail with the exception if there @@ -1112,8 +1115,6 @@ public class TransportReplicationActionTests extends ESTestCase { private class TestAction extends TransportReplicationAction { - private final boolean withDocumentFailureOnPrimary; - private final boolean withDocumentFailureOnReplica; TestAction(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ShardStateAction shardStateAction, @@ -1122,8 +1123,6 @@ public class TransportReplicationActionTests extends ESTestCase { shardStateAction, new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), Request::new, Request::new, ThreadPool.Names.SAME); - this.withDocumentFailureOnPrimary = false; - this.withDocumentFailureOnReplica = false; } TestAction(Settings settings, String actionName, TransportService transportService, @@ -1133,8 +1132,6 @@ public class TransportReplicationActionTests extends ESTestCase { shardStateAction, new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), Request::new, Request::new, ThreadPool.Names.SAME); - this.withDocumentFailureOnPrimary = withDocumentFailureOnPrimary; - this.withDocumentFailureOnReplica = withDocumentFailureOnReplica; } @Override @@ -1173,7 +1170,6 @@ public class TransportReplicationActionTests extends ESTestCase { Index index = (Index) invocation.getArguments()[0]; final ClusterState state = clusterService.state(); if (state.metaData().hasIndex(index.getName())) { - final IndexMetaData indexSafe = state.metaData().getIndexSafe(index); return mockIndexService(clusterService.state().metaData().getIndexSafe(index), clusterService); } else { return null; @@ -1205,7 +1201,7 @@ public class TransportReplicationActionTests extends ESTestCase { }).when(indexShard).acquirePrimaryOperationPermit(any(ActionListener.class), anyString(), anyObject()); doAnswer(invocation -> { long term = (Long)invocation.getArguments()[0]; - ActionListener callback = (ActionListener) invocation.getArguments()[2]; + ActionListener callback = (ActionListener) invocation.getArguments()[3]; final 
long primaryTerm = indexShard.getPendingPrimaryTerm(); if (term < primaryTerm) { throw new IllegalArgumentException(String.format(Locale.ROOT, "%s operation term [%d] is too old (current [%d])", @@ -1214,7 +1210,8 @@ public class TransportReplicationActionTests extends ESTestCase { count.incrementAndGet(); callback.onResponse(count::decrementAndGet); return null; - }).when(indexShard).acquireReplicaOperationPermit(anyLong(), anyLong(), any(ActionListener.class), anyString(), anyObject()); + }).when(indexShard) + .acquireReplicaOperationPermit(anyLong(), anyLong(), anyLong(), any(ActionListener.class), anyString(), anyObject()); when(indexShard.routingEntry()).thenAnswer(invocationOnMock -> { final ClusterState state = clusterService.state(); final RoutingNode node = state.getRoutingNodes().node(state.nodes().getLocalNodeId()); diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java index 6dbb02974e1..b1bb967a4b2 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java @@ -109,6 +109,7 @@ public class TransportWriteActionTests extends ESTestCase { clusterService = createClusterService(threadPool); } + @Override @After public void tearDown() throws Exception { super.tearDown(); @@ -277,7 +278,7 @@ public class TransportWriteActionTests extends ESTestCase { TestShardRouting.newShardRouting(shardId, "NOT THERE", routingState == ShardRoutingState.RELOCATING ? state.nodes().iterator().next().getId() : null, false, routingState), new TestRequest(), - randomNonNegativeLong(), listener); + randomNonNegativeLong(), randomNonNegativeLong(), listener); assertTrue(listener.isDone()); assertListenerThrows("non existent node should throw a NoNodeAvailableException", listener, NoNodeAvailableException.class); @@ -285,7 +286,7 @@ public class TransportWriteActionTests extends ESTestCase { final ShardRouting replica = randomFrom(shardRoutings.replicaShards().stream() .filter(ShardRouting::assignedToNode).collect(Collectors.toList())); listener = new PlainActionFuture<>(); - proxy.performOn(replica, new TestRequest(), randomNonNegativeLong(), listener); + proxy.performOn(replica, new TestRequest(), randomNonNegativeLong(), randomNonNegativeLong(), listener); assertFalse(listener.isDone()); CapturingTransport.CapturedRequest[] captures = transport.getCapturedRequestsAndClear(); @@ -430,7 +431,6 @@ public class TransportWriteActionTests extends ESTestCase { Index index = (Index) invocation.getArguments()[0]; final ClusterState state = clusterService.state(); if (state.metaData().hasIndex(index.getName())) { - final IndexMetaData indexSafe = state.metaData().getIndexSafe(index); return mockIndexService(clusterService.state().metaData().getIndexSafe(index), clusterService); } else { return null; @@ -462,7 +462,8 @@ public class TransportWriteActionTests extends ESTestCase { count.incrementAndGet(); callback.onResponse(count::decrementAndGet); return null; - }).when(indexShard).acquireReplicaOperationPermit(anyLong(), anyLong(), any(ActionListener.class), anyString(), anyObject()); + }).when(indexShard) + .acquireReplicaOperationPermit(anyLong(), anyLong(), anyLong(), any(ActionListener.class), anyString(), anyObject()); when(indexShard.routingEntry()).thenAnswer(invocationOnMock -> { final ClusterState state 
= clusterService.state(); final RoutingNode node = state.getRoutingNodes().node(state.nodes().getLocalNodeId()); diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index e7606ec0718..4be46c4fc9e 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -48,7 +48,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.common.inject.internal.Join; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESIntegTestCase; @@ -196,7 +195,7 @@ public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase { } Locale aLocale = new Locale("en", "US"); return String.format(aLocale, "(doc: %s\n requested: %s, fields: %s)", doc, requested, - selectedFields == null ? "NULL" : Join.join(",", selectedFields)); + selectedFields == null ? "NULL" : String.join(",", selectedFields)); } } diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index 8e6db7d7761..ff7697745da 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -132,7 +132,7 @@ public class UpdateRequestTests extends ESTestCase { return null; }); scripts.put("return", vars -> null); - final MockScriptEngine engine = new MockScriptEngine("mock", scripts); + final MockScriptEngine engine = new MockScriptEngine("mock", scripts, Collections.emptyMap()); Map engines = Collections.singletonMap(engine.getType(), engine); ScriptService scriptService = new ScriptService(baseSettings, engines, ScriptModule.CORE_CONTEXTS); final Settings settings = settings(Version.CURRENT).build(); diff --git a/server/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java b/server/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java index 9a964a97bd7..9e552829d81 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java +++ b/server/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java @@ -21,12 +21,12 @@ package org.elasticsearch.bootstrap; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.util.Constants; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; @@ -137,7 +137,7 @@ public class MaxMapCountCheckTests extends ESTestCase { reset(reader); final IOException ioException = new IOException("fatal"); when(reader.readLine()).thenThrow(ioException); - final Logger logger = ESLoggerFactory.getLogger("testGetMaxMapCountIOException"); + final Logger logger = 
LogManager.getLogger("testGetMaxMapCountIOException"); final MockLogAppender appender = new MockLogAppender(); appender.start(); appender.addExpectation( @@ -159,7 +159,7 @@ public class MaxMapCountCheckTests extends ESTestCase { { reset(reader); when(reader.readLine()).thenReturn("eof"); - final Logger logger = ESLoggerFactory.getLogger("testGetMaxMapCountNumberFormatException"); + final Logger logger = LogManager.getLogger("testGetMaxMapCountNumberFormatException"); final MockLogAppender appender = new MockLogAppender(); appender.start(); appender.addExpectation( diff --git a/server/src/test/java/org/elasticsearch/cluster/ClusterStateTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/ClusterStateTaskExecutorTests.java new file mode 100644 index 00000000000..5043ff2741a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/ClusterStateTaskExecutorTests.java @@ -0,0 +1,62 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.cluster; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; + +public class ClusterStateTaskExecutorTests extends ESTestCase { + + private class TestTask { + private final String description; + + TestTask(String description) { + this.description = description; + } + + @Override + public String toString() { + return description == null ? 
"" : "Task{" + description + "}"; + } + } + + public void testDescribeTasks() { + final ClusterStateTaskExecutor executor = (currentState, tasks) -> { + throw new AssertionError("should not be called"); + }; + + assertThat("describes an empty list", executor.describeTasks(Collections.emptyList()), equalTo("")); + assertThat("describes a singleton list", executor.describeTasks(Collections.singletonList(new TestTask("a task"))), + equalTo("Task{a task}")); + assertThat("describes a list of two tasks", + executor.describeTasks(Arrays.asList(new TestTask("a task"), new TestTask("another task"))), + equalTo("Task{a task}, Task{another task}")); + + assertThat("skips the only item if it has no description", executor.describeTasks(Collections.singletonList(new TestTask(null))), + equalTo("")); + assertThat("skips an item if it has no description", + executor.describeTasks(Arrays.asList( + new TestTask("a task"), new TestTask(null), new TestTask("another task"), new TestTask(null))), + equalTo("Task{a task}, Task{another task}")); + } +} diff --git a/server/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java b/server/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java index 274c48748fe..03340e211b4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.ack; +import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; @@ -49,6 +50,7 @@ import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; +@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/33673") @ClusterScope(scope = TEST, minNumDataNodes = 2) public class AckClusterUpdateSettingsIT extends ESIntegTestCase { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceIT.java b/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceIT.java index c8d5cdc6c86..c2ed291801c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceIT.java @@ -20,10 +20,10 @@ package org.elasticsearch.cluster.metadata; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -54,16 +54,15 @@ public class TemplateUpgradeServiceIT extends ESIntegTestCase { return Collections.singletonList(TestPlugin.class); } - public static class TestPlugin extends Plugin { + public static final class TestPlugin extends Plugin { // This setting is used to simulate cluster state updates static final Setting UPDATE_TEMPLATE_DUMMY_SETTING = Setting.intSetting("tests.update_template_count", 0, 
Setting.Property.NodeScope, Setting.Property.Dynamic); + private static final Logger logger = LogManager.getLogger(TestPlugin.class); - protected final Logger logger; protected final Settings settings; public TestPlugin(Settings settings) { - this.logger = Loggers.getLogger(getClass(), settings); this.settings = settings; } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java index 776a0a158ae..f78f8495806 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java @@ -63,7 +63,6 @@ import java.util.stream.IntStream; import static java.util.Collections.emptyMap; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; -import static org.elasticsearch.test.VersionUtils.randomVersion; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.Matchers.containsString; @@ -298,7 +297,7 @@ public class TemplateUpgradeServiceTests extends ESTestCase { return null; }).when(mockIndicesAdminClient).deleteTemplate(any(DeleteIndexTemplateRequest.class), any(ActionListener.class)); - final TemplateUpgradeService service = new TemplateUpgradeService(Settings.EMPTY, mockClient, clusterService, threadPool, + new TemplateUpgradeService(Settings.EMPTY, mockClient, clusterService, threadPool, Arrays.asList( templates -> { assertNull(templates.put("added_test_template", IndexTemplateMetaData.builder("added_test_template") @@ -415,42 +414,6 @@ public class TemplateUpgradeServiceTests extends ESTestCase { assertThat(finishInvocation.availablePermits(), equalTo(0)); } - private static final int NODE_TEST_ITERS = 100; - - private DiscoveryNodes randomNodes(int dataAndMasterNodes, int clientNodes) { - DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); - String masterNodeId = null; - for (int i = 0; i < dataAndMasterNodes; i++) { - String id = randomAlphaOfLength(10) + "_" + i; - Set roles; - if (i == 0) { - masterNodeId = id; - // The first node has to be master node - if (randomBoolean()) { - roles = EnumSet.of(DiscoveryNode.Role.MASTER, DiscoveryNode.Role.DATA); - } else { - roles = EnumSet.of(DiscoveryNode.Role.MASTER); - } - } else { - if (randomBoolean()) { - roles = EnumSet.of(DiscoveryNode.Role.DATA); - } else { - roles = EnumSet.of(DiscoveryNode.Role.MASTER); - } - } - String node = "node_" + i; - builder.add(new DiscoveryNode(node, id, buildNewFakeTransportAddress(), emptyMap(), roles, randomVersion(random()))); - } - builder.masterNodeId(masterNodeId); // Node 0 is always a master node - - for (int i = 0; i < clientNodes; i++) { - String node = "client_" + i; - builder.add(new DiscoveryNode(node, randomAlphaOfLength(10) + "__" + i, buildNewFakeTransportAddress(), emptyMap(), - EnumSet.noneOf(DiscoveryNode.Role.class), randomVersion(random()))); - } - return builder.build(); - } - public static MetaData randomMetaData(IndexTemplateMetaData... 
templates) { MetaData.Builder builder = MetaData.builder(); for (IndexTemplateMetaData template : templates) { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java index 98c8dc1b2ca..de3223517b9 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.cluster.routing; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; import org.elasticsearch.cluster.ClusterState; @@ -27,6 +26,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.node.ResponseCollectorService; @@ -530,7 +530,6 @@ public class OperationRoutingTests extends ESTestCase{ indexNames[i] = "test" + i; } ClusterState state = ClusterStateCreationUtils.stateWithAssignedPrimariesAndReplicas(indexNames, numShards, numReplicas); - final int numRepeatedSearches = 4; OperationRouting opRouting = new OperationRouting(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); opRouting.setUseAdaptiveReplicaSelection(true); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java index d4645208071..fbdcadc6ec3 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster.routing.allocation; import com.carrotsearch.hppc.cursors.ObjectCursor; + import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequest; @@ -149,8 +150,6 @@ public class FailedNodeRoutingTests extends ESAllocationTestCase { assertTrue(state.metaData().hasIndex(name)); } - ClusterState previousState = state; - logger.info("--> starting shards"); state = cluster.applyStartedShards(state, state.getRoutingNodes().shardsWithState(INITIALIZING)); logger.info("--> starting replicas a random number of times"); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 88766e7943e..711e7401ad2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -91,10 +91,6 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(5).numberOfReplicas(2)) .build(); - RoutingTable routingTable = RoutingTable.builder() - .addAsNew(metaData.index("test")) - .build(); 
- RoutingTable initialRoutingTable = RoutingTable.builder() .addAsNew(metaData.index("test")) .build(); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java index d226af26f81..ce26e41e053 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java @@ -86,7 +86,6 @@ public class RandomAllocationDeciderTests extends ESAllocationTestCase { int nodeIdCounter = 0; int atMostNodes = scaledRandomIntBetween(Math.max(1, maxNumReplicas), 15); final boolean frequentNodes = randomBoolean(); - AllocationService.CommandsResult routingResult; for (int i = 0; i < numIters; i++) { logger.info("Start iteration [{}]", i); ClusterState.Builder stateBuilder = ClusterState.builder(clusterState); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java index 0668ba41524..25d29d0fca4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ESAllocationTestCase; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; @@ -35,9 +34,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllo import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import java.util.ArrayList; import java.util.HashSet; -import java.util.List; import java.util.Set; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -238,7 +235,6 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { logger.info("Adding " + (numberOfIndices / 2) + " nodes"); DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(); - List nodes = new ArrayList<>(); for (int i = 0; i < (numberOfIndices / 2); i++) { nodesBuilder.add(newNode("node" + i)); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index 58d19fb61cf..d2e86c13d4f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -666,7 +666,6 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { } public void testFreeDiskPercentageAfterShardAssigned() { - RoutingNode rn = new RoutingNode("node1", newNode("node1")); DiskThresholdDecider decider = makeDecider(Settings.EMPTY); Map usages = new HashMap<>(); diff --git a/server/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java 
b/server/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java index 633e043ddd1..6d23866112d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java @@ -20,12 +20,12 @@ package org.elasticsearch.cluster.settings; import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequestBuilder; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.discovery.Discovery; @@ -355,7 +355,7 @@ public class ClusterSettingsIT extends ESIntegTestCase { public void testLoggerLevelUpdate() { assertAcked(prepareCreate("test")); - final Level level = ESLoggerFactory.getRootLogger().getLevel(); + final Level level = LogManager.getRootLogger().getLevel(); final IllegalArgumentException e = expectThrows( @@ -366,8 +366,8 @@ public class ClusterSettingsIT extends ESIntegTestCase { try { final Settings.Builder testSettings = Settings.builder().put("logger.test", "TRACE").put("logger._root", "trace"); client().admin().cluster().prepareUpdateSettings().setTransientSettings(testSettings).execute().actionGet(); - assertEquals(Level.TRACE, ESLoggerFactory.getLogger("test").getLevel()); - assertEquals(Level.TRACE, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(Level.TRACE, LogManager.getLogger("test").getLevel()); + assertEquals(Level.TRACE, LogManager.getRootLogger().getLevel()); } finally { if (randomBoolean()) { final Settings.Builder defaultSettings = Settings.builder().putNull("logger.test").putNull("logger._root"); @@ -376,8 +376,8 @@ public class ClusterSettingsIT extends ESIntegTestCase { final Settings.Builder defaultSettings = Settings.builder().putNull("logger.*"); client().admin().cluster().prepareUpdateSettings().setTransientSettings(defaultSettings).execute().actionGet(); } - assertEquals(level, ESLoggerFactory.getLogger("test").getLevel()); - assertEquals(level, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(level, LogManager.getLogger("test").getLevel()); + assertEquals(level, LogManager.getRootLogger().getLevel()); } } diff --git a/server/src/test/java/org/elasticsearch/common/ExponentiallyWeightedMovingAverageTests.java b/server/src/test/java/org/elasticsearch/common/ExponentiallyWeightedMovingAverageTests.java index 9e50d0afd71..9dc6faaa183 100644 --- a/server/src/test/java/org/elasticsearch/common/ExponentiallyWeightedMovingAverageTests.java +++ b/server/src/test/java/org/elasticsearch/common/ExponentiallyWeightedMovingAverageTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThan; -import static org.junit.Assert.assertThat; /** * Implements exponentially weighted moving averages (commonly abbreviated EWMA) for a single value. 
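The class exercised below maintains the standard EWMA recurrence s_t = alpha * x_t + (1 - alpha) * s_{t-1}, where alpha must lie in [0, 1]. A minimal standalone sketch of that recurrence and its alpha guard (an illustration only, not Elasticsearch's implementation; the error message is taken from the test hunk that follows):

--------------------------------------
// EWMA sketch: higher alpha weights recent samples more heavily.
public final class EwmaSketch {
    private final double alpha;
    private double average;

    public EwmaSketch(double alpha, double initialAverage) {
        if (alpha < 0 || alpha > 1) { // the guard testInvalidAlpha exercises
            throw new IllegalArgumentException(
                "alpha must be greater or equal to 0 and less than or equal to 1");
        }
        this.alpha = alpha;
        this.average = initialAverage;
    }

    public void addValue(double value) {
        average = alpha * value + (1 - alpha) * average;
    }

    public double getAverage() {
        return average;
    }
}
--------------------------------------

With such a guard in place, the try/fail/catch blocks in the hunk below collapse into single calls of the form `expectThrows(IllegalArgumentException.class, () -> new EwmaSketch(-0.5, 10))`.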
@@ -41,19 +40,11 @@ public class ExponentiallyWeightedMovingAverageTests extends ESTestCase { } public void testInvalidAlpha() { - try { - ExponentiallyWeightedMovingAverage ewma = new ExponentiallyWeightedMovingAverage(-0.5, 10); - fail("should have failed to create EWMA"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("alpha must be greater or equal to 0 and less than or equal to 1")); - } + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new ExponentiallyWeightedMovingAverage(-0.5, 10)); + assertThat(ex.getMessage(), equalTo("alpha must be greater or equal to 0 and less than or equal to 1")); - try { - ExponentiallyWeightedMovingAverage ewma = new ExponentiallyWeightedMovingAverage(1.5, 10); - fail("should have failed to create EWMA"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("alpha must be greater or equal to 0 and less than or equal to 1")); - } + ex = expectThrows(IllegalArgumentException.class, () -> new ExponentiallyWeightedMovingAverage(1.5, 10)); + assertThat(ex.getMessage(), equalTo("alpha must be greater or equal to 0 and less than or equal to 1")); } public void testConvergingToValue() { diff --git a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java index 5203aa07d28..61aa7dc4a46 100644 --- a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java +++ b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java @@ -71,6 +71,8 @@ public class JavaJodaTimeDuellingTests extends ESTestCase { public void testDuellingFormatsValidParsing() { assertSameDate("1522332219", "epoch_second"); + assertSameDate("1522332219.", "epoch_second"); + assertSameDate("1522332219.0", "epoch_second"); assertSameDate("0", "epoch_second"); assertSameDate("1", "epoch_second"); assertSameDate("-1", "epoch_second"); diff --git a/server/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java b/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java similarity index 94% rename from server/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java rename to server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java index 2fad9738cb5..61448ce15ea 100644 --- a/server/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java @@ -24,17 +24,17 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTimeZone; -import java.util.TimeZone; +import java.time.ZoneId; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.LongSupplier; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -public class DateMathParserTests extends ESTestCase { +public class JodaDateMathParserTests extends ESTestCase { FormatDateTimeFormatter formatter = Joda.forPattern("dateOptionalTime||epoch_millis"); - DateMathParser parser = new DateMathParser(formatter); + JodaDateMathParser parser = new JodaDateMathParser(formatter); void assertDateMathEquals(String toTest, String expected) { assertDateMathEquals(toTest, expected, 0, false, null); @@ -145,7 +145,7 @@ public class DateMathParserTests extends ESTestCase { public void testNow() { - final long now = parser.parse("2014-11-18T14:27:32", () -> 0, 
false, null); + final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null); assertDateMathEquals("now", "2014-11-18T14:27:32", now, false, null); assertDateMathEquals("now+M", "2014-12-18T14:27:32", now, false, null); @@ -159,13 +159,13 @@ public class DateMathParserTests extends ESTestCase { public void testRoundingPreservesEpochAsBaseDate() { // If a user only specifies times, then the date needs to always be 1970-01-01 regardless of rounding FormatDateTimeFormatter formatter = Joda.forPattern("HH:mm:ss"); - DateMathParser parser = new DateMathParser(formatter); + JodaDateMathParser parser = new JodaDateMathParser(formatter); assertEquals( this.formatter.parser().parseMillis("1970-01-01T04:52:20.000Z"), - parser.parse("04:52:20", () -> 0, false, null)); + parser.parse("04:52:20", () -> 0, false, (ZoneId) null)); assertEquals( this.formatter.parser().parseMillis("1970-01-01T04:52:20.999Z"), - parser.parse("04:52:20", () -> 0, true, null)); + parser.parse("04:52:20", () -> 0, true, (ZoneId) null)); } // Implicit rounding happening when parts of the date are not specified @@ -184,10 +184,10 @@ public class DateMathParserTests extends ESTestCase { // implicit rounding with explicit timezone in the date format FormatDateTimeFormatter formatter = Joda.forPattern("YYYY-MM-ddZ"); - DateMathParser parser = new DateMathParser(formatter); - long time = parser.parse("2011-10-09+01:00", () -> 0, false, null); + JodaDateMathParser parser = new JodaDateMathParser(formatter); + long time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null); assertEquals(this.parser.parse("2011-10-09T00:00:00.000+01:00", () -> 0), time); - time = parser.parse("2011-10-09+01:00", () -> 0, true, null); + time = parser.parse("2011-10-09+01:00", () -> 0, true, (ZoneId) null); assertEquals(this.parser.parse("2011-10-09T23:59:59.999+01:00", () -> 0), time); } @@ -258,7 +258,7 @@ public class DateMathParserTests extends ESTestCase { assertDateMathEquals("1418248078000||/m", "2014-12-10T21:47:00.000"); // also check other time units - DateMathParser parser = new DateMathParser(Joda.forPattern("epoch_second||dateOptionalTime")); + JodaDateMathParser parser = new JodaDateMathParser(Joda.forPattern("epoch_second||dateOptionalTime")); long datetime = parser.parse("1418248078", () -> 0); assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000"); @@ -298,16 +298,16 @@ public class DateMathParserTests extends ESTestCase { called.set(true); return 42L; }; - parser.parse("2014-11-18T14:27:32", now, false, null); + parser.parse("2014-11-18T14:27:32", now, false, (ZoneId) null); assertFalse(called.get()); - parser.parse("now/d", now, false, null); + parser.parse("now/d", now, false, (ZoneId) null); assertTrue(called.get()); } public void testThatUnixTimestampMayNotHaveTimeZone() { - DateMathParser parser = new DateMathParser(Joda.forPattern("epoch_millis")); + JodaDateMathParser parser = new JodaDateMathParser(Joda.forPattern("epoch_millis")); try { - parser.parse("1234567890123", () -> 42, false, DateTimeZone.forTimeZone(TimeZone.getTimeZone("CET"))); + parser.parse("1234567890123", () -> 42, false, ZoneId.of("CET")); fail("Expected ElasticsearchParseException"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), containsString("failed to parse date field")); diff --git a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 6766316fafd..10c58c562ad 100644 --- 
a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -19,12 +19,12 @@ package org.elasticsearch.common.settings; import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.index.IndexModule; @@ -152,7 +152,10 @@ public class ScopedSettingsTests extends ESTestCase { } try { - service.addSettingsUpdateConsumer(testSetting, testSetting2, (a, b) -> {consumer.set(a); consumer2.set(b);}); + service.addSettingsUpdateConsumer(testSetting, testSetting2, (a, b) -> { + consumer.set(a); + consumer2.set(b); + }); fail("setting not registered"); } catch (IllegalArgumentException ex) { assertEquals("Setting is not registered for key [foo.bar.baz]", ex.getMessage()); @@ -467,7 +470,10 @@ public class ScopedSettingsTests extends ESTestCase { AtomicInteger aC = new AtomicInteger(); AtomicInteger bC = new AtomicInteger(); - service.addSettingsUpdateConsumer(testSetting, testSetting2, (a, b) -> {aC.set(a); bC.set(b);}); + service.addSettingsUpdateConsumer(testSetting, testSetting2, (a, b) -> { + aC.set(a); + bC.set(b); + }); assertEquals(0, consumer.get()); assertEquals(0, consumer2.get()); @@ -899,8 +905,8 @@ public class ScopedSettingsTests extends ESTestCase { } public void testLoggingUpdates() { - final Level level = ESLoggerFactory.getRootLogger().getLevel(); - final Level testLevel = ESLoggerFactory.getLogger("test").getLevel(); + final Level level = LogManager.getRootLogger().getLevel(); + final Level testLevel = LogManager.getLogger("test").getLevel(); Level property = randomFrom(Level.values()); Settings.Builder builder = Settings.builder().put("logger.level", property); try { @@ -910,33 +916,33 @@ public class ScopedSettingsTests extends ESTestCase { IllegalArgumentException.class, () -> settings.validate(Settings.builder().put("logger._root", "boom").build(), false)); assertEquals("Unknown level constant [BOOM].", ex.getMessage()); - assertEquals(level, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(level, LogManager.getRootLogger().getLevel()); settings.applySettings(Settings.builder().put("logger._root", "TRACE").build()); - assertEquals(Level.TRACE, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(Level.TRACE, LogManager.getRootLogger().getLevel()); settings.applySettings(Settings.builder().build()); - assertEquals(property, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(property, LogManager.getRootLogger().getLevel()); settings.applySettings(Settings.builder().put("logger.test", "TRACE").build()); - assertEquals(Level.TRACE, ESLoggerFactory.getLogger("test").getLevel()); + assertEquals(Level.TRACE, LogManager.getLogger("test").getLevel()); settings.applySettings(Settings.builder().build()); - assertEquals(property, ESLoggerFactory.getLogger("test").getLevel()); + assertEquals(property, LogManager.getLogger("test").getLevel()); } finally { - Loggers.setLevel(ESLoggerFactory.getRootLogger(), level); - 
Loggers.setLevel(ESLoggerFactory.getLogger("test"), testLevel); + Loggers.setLevel(LogManager.getRootLogger(), level); + Loggers.setLevel(LogManager.getLogger("test"), testLevel); } } public void testFallbackToLoggerLevel() { - final Level level = ESLoggerFactory.getRootLogger().getLevel(); + final Level level = LogManager.getRootLogger().getLevel(); try { ClusterSettings settings = new ClusterSettings(Settings.builder().put("logger.level", "ERROR").build(), ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - assertEquals(level, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(level, LogManager.getRootLogger().getLevel()); settings.applySettings(Settings.builder().put("logger._root", "TRACE").build()); - assertEquals(Level.TRACE, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(Level.TRACE, LogManager.getRootLogger().getLevel()); settings.applySettings(Settings.builder().build()); // here we fall back to 'logger.level' which is our default. - assertEquals(Level.ERROR, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(Level.ERROR, LogManager.getRootLogger().getLevel()); } finally { - Loggers.setLevel(ESLoggerFactory.getRootLogger(), level); + Loggers.setLevel(LogManager.getRootLogger(), level); } } diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java index d9cecdd604c..70e958b974b 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -439,10 +439,7 @@ public class SettingsTests extends ESTestCase { Settings filteredSettings = builder.build().filter((k) -> false); assertEquals(0, filteredSettings.size()); - for (String k : filteredSettings.keySet()) { - fail("no element"); - } assertFalse(filteredSettings.keySet().contains("a.c")); assertFalse(filteredSettings.keySet().contains("a")); assertFalse(filteredSettings.keySet().contains("a.b")); diff --git a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java index f01db140a70..a052bf0bf42 100644 --- a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java +++ b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java @@ -21,39 +21,53 @@ package org.elasticsearch.common.time; import org.elasticsearch.test.ESTestCase; +import java.time.Instant; import java.time.ZoneId; -import java.time.ZonedDateTime; import java.time.format.DateTimeParseException; import java.time.temporal.TemporalAccessor; +import java.util.Locale; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; public class DateFormattersTests extends ESTestCase { public void testEpochMilliParser() { DateFormatter formatter = DateFormatters.forPattern("epoch_millis"); - DateTimeParseException e = expectThrows(DateTimeParseException.class, () -> formatter.parse("invalid")); assertThat(e.getMessage(), containsString("invalid number")); + } - // different zone, should still yield the same output, as epoch is time zone independent - ZoneId zoneId = randomZone(); - DateFormatter zonedFormatter = formatter.withZone(zoneId); + // this is not in the duelling tests, because the epoch second 
parser in joda time drops the milliseconds after the comma + // but is able to parse the rest + // as this feature is supported it also makes sense to make it exact + public void testEpochSecondParser() { + DateFormatter formatter = DateFormatters.forPattern("epoch_second"); - // test with negative and non negative values - assertThatSameDateTime(formatter, zonedFormatter, randomNonNegativeLong() * -1); - assertThatSameDateTime(formatter, zonedFormatter, randomNonNegativeLong()); - assertThatSameDateTime(formatter, zonedFormatter, 0); - assertThatSameDateTime(formatter, zonedFormatter, -1); - assertThatSameDateTime(formatter, zonedFormatter, 1); - - // format() output should be equal as well - assertSameFormat(formatter, randomNonNegativeLong() * -1); - assertSameFormat(formatter, randomNonNegativeLong()); - assertSameFormat(formatter, 0); - assertSameFormat(formatter, -1); - assertSameFormat(formatter, 1); + assertThat(Instant.from(formatter.parse("1234.567")).toEpochMilli(), is(1234567L)); + assertThat(Instant.from(formatter.parse("1234.")).getNano(), is(0)); + assertThat(Instant.from(formatter.parse("1234.")).getEpochSecond(), is(1234L)); + assertThat(Instant.from(formatter.parse("1234.1")).getNano(), is(100_000_000)); + assertThat(Instant.from(formatter.parse("1234.12")).getNano(), is(120_000_000)); + assertThat(Instant.from(formatter.parse("1234.123")).getNano(), is(123_000_000)); + assertThat(Instant.from(formatter.parse("1234.1234")).getNano(), is(123_400_000)); + assertThat(Instant.from(formatter.parse("1234.12345")).getNano(), is(123_450_000)); + assertThat(Instant.from(formatter.parse("1234.123456")).getNano(), is(123_456_000)); + assertThat(Instant.from(formatter.parse("1234.1234567")).getNano(), is(123_456_700)); + assertThat(Instant.from(formatter.parse("1234.12345678")).getNano(), is(123_456_780)); + assertThat(Instant.from(formatter.parse("1234.123456789")).getNano(), is(123_456_789)); + DateTimeParseException e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.1234567890")); + assertThat(e.getMessage(), is("too much granularity after dot [1234.1234567890]")); + e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.123456789013221")); + assertThat(e.getMessage(), is("too much granularity after dot [1234.123456789013221]")); + e = expectThrows(DateTimeParseException.class, () -> formatter.parse("abc")); + assertThat(e.getMessage(), is("invalid number [abc]")); + e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.abc")); + assertThat(e.getMessage(), is("invalid number [1234.abc]")); } public void testEpochMilliParsersWithDifferentFormatters() { @@ -63,16 +77,54 @@ public class DateFormattersTests extends ESTestCase { assertThat(formatter.pattern(), is("strict_date_optional_time||epoch_millis")); } - private void assertThatSameDateTime(DateFormatter formatter, DateFormatter zonedFormatter, long millis) { - String millisAsString = String.valueOf(millis); - ZonedDateTime formatterZonedDateTime = DateFormatters.toZonedDateTime(formatter.parse(millisAsString)); - ZonedDateTime zonedFormatterZonedDateTime = DateFormatters.toZonedDateTime(zonedFormatter.parse(millisAsString)); - assertThat(formatterZonedDateTime.toInstant().toEpochMilli(), is(zonedFormatterZonedDateTime.toInstant().toEpochMilli())); + public void testLocales() { + assertThat(DateFormatters.forPattern("strict_date_optional_time").getLocale(), is(Locale.ROOT)); + Locale locale = randomLocale(random()); + 
assertThat(DateFormatters.forPattern("strict_date_optional_time").withLocale(locale).getLocale(), is(locale)); + IllegalArgumentException e = + expectThrows(IllegalArgumentException.class, () -> DateFormatters.forPattern("epoch_millis").withLocale(locale)); + assertThat(e.getMessage(), is("epoch_millis date formatter can only be in locale ROOT")); + e = expectThrows(IllegalArgumentException.class, () -> DateFormatters.forPattern("epoch_second").withLocale(locale)); + assertThat(e.getMessage(), is("epoch_second date formatter can only be in locale ROOT")); } - private void assertSameFormat(DateFormatter formatter, long millis) { - String millisAsString = String.valueOf(millis); - TemporalAccessor accessor = formatter.parse(millisAsString); - assertThat(millisAsString, is(formatter.format(accessor))); + public void testTimeZones() { + // zone is null by default due to different behaviours between java8 and above + assertThat(DateFormatters.forPattern("strict_date_optional_time").getZone(), is(nullValue())); + ZoneId zoneId = randomZone(); + assertThat(DateFormatters.forPattern("strict_date_optional_time").withZone(zoneId).getZone(), is(zoneId)); + IllegalArgumentException e = + expectThrows(IllegalArgumentException.class, () -> DateFormatters.forPattern("epoch_millis").withZone(zoneId)); + assertThat(e.getMessage(), is("epoch_millis date formatter can only be in zone offset UTC")); + e = expectThrows(IllegalArgumentException.class, () -> DateFormatters.forPattern("epoch_second").withZone(zoneId)); + assertThat(e.getMessage(), is("epoch_second date formatter can only be in zone offset UTC")); + } + + public void testEqualsAndHashcode() { + assertThat(DateFormatters.forPattern("strict_date_optional_time"), + sameInstance(DateFormatters.forPattern("strict_date_optional_time"))); + assertThat(DateFormatters.forPattern("YYYY"), equalTo(DateFormatters.forPattern("YYYY"))); + assertThat(DateFormatters.forPattern("YYYY").hashCode(), + is(DateFormatters.forPattern("YYYY").hashCode())); + + // different timezone, thus not equals + assertThat(DateFormatters.forPattern("YYYY").withZone(ZoneId.of("CET")), not(equalTo(DateFormatters.forPattern("YYYY")))); + + // different locale, thus not equals + assertThat(DateFormatters.forPattern("YYYY").withLocale(randomLocale(random())), + not(equalTo(DateFormatters.forPattern("YYYY")))); + + // different pattern, thus not equals + assertThat(DateFormatters.forPattern("YYYY"), not(equalTo(DateFormatters.forPattern("YY")))); + + DateFormatter epochSecondFormatter = DateFormatters.forPattern("epoch_second"); + assertThat(epochSecondFormatter, sameInstance(DateFormatters.forPattern("epoch_second"))); + assertThat(epochSecondFormatter, equalTo(DateFormatters.forPattern("epoch_second"))); + assertThat(epochSecondFormatter.hashCode(), is(DateFormatters.forPattern("epoch_second").hashCode())); + + DateFormatter epochMillisFormatter = DateFormatters.forPattern("epoch_millis"); + assertThat(epochMillisFormatter.hashCode(), is(DateFormatters.forPattern("epoch_millis").hashCode())); + assertThat(epochMillisFormatter, sameInstance(DateFormatters.forPattern("epoch_millis"))); + assertThat(epochMillisFormatter, equalTo(DateFormatters.forPattern("epoch_millis"))); } } diff --git a/server/src/test/java/org/elasticsearch/common/time/DateUtilsTests.java b/server/src/test/java/org/elasticsearch/common/time/DateUtilsTests.java new file mode 100644 index 00000000000..8f36258c5fe --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/time/DateUtilsTests.java @@ -0,0 +1,54 @@ 
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.time;
+
+import org.elasticsearch.test.ESTestCase;
+import org.joda.time.DateTimeZone;
+
+import java.time.Instant;
+import java.time.ZoneId;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class DateUtilsTests extends ESTestCase {
+    private static final Set<String> IGNORE = new HashSet<>(Arrays.asList(
+        "Eire", "Europe/Dublin" // dublin timezone in joda does not account for DST
+    ));
+    public void testTimezoneIds() {
+        assertNull(DateUtils.dateTimeZoneToZoneId(null));
+        assertNull(DateUtils.zoneIdToDateTimeZone(null));
+        for (String jodaId : DateTimeZone.getAvailableIDs()) {
+            if (IGNORE.contains(jodaId)) continue;
+            DateTimeZone jodaTz = DateTimeZone.forID(jodaId);
+            ZoneId zoneId = DateUtils.dateTimeZoneToZoneId(jodaTz); // does not throw
+            long now = 0;
+            assertThat(jodaId, zoneId.getRules().getOffset(Instant.ofEpochMilli(now)).getTotalSeconds() * 1000,
+                equalTo(jodaTz.getOffset(now)));
+            if (DateUtils.DEPRECATED_SHORT_TIMEZONES.containsKey(jodaTz.getID())) {
+                assertWarnings("Use of short timezone id " + jodaId + " is deprecated. Use " + zoneId.getId() + " instead");
+            }
+            // roundtrip does not throw either
+            assertNotNull(DateUtils.zoneIdToDateTimeZone(zoneId));
+        }
+    }
+}
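The new `DateUtilsTests` above iterates every Joda zone ID and checks that the converted `java.time.ZoneId` agrees on the UTC offset at the epoch. A self-contained sketch of that ID-based round trip (`toZoneId` is a hypothetical stand-in, not the `DateUtils` implementation under test):

--------------------------------------
import org.joda.time.DateTimeZone;

import java.time.Instant;
import java.time.ZoneId;

class ZoneBridgeSketch {
    // Joda-Time and java.time share tzdb region IDs, so the ID is the bridge.
    static ZoneId toZoneId(DateTimeZone jodaTz) {
        return jodaTz == null ? null : ZoneId.of(jodaTz.getID());
    }

    public static void main(String[] args) {
        DateTimeZone jodaTz = DateTimeZone.forID("Europe/Berlin");
        ZoneId zoneId = toZoneId(jodaTz);
        long now = 0;
        // offsets agree at the epoch for well-behaved IDs; "Europe/Dublin" is
        // the documented exception above, since Joda models its DST differently
        int jodaOffsetMillis = jodaTz.getOffset(now);
        int javaOffsetMillis = zoneId.getRules().getOffset(Instant.ofEpochMilli(now)).getTotalSeconds() * 1000;
        System.out.println(jodaOffsetMillis == javaOffsetMillis);
    }
}
--------------------------------------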
Use " + zoneId.getId() + " instead"); + } + // roundtrip does not throw either + assertNotNull(DateUtils.zoneIdToDateTimeZone(zoneId)); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/common/time/DateMathParserTests.java b/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java similarity index 95% rename from server/src/test/java/org/elasticsearch/common/time/DateMathParserTests.java rename to server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java index 66e68b0aad0..a543af0445d 100644 --- a/server/src/test/java/org/elasticsearch/common/time/DateMathParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java @@ -33,10 +33,10 @@ import java.util.function.LongSupplier; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; -public class DateMathParserTests extends ESTestCase { +public class JavaDateMathParserTests extends ESTestCase { private final DateFormatter formatter = DateFormatters.forPattern("dateOptionalTime||epoch_millis"); - private final DateMathParser parser = new DateMathParser(formatter); + private final JavaDateMathParser parser = new JavaDateMathParser(formatter); public void testBasicDates() { assertDateMathEquals("2014", "2014-01-01T00:00:00.000"); @@ -125,7 +125,7 @@ public class DateMathParserTests extends ESTestCase { } public void testNow() { - final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, null); + final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null); assertDateMathEquals("now", "2014-11-18T14:27:32", now, false, null); assertDateMathEquals("now+M", "2014-12-18T14:27:32", now, false, null); @@ -139,14 +139,14 @@ public class DateMathParserTests extends ESTestCase { public void testRoundingPreservesEpochAsBaseDate() { // If a user only specifies times, then the date needs to always be 1970-01-01 regardless of rounding DateFormatter formatter = DateFormatters.forPattern("HH:mm:ss"); - DateMathParser parser = new DateMathParser(formatter); + JavaDateMathParser parser = new JavaDateMathParser(formatter); ZonedDateTime zonedDateTime = DateFormatters.toZonedDateTime(formatter.parse("04:52:20")); assertThat(zonedDateTime.getYear(), is(1970)); long millisStart = zonedDateTime.toInstant().toEpochMilli(); - assertEquals(millisStart, parser.parse("04:52:20", () -> 0, false, null)); + assertEquals(millisStart, parser.parse("04:52:20", () -> 0, false, (ZoneId) null)); // due to rounding up, we have to add the number of milliseconds here manually long millisEnd = DateFormatters.toZonedDateTime(formatter.parse("04:52:20")).toInstant().toEpochMilli() + 999; - assertEquals(millisEnd, parser.parse("04:52:20", () -> 0, true, null)); + assertEquals(millisEnd, parser.parse("04:52:20", () -> 0, true, (ZoneId) null)); } // Implicit rounding happening when parts of the date are not specified @@ -165,10 +165,10 @@ public class DateMathParserTests extends ESTestCase { // implicit rounding with explicit timezone in the date format DateFormatter formatter = DateFormatters.forPattern("yyyy-MM-ddXXX"); - DateMathParser parser = new DateMathParser(formatter); - long time = parser.parse("2011-10-09+01:00", () -> 0, false, null); + JavaDateMathParser parser = new JavaDateMathParser(formatter); + long time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null); assertEquals(this.parser.parse("2011-10-09T00:00:00.000+01:00", () -> 0), time); - time = parser.parse("2011-10-09+01:00", () -> 0, true, 
null); + time = parser.parse("2011-10-09+01:00", () -> 0, true, (ZoneId) null); assertEquals(this.parser.parse("2011-10-09T23:59:59.999+01:00", () -> 0), time); } @@ -239,7 +239,7 @@ public class DateMathParserTests extends ESTestCase { assertDateMathEquals("1418248078000||/m", "2014-12-10T21:47:00.000"); // also check other time units - DateMathParser parser = new DateMathParser(DateFormatters.forPattern("epoch_second||dateOptionalTime")); + JavaDateMathParser parser = new JavaDateMathParser(DateFormatters.forPattern("epoch_second||dateOptionalTime")); long datetime = parser.parse("1418248078", () -> 0); assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000"); @@ -279,9 +279,9 @@ public class DateMathParserTests extends ESTestCase { called.set(true); return 42L; }; - parser.parse("2014-11-18T14:27:32", now, false, null); + parser.parse("2014-11-18T14:27:32", now, false, (ZoneId) null); assertFalse(called.get()); - parser.parse("now/d", now, false, null); + parser.parse("now/d", now, false, (ZoneId) null); assertTrue(called.get()); } diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/AsyncIOProcessorTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/AsyncIOProcessorTests.java index 3bf8e450bd8..387f15e3f33 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/AsyncIOProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/AsyncIOProcessorTests.java @@ -39,7 +39,8 @@ public class AsyncIOProcessorTests extends ESTestCase { protected void write(List>> candidates) throws IOException { if (blockInternal) { synchronized (this) { - for (Tuple> c :candidates) { + // TODO: check why we need a loop, can't we just use received.addAndGet(candidates.size()) + for (int i = 0; i < candidates.size(); i++) { received.incrementAndGet(); } } @@ -142,8 +143,14 @@ public class AsyncIOProcessorTests extends ESTestCase { received.addAndGet(candidates.size()); } }; - processor.put(new Object(), (e) -> {notified.incrementAndGet();throw new RuntimeException();}); - processor.put(new Object(), (e) -> {notified.incrementAndGet();throw new RuntimeException();}); + processor.put(new Object(), (e) -> { + notified.incrementAndGet(); + throw new RuntimeException(); + }); + processor.put(new Object(), (e) -> { + notified.incrementAndGet(); + throw new RuntimeException(); + }); assertEquals(2, notified.get()); assertEquals(2, received.get()); } diff --git a/server/src/test/java/org/elasticsearch/common/util/iterable/IterablesTests.java b/server/src/test/java/org/elasticsearch/common/util/iterable/IterablesTests.java index 4d9d89312a3..6501c7caa1d 100644 --- a/server/src/test/java/org/elasticsearch/common/util/iterable/IterablesTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/iterable/IterablesTests.java @@ -19,14 +19,14 @@ package org.elasticsearch.common.util.iterable; +import org.elasticsearch.test.ESTestCase; + import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; -import org.elasticsearch.test.ESTestCase; - import static org.hamcrest.object.HasToString.hasToString; public class IterablesTests extends ESTestCase { @@ -64,7 +64,7 @@ public class IterablesTests extends ESTestCase { Iterable allInts = Iterables.flatten(list); int count = 0; - for(int x : allInts) { + for(@SuppressWarnings("unused") int x : allInts) { count++; } assertEquals(0, count); @@ -73,14 +73,14 @@ public class IterablesTests extends 
ESTestCase { // changes to the outer list are not seen since flatten pre-caches outer list on init: count = 0; - for(int x : allInts) { + for(@SuppressWarnings("unused") int x : allInts) { count++; } assertEquals(0, count); // but changes to the original inner lists are seen: list.get(0).add(0); - for(int x : allInts) { + for(@SuppressWarnings("unused") int x : allInts) { count++; } assertEquals(1, count); diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java index 3fb5f5996be..38e75b921fa 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java @@ -48,6 +48,7 @@ import org.joda.time.format.ISODateTimeFormat; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.math.BigDecimal; import java.math.BigInteger; import java.nio.file.Path; import java.time.DayOfWeek; @@ -266,6 +267,36 @@ public abstract class BaseXContentTestCase extends ESTestCase { .endObject()); } + public void testBigIntegers() throws Exception { + assertResult("{'bigint':null}", () -> builder().startObject().field("bigint", (BigInteger) null).endObject()); + assertResult("{'bigint':[]}", () -> builder().startObject().array("bigint", new BigInteger[]{}).endObject()); + + BigInteger bigInteger = BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE); + String result = "{'bigint':" + bigInteger.toString() + "}"; + assertResult(result, () -> builder().startObject().field("bigint", bigInteger).endObject()); + + result = "{'bigint':[" + bigInteger.toString() + "," + bigInteger.toString() + "," + bigInteger.toString() +"]}"; + assertResult(result, () -> builder() + .startObject() + .array("bigint", bigInteger, bigInteger, bigInteger) + .endObject()); + } + + public void testBigDecimals() throws Exception { + assertResult("{'bigdecimal':null}", () -> builder().startObject().field("bigdecimal", (BigInteger) null).endObject()); + assertResult("{'bigdecimal':[]}", () -> builder().startObject().array("bigdecimal", new BigInteger[]{}).endObject()); + + BigDecimal bigDecimal = new BigDecimal("234.43"); + String result = "{'bigdecimal':" + bigDecimal.toString() + "}"; + assertResult(result, () -> builder().startObject().field("bigdecimal", bigDecimal).endObject()); + + result = "{'bigdecimal':[" + bigDecimal.toString() + "," + bigDecimal.toString() + "," + bigDecimal.toString() +"]}"; + assertResult(result, () -> builder() + .startObject() + .array("bigdecimal", bigDecimal, bigDecimal, bigDecimal) + .endObject()); + } + public void testStrings() throws IOException { assertResult("{'string':null}", () -> builder().startObject().field("string", (String) null).endObject()); assertResult("{'string':'value'}", () -> builder().startObject().field("string", "value").endObject()); diff --git a/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java b/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java index c0b01eb5ec5..fa023882df5 100644 --- a/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java +++ b/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java @@ -87,6 +87,7 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase { private boolean disableBeforeIndexDeletion; + @Override @Before public void setUp() throws 
Exception { super.setUp(); @@ -240,7 +241,6 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase { public ServiceDisruptionScheme addRandomDisruptionScheme() { // TODO: add partial partitions - NetworkDisruption p; final DisruptedLinks disruptedLinks; if (randomBoolean()) { disruptedLinks = TwoPartitions.random(random(), internalCluster().getNodeNames()); diff --git a/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java b/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java index 33b6ffb9a75..3b08eb6870e 100644 --- a/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java @@ -288,7 +288,6 @@ public class ClusterDisruptionIT extends AbstractDisruptionTestCase { } // simulate handling of sending shard failure during an isolation - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33704") public void testSendingShardFailure() throws Exception { List nodes = startCluster(3, 2); String masterNode = internalCluster().getMasterName(); @@ -365,7 +364,7 @@ public class ClusterDisruptionIT extends AbstractDisruptionTestCase { public void testSearchWithRelocationAndSlowClusterStateProcessing() throws Exception { // don't use DEFAULT settings (which can cause node disconnects on a slow CI machine) configureCluster(Settings.EMPTY, 3, null, 1); - final String masterNode = internalCluster().startMasterOnlyNode(); + internalCluster().startMasterOnlyNode(); final String node_1 = internalCluster().startDataOnlyNode(); logger.info("--> creating index [test] with one shard and on replica"); diff --git a/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java index 0bf80e52398..0351a10dea3 100644 --- a/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java @@ -103,7 +103,6 @@ public class MetaDataStateFormatTests extends ESTestCase { final long id = addDummyFiles("foo-", dirs); Format format = new Format("foo-"); DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean()); - int version = between(0, Integer.MAX_VALUE/2); format.write(state, dirs); for (Path file : dirs) { Path[] list = content("*", file); @@ -117,7 +116,6 @@ public class MetaDataStateFormatTests extends ESTestCase { DummyState read = format.read(NamedXContentRegistry.EMPTY, list[0]); assertThat(read, equalTo(state)); } - final int version2 = between(version, Integer.MAX_VALUE); DummyState state2 = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean()); format.write(state2, dirs); @@ -145,7 +143,6 @@ public class MetaDataStateFormatTests extends ESTestCase { Format format = new Format("foo-"); DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean()); - int version = between(0, Integer.MAX_VALUE/2); format.write(state, dirs); for (Path file : dirs) { Path[] list = content("*", file); @@ -169,7 +166,6 @@ public class MetaDataStateFormatTests extends ESTestCase { final long id = addDummyFiles("foo-", dirs); Format format = new Format("foo-"); DummyState state = new 
DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean()); - int version = between(0, Integer.MAX_VALUE/2); format.write(state, dirs); for (Path file : dirs) { Path[] list = content("*", file); diff --git a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java index 1f2526b2e28..2c75437ee35 100644 --- a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java @@ -30,7 +30,6 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LogByteSizeMergePolicy; import org.apache.lucene.index.Term; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.join.BitSetProducer; @@ -38,9 +37,9 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BitSet; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; @@ -89,7 +88,6 @@ public class BitSetFilterCacheTests extends ESTestCase { DirectoryReader reader = DirectoryReader.open(writer); reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0)); - IndexSearcher searcher = new IndexSearcher(reader); BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, new BitsetFilterCache.Listener() { @Override @@ -114,7 +112,6 @@ public class BitSetFilterCacheTests extends ESTestCase { reader.close(); reader = DirectoryReader.open(writer); reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0)); - searcher = new IndexSearcher(reader); assertThat(matchCount(filter, reader), equalTo(3)); diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index ddb2b857486..c225b090816 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.codec; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode; @@ -30,7 +31,6 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.SegmentReader; import org.apache.lucene.store.Directory; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -95,7 +95,7 @@ public class CodecTests extends ESTestCase { MapperRegistry mapperRegistry = new MapperRegistry(Collections.emptyMap(), Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER); MapperService 
         MapperService service = new MapperService(settings, indexAnalyzers, xContentRegistry(), similarityService, mapperRegistry, () -> null);
-        return new CodecService(service, ESLoggerFactory.getLogger("test"));
+        return new CodecService(service, LogManager.getLogger("test"));
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
index 8f9d90154f8..26c2453a271 100644
--- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
+++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
@@ -19,42 +19,9 @@
 
 package org.elasticsearch.index.engine;
 
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.nio.charset.Charset;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Base64;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Queue;
-import java.util.Set;
-import java.util.concurrent.BrokenBarrierException;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.CyclicBarrier;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.function.BiFunction;
-import java.util.function.Function;
-import java.util.function.LongSupplier;
-import java.util.function.Supplier;
-import java.util.function.ToLongBiFunction;
-import java.util.stream.Collectors;
-import java.util.stream.LongStream;
-
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
+
 import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -148,6 +115,7 @@ import org.elasticsearch.index.mapper.SeqNoFieldMapper;
 import org.elasticsearch.index.mapper.SourceFieldMapper;
 import org.elasticsearch.index.seqno.LocalCheckpointTracker;
 import org.elasticsearch.index.seqno.ReplicationTracker;
+import org.elasticsearch.index.seqno.SeqNoStats;
 import org.elasticsearch.index.seqno.SequenceNumbers;
 import org.elasticsearch.index.shard.IndexSearcherWrapper;
 import org.elasticsearch.index.shard.ShardId;
@@ -161,6 +129,40 @@ import org.elasticsearch.test.IndexSettingsModule;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
 
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.nio.charset.Charset;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Base64;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Queue;
+import java.util.Set;
+import java.util.concurrent.BrokenBarrierException;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.CyclicBarrier;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.BiFunction;
+import java.util.function.Function;
+import java.util.function.LongSupplier;
+import java.util.function.Supplier;
+import java.util.function.ToLongBiFunction;
+import java.util.stream.Collectors;
+import java.util.stream.LongStream;
+
 import static java.util.Collections.emptyMap;
 import static java.util.Collections.shuffle;
 import static org.elasticsearch.index.engine.Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY;
@@ -661,6 +663,7 @@ public class InternalEngineTests extends EngineTestCase {
         trimUnsafeCommits(engine.config());
         engine = new InternalEngine(engine.config());
         assertTrue(engine.isRecovering());
+        engine.initializeMaxSeqNoOfUpdatesOrDeletes();
         engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
         Engine.Searcher searcher = wrapper.wrap(engine.acquireSearcher("test"));
         assertThat(counter.get(), equalTo(2));
@@ -678,6 +681,7 @@ public class InternalEngineTests extends EngineTestCase {
         engine = new InternalEngine(engine.config());
         expectThrows(IllegalStateException.class, () -> engine.flush(true, true));
         assertTrue(engine.isRecovering());
+        engine.initializeMaxSeqNoOfUpdatesOrDeletes();
         engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
         assertFalse(engine.isRecovering());
         doc = testParsedDocument("2", null, testDocumentWithTextField(), SOURCE, null);
@@ -707,7 +711,8 @@ public class InternalEngineTests extends EngineTestCase {
             IOUtils.close(engine);
         }
         trimUnsafeCommits(engine.config());
-        try (Engine recoveringEngine = new InternalEngine(engine.config())){
+        try (Engine recoveringEngine = new InternalEngine(engine.config())) {
+            recoveringEngine.initializeMaxSeqNoOfUpdatesOrDeletes();
             recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
             try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) {
                 final TotalHitCountCollector collector = new TotalHitCountCollector();
@@ -744,6 +749,7 @@ public class InternalEngineTests extends EngineTestCase {
                 }
             };
             assertThat(getTranslog(recoveringEngine).stats().getUncommittedOperations(), equalTo(docs));
+            recoveringEngine.initializeMaxSeqNoOfUpdatesOrDeletes();
             recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
             assertTrue(committed.get());
         } finally {
@@ -778,6 +784,7 @@ public class InternalEngineTests extends EngineTestCase {
             initialEngine.close();
             trimUnsafeCommits(initialEngine.config());
             recoveringEngine = new InternalEngine(initialEngine.config());
+            recoveringEngine.initializeMaxSeqNoOfUpdatesOrDeletes();
             recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
             try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) {
                 TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), docs);
@@ -811,6 +818,7 @@ public class InternalEngineTests extends EngineTestCase {
         }
         trimUnsafeCommits(config);
         try (InternalEngine engine = new InternalEngine(config)) {
+            engine.initializeMaxSeqNoOfUpdatesOrDeletes();
             engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
             assertThat(engine.getLocalCheckpoint(), equalTo(maxSeqNo));
             assertThat(engine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(maxSeqNo));
@@ -818,6 +826,7 @@ public class InternalEngineTests extends EngineTestCase {
         trimUnsafeCommits(config);
         try (InternalEngine engine = new InternalEngine(config)) {
             long upToSeqNo = randomLongBetween(globalCheckpoint.get(), maxSeqNo);
+            engine.initializeMaxSeqNoOfUpdatesOrDeletes();
             engine.recoverFromTranslog(translogHandler, upToSeqNo);
             assertThat(engine.getLocalCheckpoint(), equalTo(upToSeqNo));
             assertThat(engine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(upToSeqNo));
@@ -1202,6 +1211,7 @@ public class InternalEngineTests extends EngineTestCase {
         }
         trimUnsafeCommits(config);
         engine = new InternalEngine(config);
+        engine.initializeMaxSeqNoOfUpdatesOrDeletes();
         engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
         assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
     }
@@ -1221,6 +1231,7 @@ public class InternalEngineTests extends EngineTestCase {
         engine.close();
         trimUnsafeCommits(config);
         engine = new InternalEngine(config);
+        engine.initializeMaxSeqNoOfUpdatesOrDeletes();
         engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
         assertNull("Sync ID must be gone since we have a document to replay",
             engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID));
     }
@@ -2196,7 +2207,8 @@ public class InternalEngineTests extends EngineTestCase {
         }
 
         trimUnsafeCommits(initialEngine.engineConfig);
-        try (InternalEngine recoveringEngine = new InternalEngine(initialEngine.config())){
+        try (InternalEngine recoveringEngine = new InternalEngine(initialEngine.config())) {
+            recoveringEngine.initializeMaxSeqNoOfUpdatesOrDeletes();
             recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
 
             assertEquals(primarySeqNo, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
@@ -2540,6 +2552,7 @@ public class InternalEngineTests extends EngineTestCase {
             assertEquals("3", userData.get(Translog.TRANSLOG_GENERATION_KEY));
         }
         assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
+        engine.initializeMaxSeqNoOfUpdatesOrDeletes();
         engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
         userData = engine.getLastCommittedSegmentInfos().getUserData();
         assertEquals("3", userData.get(Translog.TRANSLOG_GENERATION_KEY));
@@ -2557,6 +2570,7 @@ public class InternalEngineTests extends EngineTestCase {
         Map<String, String> userData = engine.getLastCommittedSegmentInfos().getUserData();
         assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY));
         assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
+        engine.initializeMaxSeqNoOfUpdatesOrDeletes();
         engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
         assertEquals(2, engine.getTranslog().currentFileGeneration());
         assertEquals(0L, engine.getTranslog().stats().getUncommittedOperations());
@@ -2571,6 +2585,7 @@ public class InternalEngineTests extends EngineTestCase {
         Map<String, String> userData = engine.getLastCommittedSegmentInfos().getUserData();
         assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY));
         assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
+        engine.initializeMaxSeqNoOfUpdatesOrDeletes();
         engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
         userData = engine.getLastCommittedSegmentInfos().getUserData();
         assertEquals("no changes - nothing to commit", "1", userData.get(Translog.TRANSLOG_GENERATION_KEY));
@@ -2677,6 +2692,7 @@ public class InternalEngineTests extends EngineTestCase {
                     }
                 }
             }) {
+            engine.initializeMaxSeqNoOfUpdatesOrDeletes();
             engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
             final ParsedDocument doc1 = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null);
             engine.index(indexForDoc(doc1));
@@ -2688,6 +2704,7 @@ public class InternalEngineTests extends EngineTestCase {
         try (InternalEngine engine = new InternalEngine(config(indexSettings, store, translogPath, newMergePolicy(), null, null, globalCheckpointSupplier))) {
+            engine.initializeMaxSeqNoOfUpdatesOrDeletes();
             engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
             assertVisibleCount(engine, 1);
             final long committedGen = Long.valueOf(
@@ -2755,6 +2772,7 @@ public class InternalEngineTests extends EngineTestCase {
         engine.close();
         trimUnsafeCommits(copy(engine.config(), inSyncGlobalCheckpointSupplier));
         engine = new InternalEngine(copy(engine.config(), inSyncGlobalCheckpointSupplier)); // reuse the engine config, otherwise parser.mappingModified won't work
+        engine.initializeMaxSeqNoOfUpdatesOrDeletes();
         engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
 
         assertVisibleCount(engine, numDocs, false);
@@ -2846,11 +2864,7 @@ public class InternalEngineTests extends EngineTestCase {
             IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5), config.getExternalRefreshListener(),
             config.getInternalRefreshListener(), null, new NoneCircuitBreakerService(), () -> SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm::get,
             tombstoneDocSupplier());
-        try {
-            InternalEngine internalEngine = new InternalEngine(brokenConfig);
-            fail("translog belongs to a different engine");
-        } catch (EngineCreationFailureException ex) {
-        }
+        expectThrows(EngineCreationFailureException.class, () -> new InternalEngine(brokenConfig));
 
         engine = createEngine(store, primaryTranslogDir); // and recover again!
         assertVisibleCount(engine, numDocs, false);
@@ -3461,8 +3475,10 @@ public class InternalEngineTests extends EngineTestCase {
             engine.index(appendOnlyPrimary(doc, true, timestamp1));
             assertEquals(timestamp1, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp());
         }
-        try (Store store = createStore(newFSDirectory(storeDir)); Engine engine = new InternalEngine(configSupplier.apply(store))) {
+        try (Store store = createStore(newFSDirectory(storeDir));
+             InternalEngine engine = new InternalEngine(configSupplier.apply(store))) {
             assertEquals(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp());
+            engine.initializeMaxSeqNoOfUpdatesOrDeletes();
             engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
             assertEquals(timestamp1, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp());
             final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(),
@@ -3536,6 +3552,8 @@ public class InternalEngineTests extends EngineTestCase {
         }
         assertEquals(0, engine.getNumVersionLookups());
         assertEquals(0, engine.getNumIndexVersionsLookups());
+        assertThat(engine.getMaxSeenAutoIdTimestamp(),
+            equalTo(docs.stream().mapToLong(Engine.Index::getAutoGeneratedIdTimestamp).max().getAsLong()));
         assertLuceneOperations(engine, numDocs, 0, 0);
     }
@@ -3746,6 +3764,7 @@ public class InternalEngineTests extends EngineTestCase {
         }
         trimUnsafeCommits(initialEngine.config());
         try (Engine recoveringEngine = new InternalEngine(initialEngine.config())) {
+            recoveringEngine.initializeMaxSeqNoOfUpdatesOrDeletes();
             recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
             recoveringEngine.fillSeqNoGaps(2);
             assertThat(recoveringEngine.getLocalCheckpoint(), greaterThanOrEqualTo((long) (docs - 1)));
@@ -3857,6 +3876,7 @@ public class InternalEngineTests extends EngineTestCase {
                     throw new UnsupportedOperationException();
                 }
             };
+            noOpEngine.initializeMaxSeqNoOfUpdatesOrDeletes();
             noOpEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
             final int gapsFilled = noOpEngine.fillSeqNoGaps(primaryTerm.get());
             final String reason = "filling gaps";
@@ -4047,56 +4067,53 @@ public class InternalEngineTests extends EngineTestCase {
         }
     }
 
-    public void testRestoreLocalCheckpointFromTranslog() throws IOException {
-        engine.close();
-        InternalEngine actualEngine = null;
-        try {
-            final Set<Long> completedSeqNos = new HashSet<>();
-            final BiFunction<Long, Long, LocalCheckpointTracker> supplier = (maxSeqNo, localCheckpoint) -> new LocalCheckpointTracker(
-                maxSeqNo,
-                localCheckpoint) {
-                @Override
-                public void markSeqNoAsCompleted(long seqNo) {
-                    super.markSeqNoAsCompleted(seqNo);
-                    completedSeqNos.add(seqNo);
-                }
-            };
-            trimUnsafeCommits(engine.config());
-            actualEngine = new InternalEngine(engine.config(), supplier);
-            final int operations = randomIntBetween(0, 1024);
-            final Set<Long> expectedCompletedSeqNos = new HashSet<>();
-            for (int i = 0; i < operations; i++) {
-                if (rarely() && i < operations - 1) {
+    public void testRestoreLocalHistoryFromTranslog() throws IOException {
+        final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
+        try (Store store = createStore()) {
+            final ArrayList<Long> seqNos = new ArrayList<>();
+            final int numOps = randomIntBetween(0, 1024);
+            for (int i = 0; i < numOps; i++) {
+                if (rarely()) {
                     continue;
                 }
-                expectedCompletedSeqNos.add((long) i);
+                seqNos.add((long) i);
             }
-
-            final ArrayList<Long> seqNos = new ArrayList<>(expectedCompletedSeqNos);
             Randomness.shuffle(seqNos);
-            for (final long seqNo : seqNos) {
-                final String id = Long.toString(seqNo);
-                final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null);
-                final Term uid = newUid(doc);
-                final long time = System.nanoTime();
-                actualEngine.index(new Engine.Index(uid, doc, seqNo, 1, 1, null, REPLICA, time, time, false));
-                if (rarely()) {
-                    actualEngine.rollTranslogGeneration();
+            final EngineConfig engineConfig;
+            final SeqNoStats prevSeqNoStats;
+            final List<DocIdSeqNoAndTerm> prevDocs;
+            final int totalTranslogOps;
+            try (InternalEngine engine = createEngine(store, createTempDir(), globalCheckpoint::get)) {
+                engineConfig = engine.config();
+                for (final long seqNo : seqNos) {
+                    final String id = Long.toString(seqNo);
+                    final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null);
+                    engine.index(replicaIndexForDoc(doc, 1, seqNo, false));
+                    if (rarely()) {
+                        engine.rollTranslogGeneration();
+                    }
+                    if (rarely()) {
+                        engine.flush();
+                    }
                 }
+                globalCheckpoint.set(randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, engine.getLocalCheckpoint()));
+                engine.syncTranslog();
+                prevSeqNoStats = engine.getSeqNoStats(globalCheckpoint.get());
+                prevDocs = getDocIds(engine, true);
+                totalTranslogOps = engine.getTranslog().totalOperations();
             }
-            final long currentLocalCheckpoint = actualEngine.getLocalCheckpoint();
-            final long resetLocalCheckpoint =
-                randomIntBetween(Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED), Math.toIntExact(currentLocalCheckpoint));
-            actualEngine.getLocalCheckpointTracker().resetCheckpoint(resetLocalCheckpoint);
-            completedSeqNos.clear();
-            actualEngine.restoreLocalCheckpointFromTranslog();
-            final Set<Long> intersection = new HashSet<>(expectedCompletedSeqNos);
-            intersection.retainAll(LongStream.range(resetLocalCheckpoint + 1, operations).boxed().collect(Collectors.toSet()));
-            assertThat(completedSeqNos, equalTo(intersection));
-            assertThat(actualEngine.getLocalCheckpoint(), equalTo(currentLocalCheckpoint));
-            assertThat(generateNewSeqNo(actualEngine), equalTo((long) operations));
-        } finally {
-            IOUtils.close(actualEngine);
+            trimUnsafeCommits(engineConfig);
+            try (InternalEngine engine = new InternalEngine(engineConfig)) {
+                engine.initializeMaxSeqNoOfUpdatesOrDeletes();
+                engine.recoverFromTranslog(translogHandler, globalCheckpoint.get());
+                engine.restoreLocalHistoryFromTranslog(translogHandler);
+                assertThat(getDocIds(engine, true), equalTo(prevDocs));
+                SeqNoStats seqNoStats = engine.getSeqNoStats(globalCheckpoint.get());
+                assertThat(seqNoStats.getLocalCheckpoint(), equalTo(prevSeqNoStats.getLocalCheckpoint()));
+                assertThat(seqNoStats.getMaxSeqNo(), equalTo(prevSeqNoStats.getMaxSeqNo()));
+                assertThat(engine.getTranslog().totalOperations(), equalTo(totalTranslogOps));
+            }
+            assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService("test"));
         }
     }
@@ -4135,6 +4152,7 @@ public class InternalEngineTests extends EngineTestCase {
         trimUnsafeCommits(copy(replicaEngine.config(), globalCheckpoint::get));
         recoveringEngine = new InternalEngine(copy(replicaEngine.config(), globalCheckpoint::get));
         assertEquals(numDocsOnReplica, getTranslog(recoveringEngine).stats().getUncommittedOperations());
+        recoveringEngine.initializeMaxSeqNoOfUpdatesOrDeletes();
         recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
         assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
         assertEquals(checkpointOnReplica, recoveringEngine.getLocalCheckpoint());
@@ -4171,6 +4189,7 @@ public class InternalEngineTests extends EngineTestCase {
             if (flushed) {
                 assertThat(recoveringEngine.getTranslogStats().getUncommittedOperations(), equalTo(0));
             }
+            recoveringEngine.initializeMaxSeqNoOfUpdatesOrDeletes();
             recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
             assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
             assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpoint());
@@ -4353,7 +4372,7 @@ public class InternalEngineTests extends EngineTestCase {
         final EngineConfig engineConfig = config(indexSettings, store, translogPath, NoMergePolicy.INSTANCE, null, null,
             () -> globalCheckpoint.get());
-        try (Engine engine = new InternalEngine(engineConfig) {
+        try (InternalEngine engine = new InternalEngine(engineConfig) {
             @Override
             protected void commitIndexWriter(IndexWriter writer, Translog translog, String syncId) throws IOException {
                 // Advance the global checkpoint during the flush to create a lag between a persisted global checkpoint in the translog
@@ -4364,6 +4383,7 @@ public class InternalEngineTests extends EngineTestCase {
                 super.commitIndexWriter(writer, translog, syncId);
             }
         }) {
+            engine.initializeMaxSeqNoOfUpdatesOrDeletes();
             engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
             int numDocs = scaledRandomIntBetween(10, 100);
             for (int docId = 0; docId < numDocs; docId++) {
@@ -5033,6 +5053,34 @@ public class InternalEngineTests extends EngineTestCase {
         expectThrows(AlreadyClosedException.class, () -> engine.acquireSearcher("test"));
     }
 
+    public void testTrackMaxSeqNoOfUpdatesOrDeletesOnPrimary() throws Exception {
+        engine.close();
+        Set<String> liveDocIds = new HashSet<>();
+        engine = new InternalEngine(engine.config());
+        assertThat(engine.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(-2L));
+        engine.initializeMaxSeqNoOfUpdatesOrDeletes();
+        int numOps = between(1, 500);
+        for (int i = 0; i < numOps; i++) {
+            long currentMaxSeqNoOfUpdates = engine.getMaxSeqNoOfUpdatesOrDeletes();
+            ParsedDocument doc = createParsedDoc(Integer.toString(between(1, 100)), null);
+            if (randomBoolean()) {
+                Engine.IndexResult result = engine.index(indexForDoc(doc));
+                if (liveDocIds.add(doc.id()) == false) {
+                    assertThat("update operations on primary must advance max_seq_no_of_updates",
+                        engine.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(Math.max(currentMaxSeqNoOfUpdates, result.getSeqNo())));
+                } else {
+                    assertThat("append operations should not advance max_seq_no_of_updates",
+                        engine.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(currentMaxSeqNoOfUpdates));
+                }
+            } else {
+                Engine.DeleteResult result = engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get()));
+                liveDocIds.remove(doc.id());
+                assertThat("delete operations on primary must advance max_seq_no_of_updates",
+                    engine.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(Math.max(currentMaxSeqNoOfUpdates, result.getSeqNo())));
+            }
+        }
+    }
+
     static void trimUnsafeCommits(EngineConfig config) throws IOException {
         final Store store = config.getStore();
         final TranslogConfig translogConfig = config.getTranslogConfig();
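The same two-line bootstrap recurs in every engine test touched above. As a reading aid, here is a minimal sketch of the new open-and-recover sequence, assuming the production EngineConfig and Engine.TranslogRecoveryRunner types; the openAndRecover helper name is ours, for illustration only:

    import java.io.IOException;
    import org.elasticsearch.index.engine.Engine;
    import org.elasticsearch.index.engine.EngineConfig;
    import org.elasticsearch.index.engine.InternalEngine;

    final class RecoveryBootstrapSketch {
        // Every recoverFromTranslog(...) call in the patch is now preceded by
        // initializeMaxSeqNoOfUpdatesOrDeletes(), which seeds the engine's
        // max_seq_no_of_updates/deletes from the last commit before any
        // translog operations are replayed.
        static InternalEngine openAndRecover(EngineConfig config, Engine.TranslogRecoveryRunner runner) throws IOException {
            InternalEngine engine = new InternalEngine(config);
            engine.initializeMaxSeqNoOfUpdatesOrDeletes();
            engine.recoverFromTranslog(runner, Long.MAX_VALUE);
            return engine;
        }
    }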
diff --git a/server/src/test/java/org/elasticsearch/index/engine/ReadOnlyEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/ReadOnlyEngineTests.java
index 4080dd33d53..90469d71944 100644
--- a/server/src/test/java/org/elasticsearch/index/engine/ReadOnlyEngineTests.java
+++ b/server/src/test/java/org/elasticsearch/index/engine/ReadOnlyEngineTests.java
@@ -95,6 +95,7 @@ public class ReadOnlyEngineTests extends EngineTestCase {
             // Close and reopen the main engine
             InternalEngineTests.trimUnsafeCommits(config);
             try (InternalEngine recoveringEngine = new InternalEngine(config)) {
+                recoveringEngine.initializeMaxSeqNoOfUpdatesOrDeletes();
                 recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
                 // the locked down engine should still point to the previous commit
                 assertThat(readOnlyEngine.getLocalCheckpoint(), equalTo(lastSeqNoStats.getLocalCheckpoint()));
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java
index e3739eed336..cc09ae16c05 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java
@@ -18,9 +18,11 @@
  */
 package org.elasticsearch.index.mapper;
 
+import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.suggest.document.CompletionAnalyzer;
+import org.apache.lucene.search.suggest.document.ContextSuggestField;
 import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery;
 import org.apache.lucene.search.suggest.document.PrefixCompletionQuery;
 import org.apache.lucene.search.suggest.document.RegexCompletionQuery;
@@ -42,11 +44,18 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.hamcrest.FeatureMatcher;
+import org.hamcrest.Matcher;
+import org.hamcrest.Matchers;
+import org.hamcrest.core.CombinableMatcher;
 
 import java.io.IOException;
 import java.util.Map;
+import java.util.function.Function;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
+import static org.hamcrest.Matchers.arrayWithSize;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
@@ -182,6 +191,328 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
         assertEquals("failed to parse [completion]: expected text or object, but got VALUE_NUMBER", e.getCause().getMessage());
     }
 
+    public void testKeywordWithSubCompletionAndContext() throws Exception {
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
+            .startObject("properties")
+            .startObject("keywordfield")
+            .field("type", "keyword")
+            .startObject("fields")
+            .startObject("subsuggest")
+            .field("type", "completion")
+            .startArray("contexts")
+            .startObject()
+            .field("name","place_type")
+            .field("type","category")
+            .field("path","cat")
+            .endObject()
+            .endArray()
+            .endObject()
+            .endObject()
+            .endObject().endObject()
+            .endObject().endObject()
+        );
+
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+
+        ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                    .startObject()
+                    .array("keywordfield", "key1", "key2", "key3")
+                    .endObject()),
+            XContentType.JSON));
+
+        ParseContext.Document indexableFields = parsedDocument.rootDoc();
+
+        assertThat(indexableFields.getFields("keywordfield"), arrayContainingInAnyOrder(
+            keywordField("key1"),
+            sortedSetDocValuesField("key1"),
+            keywordField("key2"),
+            sortedSetDocValuesField("key2"),
+            keywordField("key3"),
+            sortedSetDocValuesField("key3")
+        ));
+        assertThat(indexableFields.getFields("keywordfield.subsuggest"), arrayContainingInAnyOrder(
+            contextSuggestField("key1"),
+            contextSuggestField("key2"),
+            contextSuggestField("key3")
+        ));
+    }
+
+    public void testCompletionWithContextAndSubCompletion() throws Exception {
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
+            .startObject("properties")
+            .startObject("suggest")
+            .field("type", "completion")
+            .startArray("contexts")
+            .startObject()
+            .field("name","place_type")
+            .field("type","category")
+            .field("path","cat")
+            .endObject()
+            .endArray()
+            .startObject("fields")
+            .startObject("subsuggest")
+            .field("type", "completion")
+            .startArray("contexts")
+            .startObject()
+            .field("name","place_type")
+            .field("type","category")
+            .field("path","cat")
+            .endObject()
+            .endArray()
+            .endObject()
+            .endObject()
+            .endObject().endObject()
+            .endObject().endObject()
+        );
+
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+
+        ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                    .startObject()
+                    .startObject("suggest")
+                    .array("input","timmy","starbucks")
+                    .startObject("contexts")
+                    .array("place_type","cafe","food")
+                    .endObject()
+                    .field("weight", 3)
+                    .endObject()
+                    .endObject()),
+            XContentType.JSON));
+
+        ParseContext.Document indexableFields = parsedDocument.rootDoc();
+        assertThat(indexableFields.getFields("suggest"), arrayContainingInAnyOrder(
+            contextSuggestField("timmy"),
+            contextSuggestField("starbucks")
+        ));
+        assertThat(indexableFields.getFields("suggest.subsuggest"), arrayContainingInAnyOrder(
+            contextSuggestField("timmy"),
+            contextSuggestField("starbucks")
+        ));
+        //unable to assert about context, covered in a REST test
+    }
+
+    public void testCompletionWithContextAndSubCompletionIndexByPath() throws Exception {
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
+            .startObject("properties")
+            .startObject("suggest")
+            .field("type", "completion")
+            .startArray("contexts")
+            .startObject()
+            .field("name","place_type")
+            .field("type","category")
+            .field("path","cat")
+            .endObject()
+            .endArray()
+            .startObject("fields")
+            .startObject("subsuggest")
+            .field("type", "completion")
+            .startArray("contexts")
+            .startObject()
+            .field("name","place_type")
+            .field("type","category")
+            .field("path","cat")
+            .endObject()
+            .endArray()
+            .endObject()
+            .endObject()
+            .endObject().endObject()
+            .endObject().endObject()
+        );
+
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+
+        ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                    .startObject()
+                    .array("suggest", "timmy","starbucks")
+                    .array("cat","cafe","food")
+                    .endObject()),
+            XContentType.JSON));
+
+        ParseContext.Document indexableFields = parsedDocument.rootDoc();
+        assertThat(indexableFields.getFields("suggest"), arrayContainingInAnyOrder(
+            contextSuggestField("timmy"),
+            contextSuggestField("starbucks")
+        ));
+        assertThat(indexableFields.getFields("suggest.subsuggest"), arrayContainingInAnyOrder(
+            contextSuggestField("timmy"),
+            contextSuggestField("starbucks")
+        ));
+        //unable to assert about context, covered in a REST test
+    }
+
+
+    public void testKeywordWithSubCompletionAndStringInsert() throws Exception {
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
+            .startObject("properties").startObject("geofield")
+            .field("type", "geo_point")
+            .startObject("fields")
+            .startObject("analyzed")
+            .field("type", "completion")
+            .endObject()
+            .endObject()
+            .endObject().endObject()
+            .endObject().endObject()
+        );
+
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+
+        ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                    .startObject()
+                    .field("geofield", "drm3btev3e86") // geo hash for "41.12,-71.34"
+                    .endObject()),
+            XContentType.JSON));
+
+        ParseContext.Document indexableFields = parsedDocument.rootDoc();
+        assertThat(indexableFields.getFields("geofield"), arrayWithSize(2));
+        assertThat(indexableFields.getFields("geofield.analyzed"), arrayContainingInAnyOrder(
+            suggestField("drm3btev3e86")
+        ));
+        //unable to assert about geofield content, covered in a REST test
+    }
+
+    public void testCompletionTypeWithSubCompletionFieldAndStringInsert() throws Exception {
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
+            .startObject("properties").startObject("suggest")
+            .field("type", "completion")
+            .startObject("fields")
+            .startObject("subsuggest")
+            .field("type", "completion")
+            .endObject()
+            .endObject()
+            .endObject().endObject()
+            .endObject().endObject()
+        );
+
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+
+        ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                    .startObject()
+                    .field("suggest", "suggestion")
+                    .endObject()),
+            XContentType.JSON));
+
+        ParseContext.Document indexableFields = parsedDocument.rootDoc();
+        assertThat(indexableFields.getFields("suggest"), arrayContainingInAnyOrder(
+            suggestField("suggestion")
+        ));
+        assertThat(indexableFields.getFields("suggest.subsuggest"), arrayContainingInAnyOrder(
+            suggestField("suggestion")
+        ));
+    }
+
+    public void testCompletionTypeWithSubCompletionFieldAndObjectInsert() throws Exception {
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
+            .startObject("properties").startObject("completion")
+            .field("type", "completion")
+            .startObject("fields")
+            .startObject("analyzed")
+            .field("type", "completion")
+            .endObject()
+            .endObject()
+            .endObject().endObject()
+            .endObject().endObject()
+        );
+
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+
+        ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                    .startObject()
+                    .startObject("completion")
+                    .array("input","New York", "NY")
+                    .field("weight",34)
+                    .endObject()
+                    .endObject()),
+            XContentType.JSON));
+
+        ParseContext.Document indexableFields = parsedDocument.rootDoc();
+        assertThat(indexableFields.getFields("completion"), arrayContainingInAnyOrder(
+            suggestField("New York"),
+            suggestField("NY")
+        ));
+        assertThat(indexableFields.getFields("completion.analyzed"), arrayContainingInAnyOrder(
+            suggestField("New York"),
+            suggestField("NY")
+        ));
+        //unable to assert about weight, covered in a REST test
+    }
+
+    public void testCompletionTypeWithSubKeywordFieldAndObjectInsert() throws Exception {
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
+            .startObject("properties").startObject("completion")
+            .field("type", "completion")
+            .startObject("fields")
+            .startObject("analyzed")
+            .field("type", "keyword")
+            .endObject()
+            .endObject()
+            .endObject().endObject()
+            .endObject().endObject()
+        );
+
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+
+        ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                    .startObject()
+                    .startObject("completion")
+                    .array("input","New York", "NY")
+                    .field("weight",34)
+                    .endObject()
+                    .endObject()),
+            XContentType.JSON));
+
+        ParseContext.Document indexableFields = parsedDocument.rootDoc();
+        assertThat(indexableFields.getFields("completion"), arrayContainingInAnyOrder(
+            suggestField("New York"),
+            suggestField("NY")
+        ));
+        assertThat(indexableFields.getFields("completion.analyzed"), arrayContainingInAnyOrder(
+            keywordField("New York"),
+            sortedSetDocValuesField("New York"),
+            keywordField("NY"),
+            sortedSetDocValuesField("NY")
+        ));
+        //unable to assert about weight, covered in a REST test
+    }
+
+    public void testCompletionTypeWithSubKeywordFieldAndStringInsert() throws Exception {
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
+            .startObject("properties").startObject("completion")
+            .field("type", "completion")
+            .startObject("fields")
+            .startObject("analyzed")
+            .field("type", "keyword")
+            .endObject()
+            .endObject()
+            .endObject().endObject()
+            .endObject().endObject()
+        );
+
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+
+        ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                    .startObject()
+                    .field("completion", "suggestion")
+                    .endObject()),
+            XContentType.JSON));
+
+        ParseContext.Document indexableFields = parsedDocument.rootDoc();
+        assertThat(indexableFields.getFields("completion"), arrayContainingInAnyOrder(
+            suggestField("suggestion")
+        ));
+        assertThat(indexableFields.getFields("completion.analyzed"), arrayContainingInAnyOrder(
+            keywordField("suggestion"),
+            sortedSetDocValuesField("suggestion")
+        ));
+    }
+
     public void testParsingMultiValued() throws Exception {
         String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
             .startObject("properties").startObject("completion")
@@ -199,7 +530,10 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject()),
             XContentType.JSON));
 
         IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
-        assertSuggestFields(fields, 2);
+        assertThat(fields, arrayContainingInAnyOrder(
+            suggestField("suggestion1"),
+            suggestField("suggestion2")
+        ));
     }
 
     public void testParsingWithWeight() throws Exception {
@@ -222,7 +556,9 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject()),
             XContentType.JSON));
 
         IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
-        assertSuggestFields(fields, 1);
+        assertThat(fields, arrayContainingInAnyOrder(
+            suggestField("suggestion")
+        ));
     }
 
     public void testParsingMultiValueWithWeight() throws Exception {
@@ -245,7 +581,11 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject()),
             XContentType.JSON));
 
         IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
-        assertSuggestFields(fields, 3);
+        assertThat(fields, arrayContainingInAnyOrder(
+            suggestField("suggestion1"),
+            suggestField("suggestion2"),
+            suggestField("suggestion3")
+        ));
     }
 
     public void testParsingWithGeoFieldAlias() throws Exception {
@@ -318,7 +658,11 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject()),
             XContentType.JSON));
 
         IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
-        assertSuggestFields(fields, 3);
+        assertThat(fields, arrayContainingInAnyOrder(
+            suggestField("suggestion1"),
+            suggestField("suggestion2"),
+            suggestField("suggestion3")
+        ));
     }
 
     public void testParsingMixed() throws Exception {
@@ -351,7 +695,14 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject()),
             XContentType.JSON));
 
         IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
-        assertSuggestFields(fields, 6);
+        assertThat(fields, arrayContainingInAnyOrder(
+            suggestField("suggestion1"),
+            suggestField("suggestion2"),
+            suggestField("suggestion3"),
+            suggestField("suggestion4"),
+            suggestField("suggestion5"),
+            suggestField("suggestion6")
+        ));
     }
 
     public void testNonContextEnabledParsingWithContexts() throws Exception {
@@ -508,9 +859,13 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
     }
 
     private static void assertSuggestFields(IndexableField[] fields, int expected) {
+        assertFieldsOfType(fields, SuggestField.class, expected);
+    }
+
+    private static void assertFieldsOfType(IndexableField[] fields, Class<?> clazz, int expected) {
         int actualFieldCount = 0;
         for (IndexableField field : fields) {
-            if (field instanceof SuggestField) {
+            if (clazz.isInstance(field)) {
                 actualFieldCount++;
             }
         }
@@ -529,4 +884,33 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
         );
         assertThat(e.getMessage(), containsString("name cannot be empty string"));
     }
+
+    private Matcher<IndexableField> suggestField(String value) {
+        return Matchers.allOf(hasProperty(IndexableField::stringValue, equalTo(value)),
+            Matchers.instanceOf(SuggestField.class));
+    }
+
+    private Matcher<IndexableField> contextSuggestField(String value) {
+        return Matchers.allOf(hasProperty(IndexableField::stringValue, equalTo(value)),
+            Matchers.instanceOf(ContextSuggestField.class));
+    }
+
+    private CombinableMatcher<IndexableField> sortedSetDocValuesField(String value) {
+        return Matchers.both(hasProperty(IndexableField::binaryValue, equalTo(new BytesRef(value))))
+            .and(Matchers.instanceOf(SortedSetDocValuesField.class));
+    }
+
+    private CombinableMatcher<IndexableField> keywordField(String value) {
+        return Matchers.both(hasProperty(IndexableField::binaryValue, equalTo(new BytesRef(value))))
+            .and(hasProperty(IndexableField::fieldType, Matchers.instanceOf(KeywordFieldMapper.KeywordFieldType.class)));
+    }
+
+    private <T, V> Matcher<T> hasProperty(Function<T, V> property, Matcher<V> valueMatcher) {
+        return new FeatureMatcher<T, V>(valueMatcher, "object with", property.toString()) {
+            @Override
+            protected V featureValueOf(T actual) {
+                return property.apply(actual);
+            }
+        };
+    }
 }
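The hasProperty helper above is a small FeatureMatcher adapter; the same pattern works against plain Hamcrest outside this test class. A self-contained sketch (class name and example values are ours):

    import java.util.function.Function;
    import org.hamcrest.FeatureMatcher;
    import org.hamcrest.Matcher;

    import static org.hamcrest.MatcherAssert.assertThat;
    import static org.hamcrest.Matchers.equalTo;

    final class PropertyMatcherSketch {
        // Adapts a getter into a matcher, exactly as the suggestField/keywordField
        // helpers do with IndexableField::stringValue and IndexableField::binaryValue.
        static <T, V> Matcher<T> hasProperty(Function<T, V> property, Matcher<V> valueMatcher) {
            return new FeatureMatcher<T, V>(valueMatcher, "object with", property.toString()) {
                @Override
                protected V featureValueOf(T actual) {
                    return property.apply(actual);
                }
            };
        }

        public static void main(String[] args) {
            // Matches any string whose length() is 3.
            assertThat("abc", hasProperty(String::length, equalTo(3)));
        }
    }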
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java
index ad9d0c41494..3a185620f7b 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java
@@ -29,12 +29,12 @@ import org.apache.lucene.index.MultiReader;
 import org.apache.lucene.search.IndexOrDocValuesQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.store.Directory;
-import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.joda.DateMathParser;
 import org.elasticsearch.common.joda.Joda;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateMathParser;
+import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
 import org.elasticsearch.index.mapper.MappedFieldType.Relation;
@@ -121,7 +121,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
         DirectoryReader reader = DirectoryReader.open(w);
         DateFieldType ft = new DateFieldType();
         ft.setName("my_date");
-        DateMathParser alternateFormat = new DateMathParser(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER);
+        DateMathParser alternateFormat = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser();
         doTestIsFieldWithinQuery(ft, reader, null, null);
         doTestIsFieldWithinQuery(ft, reader, null, alternateFormat);
         doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, null);
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java
index 8e164c86ebe..c35e083d687 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java
@@ -27,7 +27,6 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
 
 import static org.hamcrest.Matchers.equalTo;
 
-// TODO: move this test...it doesn't need to be by itself
 public class DocumentMapperParserTests extends ESSingleNodeTestCase {
 
     public void testTypeLevel() throws Exception {
         String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java
similarity index 98%
rename from server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java
rename to server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java
index 54b6b2310da..8cd39d72ad3 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java
@@ -39,7 +39,7 @@ import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
 
-public class DocumentMapperMergeTests extends ESSingleNodeTestCase {
+public class DocumentMapperTests extends ESSingleNodeTestCase {
 
     public void test1Merge() throws Exception {
 
@@ -51,7 +51,8 @@ public class DocumentMapperTests extends ESSingleNodeTestCase {
         String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
             .startObject("name").field("type", "text").endObject()
             .startObject("age").field("type", "integer").endObject()
-            .startObject("obj1").startObject("properties").startObject("prop1").field("type", "integer").endObject().endObject().endObject()
+            .startObject("obj1").startObject("properties").startObject("prop1").field("type", "integer").endObject().endObject()
+            .endObject()
             .endObject().endObject().endObject());
         DocumentMapper stage2 = parser.parse("person", new CompressedXContent(stage2Mapping));
 
@@ -70,7 +71,8 @@ public class DocumentMapperTests extends ESSingleNodeTestCase {
         DocumentMapper mapper = parser.parse("type1", new CompressedXContent(objectMapping));
         assertNull(mapper.root().dynamic());
 
-        String withDynamicMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").field("dynamic", "false").endObject().endObject());
+        String withDynamicMapping = Strings.toString(
+            XContentFactory.jsonBuilder().startObject().startObject("type1").field("dynamic", "false").endObject().endObject());
         DocumentMapper withDynamicMapper = parser.parse("type1", new CompressedXContent(withDynamicMapping));
         assertThat(withDynamicMapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java
index af29edcef30..07a80a31deb 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java
@@ -35,9 +35,12 @@ import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.junit.Before;
 
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.function.Function;
@@ -90,20 +93,26 @@ public class FieldFilterMapperPluginTests extends ESSingleNodeTestCase {
     }
 
     public void testFieldCapabilities() {
+        List<String> allFields = new ArrayList<>(ALL_FLAT_FIELDS);
+        allFields.addAll(ALL_OBJECT_FIELDS);
         FieldCapabilitiesResponse index1 = client().fieldCaps(new FieldCapabilitiesRequest().fields("*").indices("index1")).actionGet();
-        assertFieldCaps(index1, ALL_FLAT_FIELDS);
+        assertFieldCaps(index1, allFields);
         FieldCapabilitiesResponse filtered = client().fieldCaps(new FieldCapabilitiesRequest().fields("*").indices("filtered")).actionGet();
-        assertFieldCaps(filtered, FILTERED_FLAT_FIELDS);
+        List<String> filteredFields = new ArrayList<>(FILTERED_FLAT_FIELDS);
+        filteredFields.addAll(ALL_OBJECT_FIELDS);
+        assertFieldCaps(filtered, filteredFields);
         //double check that submitting the filtered mappings to an unfiltered index leads to the same field_caps output
         //as the one coming from a filtered index with same mappings
         GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("filtered").get();
         ImmutableOpenMap<String, MappingMetaData> filteredMapping = getMappingsResponse.getMappings().get("filtered");
         assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", filteredMapping.get("_doc").getSourceAsMap()));
         FieldCapabilitiesResponse test = client().fieldCaps(new FieldCapabilitiesRequest().fields("*").indices("test")).actionGet();
-        assertFieldCaps(test, FILTERED_FLAT_FIELDS);
+        // properties.value is an object field in the new mapping
+        filteredFields.add("properties.value");
+        assertFieldCaps(test, filteredFields);
     }
 
-    private static void assertFieldCaps(FieldCapabilitiesResponse fieldCapabilitiesResponse, String[] expectedFields) {
+    private static void assertFieldCaps(FieldCapabilitiesResponse fieldCapabilitiesResponse, Collection<String> expectedFields) {
         Map<String, Map<String, FieldCapabilities>> responseMap = fieldCapabilitiesResponse.get();
         Set<String> builtInMetaDataFields = IndicesModule.getBuiltInMetaDataFields();
         for (String field : builtInMetaDataFields) {
@@ -118,7 +127,7 @@ public class FieldFilterMapperPluginTests extends ESSingleNodeTestCase {
     }
 
     private static void assertFieldMappings(Map<String, Map<String, GetFieldMappingsResponse.FieldMappingMetaData>> mappings,
-                                            String[] expectedFields) {
+                                            Collection<String> expectedFields) {
         assertEquals(1, mappings.size());
         Map<String, GetFieldMappingsResponse.FieldMappingMetaData> fields = new HashMap<>(mappings.get("_doc"));
         Set<String> builtInMetaDataFields = IndicesModule.getBuiltInMetaDataFields();
@@ -245,14 +254,18 @@ public class FieldFilterMapperPluginTests extends ESSingleNodeTestCase {
         }
     }
 
-    private static final String[] ALL_FLAT_FIELDS = new String[]{
+    private static final Collection<String> ALL_FLAT_FIELDS = Arrays.asList(
         "name.first", "name.last_visible", "birth", "age_visible", "address.street", "address.location", "address.area_visible",
         "properties.key_visible", "properties.key_visible.keyword", "properties.value", "properties.value.keyword_visible"
-    };
+    );
 
-    private static final String[] FILTERED_FLAT_FIELDS = new String[]{
-        "name.last_visible", "age_visible", "address.area_visible", "properties.key_visible", "properties.value.keyword_visible"
-    };
+    private static final Collection<String> ALL_OBJECT_FIELDS = Arrays.asList(
+        "name", "address", "properties"
+    );
+
+    private static final Collection<String> FILTERED_FLAT_FIELDS = Arrays.asList(
+        "name.last_visible", "age_visible", "address.area_visible", "properties.key_visible", "properties.value.keyword_visible"
+    );
 
     private static final String TEST_ITEM = "{\n" +
         "  \"_doc\": {\n" +
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredFieldTypeTests.java
index 4035383893d..e0cd3b1d153 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredFieldTypeTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredFieldTypeTests.java
@@ -19,6 +19,14 @@
 
 package org.elasticsearch.index.mapper;
 
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.PrefixQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.RegexpQuery;
+import org.apache.lucene.search.WildcardQuery;
+import org.apache.lucene.util.BytesRef;
+
 public class IgnoredFieldTypeTests extends FieldTypeTestCase {
 
     @Override
@@ -26,4 +34,30 @@ public class IgnoredFieldTypeTests extends FieldTypeTestCase {
         return new IgnoredFieldMapper.IgnoredFieldType();
     }
 
+    public void testPrefixQuery() {
+        MappedFieldType ft = createDefaultFieldType();
+        ft.setName("field");
+        ft.setIndexOptions(IndexOptions.DOCS);
+
+        Query expected = new PrefixQuery(new Term("field", new BytesRef("foo*")));
+        assertEquals(expected, ft.prefixQuery("foo*", null, null));
+    }
+
+    public void testRegexpQuery() {
+        MappedFieldType ft = createDefaultFieldType();
+        ft.setName("field");
+        ft.setIndexOptions(IndexOptions.DOCS);
+
+        Query expected = new RegexpQuery(new Term("field", new BytesRef("foo?")));
+        assertEquals(expected, ft.regexpQuery("foo?", 0, 10, null, null));
+    }
+
+    public void testWildcardQuery() {
+        MappedFieldType ft = createDefaultFieldType();
+        ft.setName("field");
+        ft.setIndexOptions(IndexOptions.DOCS);
+
+        Query expected = new WildcardQuery(new Term("field", new BytesRef("foo*")));
+        assertEquals(expected, ft.wildcardQuery("foo*", null, null));
+    }
 }
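The _ignored tests above exercise the stock term-level query implementations, while the IndexFieldType tests that follow rely on a different contract: _index is constant per shard, so its queries are rewritten up front instead of executed against the index. A sketch of the contrast, assuming only the Lucene types already used in the diff (class and method names are ours):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.MatchNoDocsQuery;
    import org.apache.lucene.search.PrefixQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.util.BytesRef;

    final class TermLevelQuerySketch {
        // _ignored: a real index-backed query, evaluated per document.
        static Query ignoredPrefix(String prefix) {
            return new PrefixQuery(new Term("_ignored", new BytesRef(prefix)));
        }

        // _index: the shard already knows its own index name, so the query
        // collapses to match-all or match-none before it ever runs.
        static Query indexPrefix(String prefix, String shardIndexName) {
            return shardIndexName.startsWith(prefix) ? new MatchAllDocsQuery() : new MatchNoDocsQuery();
        }
    }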
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java
index ecea620f11c..82f0edf24f4 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java
@@ -18,12 +18,56 @@
  */
 package org.elasticsearch.index.mapper;
 
-import org.elasticsearch.index.mapper.IndexFieldMapper;
-import org.elasticsearch.index.mapper.MappedFieldType;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.MatchNoDocsQuery;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.query.QueryShardContext;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 public class IndexFieldTypeTests extends FieldTypeTestCase {
+
     @Override
     protected MappedFieldType createDefaultFieldType() {
         return new IndexFieldMapper.IndexFieldType();
     }
+
+    public void testPrefixQuery() {
+        MappedFieldType ft = createDefaultFieldType();
+        ft.setName("field");
+        ft.setIndexOptions(IndexOptions.DOCS);
+
+        assertEquals(new MatchAllDocsQuery(), ft.prefixQuery("ind", null, createContext()));
+        assertEquals(new MatchNoDocsQuery(), ft.prefixQuery("other_ind", null, createContext()));
+    }
+
+    public void testRegexpQuery() {
+        MappedFieldType ft = createDefaultFieldType();
+        ft.setName("field");
+        ft.setIndexOptions(IndexOptions.DOCS);
+
+        assertEquals(new MatchAllDocsQuery(), ft.regexpQuery("ind.x", 0, 10, null, createContext()));
+        assertEquals(new MatchNoDocsQuery(), ft.regexpQuery("ind?x", 0, 10, null, createContext()));
+    }
+
+    public void testWildcardQuery() {
+        MappedFieldType ft = createDefaultFieldType();
+        ft.setName("field");
+        ft.setIndexOptions(IndexOptions.DOCS);
+
+        assertEquals(new MatchAllDocsQuery(), ft.wildcardQuery("ind*x", null, createContext()));
+        assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("other_ind*x", null, createContext()));
+    }
+
+    private QueryShardContext createContext() {
+        QueryShardContext context = mock(QueryShardContext.class);
+
+        Index index = new Index("index", "123");
+        when(context.getFullyQualifiedIndex()).thenReturn(index);
+        when(context.index()).thenReturn(index);
+
+        return context;
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperTests.java
index 97a72d75e3a..d2971034fd7 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/MapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperTests.java
@@ -37,7 +37,7 @@ public class MapperTests extends ESTestCase {
     }
 
     public void testBuilderContextWithIndexSettingsAsNull() {
-        NullPointerException e = expectThrows(NullPointerException.class, () -> new Mapper.BuilderContext(null, new ContentPath(1)));
+        expectThrows(NullPointerException.class, () -> new Mapper.BuilderContext(null, new ContentPath(1)));
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java
new file mode 100644
index 00000000000..adba78546c1
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java
@@ -0,0 +1,125 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.index.mapper;
+
+import com.google.common.collect.ImmutableMap;
+import org.elasticsearch.Version;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.mapper.FieldMapper.CopyTo;
+import org.elasticsearch.index.mapper.FieldMapper.MultiFields;
+import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.AfterClass;
+
+import java.util.Map;
+
+import static java.util.Collections.emptyMap;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED;
+import static org.hamcrest.Matchers.notNullValue;
+
+public class ObjectMapperMergeTests extends ESTestCase {
+
+    private static FieldMapper barFieldMapper = createTextFieldMapper("bar");
+    private static FieldMapper bazFieldMapper = createTextFieldMapper("baz");
+
+    private static RootObjectMapper rootObjectMapper = createRootObjectMapper(
+        "type1", true, ImmutableMap.of(
+            "disabled", createObjectMapper("disabled", false, emptyMap()),
+            "foo", createObjectMapper("foo", true, ImmutableMap.of(
+                "bar", barFieldMapper))));
+
+    @AfterClass
+    public static void cleanupReferences() {
+        barFieldMapper = null;
+        bazFieldMapper = null;
+        rootObjectMapper = null;
+    }
+
+    public void testMerge() {
+        // GIVEN an enriched mapping with a new "baz" field
+        ObjectMapper mergeWith = createRootObjectMapper(
+            "type1", true, ImmutableMap.of(
+                "disabled", createObjectMapper("disabled", false, emptyMap()),
+                "foo", createObjectMapper("foo", true, ImmutableMap.of(
+                    "bar", barFieldMapper,
+                    "baz", bazFieldMapper))));
+
+        // WHEN merging mappings
+        final ObjectMapper merged = rootObjectMapper.merge(mergeWith);
+
+        // THEN the new "baz" field is added to the merged mapping
+        final ObjectMapper mergedFoo = (ObjectMapper) merged.getMapper("foo");
+        assertThat(mergedFoo.getMapper("bar"), notNullValue());
+        assertThat(mergedFoo.getMapper("baz"), notNullValue());
+    }
+
+    public void testMergeWhenDisablingField() {
+        // GIVEN a mapping with the "foo" field disabled
+        ObjectMapper mergeWith = createRootObjectMapper(
+            "type1", true, ImmutableMap.of(
+                "disabled", createObjectMapper("disabled", false, emptyMap()),
+                "foo", createObjectMapper("foo", false, emptyMap())));
+
+        // WHEN merging mappings
+        // THEN a MapperException is thrown with the expected message
+        MapperException e = expectThrows(MapperException.class, () -> rootObjectMapper.merge(mergeWith));
+        assertEquals("Can't update attribute for type [type1.foo.enabled] in index mapping", e.getMessage());
+    }
+
+    public void testMergeWhenEnablingField() {
+        // GIVEN a mapping with the "disabled" field enabled
+        ObjectMapper mergeWith = createRootObjectMapper(
+            "type1", true, ImmutableMap.of(
+                "disabled", createObjectMapper("disabled", true, emptyMap()),
+                "foo", createObjectMapper("foo", true, ImmutableMap.of(
+                    "bar", barFieldMapper))));
+
+        // WHEN merging mappings
+        // THEN a MapperException is thrown with the expected message
+        MapperException e = expectThrows(MapperException.class, () -> rootObjectMapper.merge(mergeWith));
+        assertEquals("Can't update attribute for type [type1.disabled.enabled] in index mapping", e.getMessage());
+    }
+
+    private static RootObjectMapper createRootObjectMapper(String name, boolean enabled, Map<String, Mapper> mappers) {
+        final Settings indexSettings = Settings.builder().put(SETTING_VERSION_CREATED, Version.CURRENT).build();
+        final Mapper.BuilderContext context = new Mapper.BuilderContext(indexSettings, new ContentPath());
+        final RootObjectMapper rootObjectMapper = new RootObjectMapper.Builder(name).enabled(enabled).build(context);
+
+        mappers.values().forEach(rootObjectMapper::putMapper);
+
+        return rootObjectMapper;
+    }
+
+    private static ObjectMapper createObjectMapper(String name, boolean enabled, Map<String, Mapper> mappers) {
+        final Settings indexSettings = Settings.builder().put(SETTING_VERSION_CREATED, Version.CURRENT).build();
+        final Mapper.BuilderContext context = new Mapper.BuilderContext(indexSettings, new ContentPath());
+        final ObjectMapper mapper = new ObjectMapper.Builder(name).enabled(enabled).build(context);
+
+        mappers.values().forEach(mapper::putMapper);
+
+        return mapper;
+    }
+
+    private static TextFieldMapper createTextFieldMapper(String name) {
+        final TextFieldType fieldType = new TextFieldType();
+        final Settings indexSettings = Settings.builder().put(SETTING_VERSION_CREATED, Version.CURRENT).build();
+
+        return new TextFieldMapper(name, fieldType, fieldType, -1, null, indexSettings, MultiFields.empty(), CopyTo.empty());
+    }
+}
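Read together, the three tests pin down the merge contract: adding fields merges cleanly, while flipping enabled in either direction fails fast. A further property implied but not asserted here is that merge never drops existing sub-mappers; a hypothetical extra case for the class above, reusing its helpers and imports, might look like:

    // Hypothetical addition to ObjectMapperMergeTests, not part of the patch.
    public void testMergeKeepsExistingFields() {
        ObjectMapper base = createObjectMapper("foo", true, emptyMap());
        ObjectMapper richer = createObjectMapper("foo", true,
            ImmutableMap.of("bar", createTextFieldMapper("bar")));
        // Merging richer into base adds "bar"...
        assertThat(base.merge(richer).getMapper("bar"), notNullValue());
        // ...and merging base into richer must not remove it.
        assertThat(richer.merge(base).getMapper("bar"), notNullValue());
    }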
rootObjectMapper = new RootObjectMapper.Builder(name).enabled(enabled).build(context); + + mappers.values().forEach(rootObjectMapper::putMapper); + + return rootObjectMapper; + } + + private static ObjectMapper createObjectMapper(String name, boolean enabled, Map mappers) { + final Settings indexSettings = Settings.builder().put(SETTING_VERSION_CREATED, Version.CURRENT).build(); + final Mapper.BuilderContext context = new Mapper.BuilderContext(indexSettings, new ContentPath()); + final ObjectMapper mapper = new ObjectMapper.Builder(name).enabled(enabled).build(context); + + mappers.values().forEach(mapper::putMapper); + + return mapper; + } + + private static TextFieldMapper createTextFieldMapper(String name) { + final TextFieldType fieldType = new TextFieldType(); + final Settings indexSettings = Settings.builder().put(SETTING_VERSION_CREATED, Version.CURRENT).build(); + + return new TextFieldMapper(name, fieldType, fieldType, -1, null, indexSettings, MultiFields.empty(), CopyTo.empty()); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java index 0aa8565ea57..34e7081d51d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java @@ -31,8 +31,8 @@ import org.apache.lucene.search.Query; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.search.internal.SearchContext; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldTypeTests.java index d64c4c5b0cf..6f68d28c017 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldTypeTests.java @@ -18,12 +18,44 @@ */ package org.elasticsearch.index.mapper; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.RoutingFieldMapper; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.RegexpQuery; +import org.apache.lucene.search.WildcardQuery; +import org.apache.lucene.util.BytesRef; public class RoutingFieldTypeTests extends FieldTypeTestCase { @Override protected MappedFieldType createDefaultFieldType() { return new RoutingFieldMapper.RoutingFieldType(); } + + public void testPrefixQuery() { + MappedFieldType ft = createDefaultFieldType(); + ft.setName("field"); + ft.setIndexOptions(IndexOptions.DOCS); + + Query expected = new PrefixQuery(new Term("field", new BytesRef("foo*"))); + assertEquals(expected, ft.prefixQuery("foo*", null, null)); + } + + public void testRegexpQuery() { + MappedFieldType ft = createDefaultFieldType(); + ft.setName("field"); + ft.setIndexOptions(IndexOptions.DOCS); + + Query expected = new 
+        assertEquals(expected, ft.regexpQuery("foo?", 0, 10, null, null));
+    }
+
+    public void testWildcardQuery() {
+        MappedFieldType ft = createDefaultFieldType();
+        ft.setName("field");
+        ft.setIndexOptions(IndexOptions.DOCS);
+
+        Query expected = new WildcardQuery(new Term("field", new BytesRef("foo*")));
+        assertEquals(expected, ft.wildcardQuery("foo*", null, null));
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java
index 4736cbe4712..f4856d51a2d 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java
@@ -19,6 +19,7 @@
 package org.elasticsearch.index.mapper;
 
+import org.apache.lucene.analysis.MockSynonymAnalyzer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.document.FieldType;
@@ -55,6 +56,7 @@ import org.elasticsearch.index.mapper.MapperService.MergeReason;
 import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
 import org.elasticsearch.index.query.MatchPhraseQueryBuilder;
 import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.index.search.MatchQuery;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -82,10 +84,6 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
     @Before
     public void setup() {
         Settings settings = Settings.builder()
-                .put("index.analysis.filter.mySynonyms.type", "synonym")
-                .putList("index.analysis.filter.mySynonyms.synonyms", Collections.singletonList("car, auto"))
-                .put("index.analysis.analyzer.synonym.tokenizer", "standard")
-                .put("index.analysis.analyzer.synonym.filter", "mySynonyms")
                 // Stop filter remains in server as it is part of lucene-core
                 .put("index.analysis.analyzer.my_stop_analyzer.tokenizer", "standard")
                 .put("index.analysis.analyzer.my_stop_analyzer.filter", "stop")
@@ -621,11 +619,6 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
     }
 
     public void testIndexPrefixIndexTypes() throws IOException {
-        QueryShardContext queryShardContext = indexService.newQueryShardContext(
-            randomInt(20), null, () -> {
-                throw new UnsupportedOperationException();
-            }, null);
-
         {
             String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field")
@@ -739,7 +732,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .startObject("synfield")
                 .field("type", "text")
-                .field("analyzer", "synonym")
+                .field("analyzer", "standard") // will be replaced with MockSynonymAnalyzer
                 .field("index_phrases", true)
                 .endObject()
                 .endObject()
@@ -766,11 +759,13 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
         assertThat(q5,
             is(new PhraseQuery.Builder().add(new Term("field", "sparkle")).add(new Term("field", "stopword"), 2).build()));
 
-        Query q6 = new MatchPhraseQueryBuilder("synfield", "motor car").toQuery(queryShardContext);
+        MatchQuery matchQuery = new MatchQuery(queryShardContext);
+        matchQuery.setAnalyzer(new MockSynonymAnalyzer());
+        Query q6 = matchQuery.parse(MatchQuery.Type.PHRASE, "synfield", "motor dogs");
         assertThat(q6, is(new MultiPhraseQuery.Builder()
             .add(new Term[]{
-                new
Term("synfield._index_phrase", "motor car"), - new Term("synfield._index_phrase", "motor auto")}) + new Term("synfield._index_phrase", "motor dogs"), + new Term("synfield._index_phrase", "motor dog")}) .build())); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference diff --git a/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java index fe39345dadd..5e443ec41ed 100644 --- a/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java @@ -179,7 +179,6 @@ public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase query.parse(Type.PHRASE, STRING_FIELD_NAME, "")); + query.setAnalyzer(new MockGraphAnalyzer(createGiantGraphMultiTerms())); + expectThrows(BooleanQuery.TooManyClauses.class, () -> query.parse(Type.PHRASE, STRING_FIELD_NAME, "")); + } + + private static class MockGraphAnalyzer extends Analyzer { + final CannedBinaryTokenStream.BinaryToken[] tokens; + + private MockGraphAnalyzer(CannedBinaryTokenStream.BinaryToken[] tokens ) { + this.tokens = tokens; + } + @Override + protected TokenStreamComponents createComponents(String fieldName) { + Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true); + return new TokenStreamComponents(tokenizer) { + @Override + public TokenStream getTokenStream() { + return new CannedBinaryTokenStream(tokens); + } + + @Override + protected void setReader(final Reader reader) { + } + }; + } + } + + /** + * Creates a graph token stream with 2 side paths at each position. + **/ + private static CannedBinaryTokenStream.BinaryToken[] createGiantGraph(int numPos) { + List tokens = new ArrayList<>(); + BytesRef term1 = new BytesRef("foo"); + BytesRef term2 = new BytesRef("bar"); + for (int i = 0; i < numPos;) { + if (i % 2 == 0) { + tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); + tokens.add(new CannedBinaryTokenStream.BinaryToken(term1, 0, 2)); + i += 2; + } else { + tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); + i++; + } + } + return tokens.toArray(new CannedBinaryTokenStream.BinaryToken[0]); + } + + /** + * Creates a graph token stream with {@link BooleanQuery#getMaxClauseCount()} + * expansions at the last position. 
+ **/ + private static CannedBinaryTokenStream.BinaryToken[] createGiantGraphMultiTerms() { + List tokens = new ArrayList<>(); + BytesRef term1 = new BytesRef("foo"); + BytesRef term2 = new BytesRef("bar"); + tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); + tokens.add(new CannedBinaryTokenStream.BinaryToken(term1, 0, 2)); + tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); + tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); + for (int i = 0; i < BooleanQuery.getMaxClauseCount(); i++) { + tokens.add(new CannedBinaryTokenStream.BinaryToken(term1, 0, 1)); + } + return tokens.toArray(new CannedBinaryTokenStream.BinaryToken[0]); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 1cc058eb724..ee56d07ca65 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.analysis.MockSynonymAnalyzer; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.BlendedTermQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; @@ -1195,20 +1196,23 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase((MultiTermQuery)multiTermQuery).getWrappedQuery())); + equalTo(new SpanMultiTermQueryWrapper<>((MultiTermQuery) multiTermQuery).getWrappedQuery())); } public void testIllegalArgument() { @@ -154,6 +158,11 @@ public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase new InternalEngine(config) { + @Override + public IndexResult index(Index op) throws IOException { + IndexResult result = super.index(op); + if (op.origin() == Operation.Origin.PRIMARY) { + indexedOnPrimary.countDown(); + // prevent the indexing on the primary from returning (it was added to Lucene and translog already) + // to make sure that this operation is replicated to the replica via recovery, then via replication. 
+ try { + recoveryDone.await(); + } catch (InterruptedException e) { + throw new AssertionError(e); + } + } + return result; + } + }; + } + }) { + shards.startAll(); + Thread thread = new Thread(() -> { + IndexRequest indexRequest = new IndexRequest(index.getName(), "type").source("{}", XContentType.JSON); + try { + shards.index(indexRequest); + } catch (Exception e) { + throw new AssertionError(e); + } + }); + thread.start(); + IndexShard replica = shards.addReplica(); + Future fut = shards.asyncRecoverReplica(replica, + (shard, node) -> new RecoveryTarget(shard, node, recoveryListener, v -> {}){ + @Override + public void prepareForTranslogOperations(boolean fileBasedRecovery, int totalTranslogOps) throws IOException { + try { + indexedOnPrimary.await(); + } catch (InterruptedException e) { + throw new AssertionError(e); + } + super.prepareForTranslogOperations(fileBasedRecovery, totalTranslogOps); + } + }); + fut.get(); + recoveryDone.countDown(); + thread.join(); + shards.assertAllEqual(1); + } + } + public void testInheritMaxValidAutoIDTimestampOnRecovery() throws Exception { - //TODO: Enables this test with soft-deletes once we have timestamp - Settings settings = Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false).build(); - try (ReplicationGroup shards = createGroup(0, settings)) { + try (ReplicationGroup shards = createGroup(0)) { shards.startAll(); final IndexRequest indexRequest = new IndexRequest(index.getName(), "type").source("{}", XContentType.JSON); indexRequest.onRetry(); // force an update of the timestamp @@ -267,7 +338,8 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase barrier.await(); indexOnReplica(replicationRequest, shards, replica2, newReplica1Term); } catch (IllegalStateException ise) { - assertThat(ise.getMessage(), either(containsString("is too old")).or(containsString("cannot be a replication target"))); + assertThat(ise.getMessage(), either(containsString("is too old")) + .or(containsString("cannot be a replication target")).or(containsString("engine is closed"))); } catch (Exception e) { throw new RuntimeException(e); } @@ -309,7 +381,8 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase indexOnReplica(replicationRequest, shards, replica, primaryPrimaryTerm); successFullyIndexed.set(true); } catch (IllegalStateException ise) { - assertThat(ise.getMessage(), either(containsString("is too old")).or(containsString("cannot be a replication target"))); + assertThat(ise.getMessage(), either(containsString("is too old")) + .or(containsString("cannot be a replication target")).or(containsString("engine is closed"))); } catch (Exception e) { throw new RuntimeException(e); } diff --git a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index a73d7385d9d..e32161af7fe 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.replication; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import 
org.elasticsearch.action.index.IndexRequest; @@ -404,6 +405,10 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC assertThat(task.getResyncedOperations(), greaterThanOrEqualTo(extraDocs)); } shards.assertAllEqual(initialDocs + extraDocs); + for (IndexShard replica : shards.getReplicas()) { + assertThat(replica.getMaxSeqNoOfUpdatesOrDeletes(), + greaterThanOrEqualTo(shards.getPrimary().getMaxSeqNoOfUpdatesOrDeletes())); + } // check translog on replica is trimmed int translogOperations = 0; @@ -488,9 +493,10 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC return new RecoveryTarget(indexShard, node, recoveryListener, l -> { }) { @Override - public long indexTranslogOperations(List operations, int totalTranslogOps) throws IOException { + public long indexTranslogOperations(List operations, int totalTranslogOps, + long maxSeenAutoIdTimestamp, long maxSeqNoOfUpdates) throws IOException { opsSent.set(true); - return super.indexTranslogOperations(operations, totalTranslogOps); + return super.indexTranslogOperations(operations, totalTranslogOps, maxSeenAutoIdTimestamp, maxSeqNoOfUpdates); } }; }); @@ -557,7 +563,8 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC replica, (indexShard, node) -> new RecoveryTarget(indexShard, node, recoveryListener, l -> {}) { @Override - public long indexTranslogOperations(final List operations, final int totalTranslogOps) + public long indexTranslogOperations(final List operations, final int totalTranslogOps, + final long maxAutoIdTimestamp, long maxSeqNoOfUpdates) throws IOException { // index a doc which is not part of the snapshot, but also does not complete on replica replicaEngineFactory.latchIndexers(1); @@ -585,7 +592,7 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC } catch (InterruptedException e) { throw new AssertionError(e); } - return super.indexTranslogOperations(operations, totalTranslogOps); + return super.indexTranslogOperations(operations, totalTranslogOps, maxAutoIdTimestamp, maxSeqNoOfUpdates); } }); pendingDocActiveWithExtraDocIndexed.await(); @@ -631,6 +638,49 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC } } + public void testTransferMaxSeenAutoIdTimestampOnResync() throws Exception { + try (ReplicationGroup shards = createGroup(2)) { + shards.startAll(); + IndexShard primary = shards.getPrimary(); + IndexShard replica1 = shards.getReplicas().get(0); + IndexShard replica2 = shards.getReplicas().get(1); + long maxTimestampOnReplica1 = -1; + long maxTimestampOnReplica2 = -1; + List replicationRequests = new ArrayList<>(); + for (int numDocs = between(1, 10), i = 0; i < numDocs; i++) { + final IndexRequest indexRequest = new IndexRequest(index.getName(), "type").source("{}", XContentType.JSON); + indexRequest.process(Version.CURRENT, null, index.getName()); + final IndexRequest copyRequest; + if (randomBoolean()) { + copyRequest = copyIndexRequest(indexRequest); + indexRequest.onRetry(); + } else { + copyRequest = copyIndexRequest(indexRequest); + copyRequest.onRetry(); + } + replicationRequests.add(copyRequest); + final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, primary); + if (randomBoolean()) { + indexOnReplica(bulkShardRequest, shards, replica1); + maxTimestampOnReplica1 = Math.max(maxTimestampOnReplica1, indexRequest.getAutoGeneratedTimestamp()); + } else { + indexOnReplica(bulkShardRequest, shards, replica2); + maxTimestampOnReplica2 = 
Math.max(maxTimestampOnReplica2, indexRequest.getAutoGeneratedTimestamp()); + } + } + assertThat(replica1.getMaxSeenAutoIdTimestamp(), equalTo(maxTimestampOnReplica1)); + assertThat(replica2.getMaxSeenAutoIdTimestamp(), equalTo(maxTimestampOnReplica2)); + shards.promoteReplicaToPrimary(replica1).get(); + assertThat(replica2.getMaxSeenAutoIdTimestamp(), equalTo(maxTimestampOnReplica1)); + for (IndexRequest request : replicationRequests) { + shards.index(request); // deliver via normal replication + } + for (IndexShard shard : shards) { + assertThat(shard.getMaxSeenAutoIdTimestamp(), equalTo(Math.max(maxTimestampOnReplica1, maxTimestampOnReplica2))); + } + } + } + public static class BlockingTarget extends RecoveryTarget { private final CountDownLatch recoveryBlocked; @@ -671,11 +721,12 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC } @Override - public long indexTranslogOperations(List operations, int totalTranslogOps) throws IOException { + public long indexTranslogOperations(List operations, int totalTranslogOps, + long maxAutoIdTimestamp, long maxSeqNoOfUpdates) throws IOException { if (hasBlocked() == false) { blockIfNeeded(RecoveryState.Stage.TRANSLOG); } - return super.indexTranslogOperations(operations, totalTranslogOps); + return super.indexTranslogOperations(operations, totalTranslogOps, maxAutoIdTimestamp, maxSeqNoOfUpdates); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java b/server/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java deleted file mode 100644 index aa154d93925..00000000000 --- a/server/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java +++ /dev/null @@ -1,220 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.search; - -import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; -import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.query.Operator; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.test.ESIntegTestCase; -import org.junit.Before; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.ExecutionException; - -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; - -public class MatchQueryIT extends ESIntegTestCase { - private static final String INDEX = "test"; - - /** - * Test setup. - */ - @Before - public void setUp() throws Exception { - super.setUp(); - CreateIndexRequestBuilder builder = prepareCreate(INDEX).setSettings( - Settings.builder() - .put(indexSettings()) - .put("index.analysis.filter.syns.type", "synonym") - .putList("index.analysis.filter.syns.synonyms", "wtf, what the fudge", "foo, bar baz") - .put("index.analysis.analyzer.lower_syns.type", "custom") - .put("index.analysis.analyzer.lower_syns.tokenizer", "standard") - .putList("index.analysis.analyzer.lower_syns.filter", "lowercase", "syns") - .put("index.analysis.filter.graphsyns.type", "synonym_graph") - .putList("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz") - .put("index.analysis.analyzer.lower_graphsyns.type", "custom") - .put("index.analysis.analyzer.lower_graphsyns.tokenizer", "standard") - .putList("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns") - ); - - assertAcked(builder.addMapping(INDEX, createMapping())); - ensureGreen(); - } - - private List getDocs() { - List builders = new ArrayList<>(); - builders.add(client().prepareIndex("test", "test", "1").setSource("field", "say wtf happened foo")); - builders.add(client().prepareIndex("test", "test", "2").setSource("field", "bar baz what the fudge man")); - builders.add(client().prepareIndex("test", "test", "3").setSource("field", "wtf")); - builders.add(client().prepareIndex("test", "test", "4").setSource("field", "what is the name for fudge")); - builders.add(client().prepareIndex("test", "test", "5").setSource("field", "bar two three")); - builders.add(client().prepareIndex("test", "test", "6").setSource("field", "bar baz two three")); - - return builders; - } - - /** - * Setup the index mappings for the test index. 
- * - * @return the json builder with the index mappings - * @throws IOException on error creating mapping json - */ - private XContentBuilder createMapping() throws IOException { - return XContentFactory.jsonBuilder() - .startObject() - .startObject(INDEX) - .startObject("properties") - .startObject("field") - .field("type", "text") - .endObject() - .endObject() - .endObject() - .endObject(); - } - - public void testSimpleMultiTermPhrase() throws ExecutionException, InterruptedException { - indexRandom(true, false, getDocs()); - - // first search using regular synonym field using phrase - SearchResponse searchResponse = client().prepareSearch(INDEX) - .setQuery(QueryBuilders.matchPhraseQuery("field", "foo two three").analyzer("lower_syns")).get(); - - // because foo -> "bar baz" where "foo" and "bar" at position 0, "baz" and "two" at position 1. - // "bar two three", "bar baz three", "foo two three", "foo baz three" - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "5"); // we should not match this but we do - - // same query using graph should find correct result - searchResponse = client().prepareSearch(INDEX).setQuery(QueryBuilders.matchPhraseQuery("field", "foo two three") - .analyzer("lower_graphsyns")).get(); - - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "6"); - } - - public void testSimpleMultiTermAnd() throws ExecutionException, InterruptedException { - indexRandom(true, false, getDocs()); - - // first search using regular synonym field using phrase - SearchResponse searchResponse = client().prepareSearch(INDEX).setQuery(QueryBuilders.matchQuery("field", "say what the fudge") - .operator(Operator.AND).analyzer("lower_syns")).get(); - - // Old synonyms work fine in that case, but it is coincidental - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "1"); - - // same query using graph should find correct result - searchResponse = client().prepareSearch(INDEX).setQuery(QueryBuilders.matchQuery("field", "say what the fudge") - .operator(Operator.AND).analyzer("lower_graphsyns")).get(); - - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "1"); - } - - public void testMinShouldMatch() throws ExecutionException, InterruptedException { - indexRandom(true, false, getDocs()); - - // no min should match - SearchResponse searchResponse = client().prepareSearch(INDEX) - .setQuery( - QueryBuilders.matchQuery("field", "three what the fudge foo") - .operator(Operator.OR).analyzer("lower_graphsyns").autoGenerateSynonymsPhraseQuery(false) - ) - .get(); - - assertHitCount(searchResponse, 6L); - assertSearchHits(searchResponse, "1", "2", "3", "4", "5", "6"); - - // same query, with min_should_match of 2 - searchResponse = client().prepareSearch(INDEX).setQuery(QueryBuilders.matchQuery("field", "three what the fudge foo") - .operator(Operator.OR).analyzer("lower_graphsyns").minimumShouldMatch("80%")).get(); - - // three wtf foo = 2 terms, match #1 - // three wtf bar baz = 3 terms, match #6 - // three what the fudge foo = 4 terms, no match - // three what the fudge bar baz = 4 terms, match #2 - assertHitCount(searchResponse, 3L); - assertSearchHits(searchResponse, "1", "2", "6"); - } - - public void testMultiTermsSynonymsPhrase() throws ExecutionException, InterruptedException { - List builders = getDocs(); - indexRandom(true, false, builders); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .setQuery( - QueryBuilders.matchQuery("field", "wtf") - .analyzer("lower_graphsyns") - 
.operator(Operator.AND)) - .get(); - assertHitCount(searchResponse, 3L); - assertSearchHits(searchResponse, "1", "2", "3"); - } - - public void testPhrasePrefix() throws ExecutionException, InterruptedException { - List builders = getDocs(); - builders.add(client().prepareIndex("test", "test", "7").setSource("field", "WTFD!")); - builders.add(client().prepareIndex("test", "test", "8").setSource("field", "Weird Al's WHAT THE FUDGESICLE")); - indexRandom(true, false, builders); - - SearchResponse searchResponse = client().prepareSearch(INDEX).setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "wtf") - .analyzer("lower_graphsyns")).get(); - - assertHitCount(searchResponse, 5L); - assertSearchHits(searchResponse, "1", "2", "3", "7", "8"); - } - - public void testCommonTerms() throws ExecutionException, InterruptedException { - String route = "commonTermsTest"; - List builders = getDocs(); - for (IndexRequestBuilder indexRequet : builders) { - // route all docs to same shard for this test - indexRequet.setRouting(route); - } - indexRandom(true, false, builders); - - // do a search with no cutoff frequency to show which docs should match - SearchResponse searchResponse = client().prepareSearch(INDEX) - .setRouting(route) - .setQuery(QueryBuilders.matchQuery("field", "bar three happened") - .operator(Operator.OR)).get(); - - assertHitCount(searchResponse, 4L); - assertSearchHits(searchResponse, "1", "2", "5", "6"); - - // do same search with cutoff and see less documents match - // in this case, essentially everything but "happened" gets excluded - searchResponse = client().prepareSearch(INDEX) - .setRouting(route) - .setQuery(QueryBuilders.matchQuery("field", "bar three happened") - .operator(Operator.OR).cutoffFrequency(1f)).get(); - - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "1"); - } -} diff --git a/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java b/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java index 184d54f43b8..1087bbbf9fd 100644 --- a/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java @@ -73,11 +73,7 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase { @Before public void setup() throws IOException { - Settings settings = Settings.builder() - .put("index.analysis.filter.syns.type","synonym") - .putList("index.analysis.filter.syns.synonyms","quick,fast") - .put("index.analysis.analyzer.syns.tokenizer","standard") - .put("index.analysis.analyzer.syns.filter","syns").build(); + Settings settings = Settings.builder().build(); IndexService indexService = createIndex("test", settings); MapperService mapperService = indexService.mapperService(); String mapping = "{\n" + @@ -87,11 +83,11 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase { " \"properties\":{\n" + " \"first\": {\n" + " \"type\":\"text\",\n" + - " \"analyzer\":\"syns\"\n" + + " \"analyzer\":\"standard\"\n" + " }," + " \"last\": {\n" + " \"type\":\"text\",\n" + - " \"analyzer\":\"syns\"\n" + + " \"analyzer\":\"standard\"\n" + " }" + " }" + " }\n" + @@ -221,25 +217,27 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase { QueryShardContext queryShardContext = indexService.newQueryShardContext( randomInt(20), null, () -> { throw new UnsupportedOperationException(); }, null); + MultiMatchQuery parser = new MultiMatchQuery(queryShardContext); + parser.setAnalyzer(new MockSynonymAnalyzer()); + 
Map fieldNames = new HashMap<>(); + fieldNames.put("name.first", 1.0f); + // check that synonym query is used for a single field - Query parsedQuery = - multiMatchQuery("quick").field("name.first") - .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext); + Query parsedQuery = parser.parse(MultiMatchQueryBuilder.Type.CROSS_FIELDS, fieldNames, "dogs", null); Term[] terms = new Term[2]; - terms[0] = new Term("name.first", "quick"); - terms[1] = new Term("name.first", "fast"); + terms[0] = new Term("name.first", "dog"); + terms[1] = new Term("name.first", "dogs"); Query expectedQuery = new SynonymQuery(terms); assertThat(parsedQuery, equalTo(expectedQuery)); // check that blended term query is used for multiple fields - parsedQuery = - multiMatchQuery("quick").field("name.first").field("name.last") - .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext); + fieldNames.put("name.last", 1.0f); + parsedQuery = parser.parse(MultiMatchQueryBuilder.Type.CROSS_FIELDS, fieldNames, "dogs", null); terms = new Term[4]; - terms[0] = new Term("name.first", "quick"); - terms[1] = new Term("name.first", "fast"); - terms[2] = new Term("name.last", "quick"); - terms[3] = new Term("name.last", "fast"); + terms[0] = new Term("name.first", "dog"); + terms[1] = new Term("name.first", "dogs"); + terms[2] = new Term("name.last", "dog"); + terms[3] = new Term("name.last", "dogs"); float[] boosts = new float[4]; Arrays.fill(boosts, 1.0f); expectedQuery = BlendedTermQuery.dismaxBlendedQuery(terms, boosts, 1.0f); diff --git a/server/src/test/java/org/elasticsearch/index/search/NestedHelperTests.java b/server/src/test/java/org/elasticsearch/index/search/NestedHelperTests.java index e781a3311b3..40a8251fb24 100644 --- a/server/src/test/java/org/elasticsearch/index/search/NestedHelperTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/NestedHelperTests.java @@ -40,6 +40,7 @@ import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; +import java.util.Collections; public class NestedHelperTests extends ESSingleNodeTestCase { @@ -115,6 +116,36 @@ public class NestedHelperTests extends ESSingleNodeTestCase { assertFalse(new NestedHelper(mapperService).mightMatchNonNestedDocs(new MatchNoDocsQuery(), "nested_missing")); } + public void testTermsQuery() { + Query termsQuery = mapperService.fullName("foo").termsQuery(Collections.singletonList("bar"), null); + assertFalse(new NestedHelper(mapperService).mightMatchNestedDocs(termsQuery)); + assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested1")); + assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested2")); + assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested3")); + assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested_missing")); + + termsQuery = mapperService.fullName("nested1.foo").termsQuery(Collections.singletonList("bar"), null); + assertTrue(new NestedHelper(mapperService).mightMatchNestedDocs(termsQuery)); + assertFalse(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested1")); + assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested2")); + assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested3")); + assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested_missing")); + + termsQuery = 
mapperService.fullName("nested2.foo").termsQuery(Collections.singletonList("bar"), null); + assertTrue(new NestedHelper(mapperService).mightMatchNestedDocs(termsQuery)); + assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested1")); + assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested2")); + assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested3")); + assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested_missing")); + + termsQuery = mapperService.fullName("nested3.foo").termsQuery(Collections.singletonList("bar"), null); + assertTrue(new NestedHelper(mapperService).mightMatchNestedDocs(termsQuery)); + assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested1")); + assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested2")); + assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested3")); + assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested_missing")); + } + public void testTermQuery() { Query termQuery = mapperService.fullName("foo").termQuery("bar", null); assertFalse(new NestedHelper(mapperService).mightMatchNestedDocs(termQuery)); diff --git a/server/src/test/java/org/elasticsearch/index/seqno/LocalCheckpointTrackerTests.java b/server/src/test/java/org/elasticsearch/index/seqno/LocalCheckpointTrackerTests.java index aef31a16110..789a60ec55d 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/LocalCheckpointTrackerTests.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/LocalCheckpointTrackerTests.java @@ -218,7 +218,6 @@ public class LocalCheckpointTrackerTests extends ESTestCase { } assertThat(tracker.getMaxSeqNo(), equalTo(maxOps - 1L)); assertThat(tracker.getCheckpoint(), equalTo(unFinishedSeq - 1L)); - assertThat(tracker.contains(randomValueOtherThan(unFinishedSeq, () -> (long) randomFrom(seqNos))), equalTo(true)); assertThat(tracker.contains(unFinishedSeq), equalTo(false)); tracker.markSeqNoAsCompleted(unFinishedSeq); assertThat(tracker.getCheckpoint(), equalTo(maxOps - 1L)); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java index e9f52d7c319..a0bf75ddb13 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java @@ -73,7 +73,7 @@ public class IndexSearcherWrapperTests extends ESTestCase { final int sourceRefCount = open.getRefCount(); final AtomicInteger count = new AtomicInteger(); final AtomicInteger outerCount = new AtomicInteger(); - try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) { + try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher, s -> {}, logger)) { final Engine.Searcher wrap = wrapper.wrap(engineSearcher); assertEquals(1, wrap.reader().getRefCount()); ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), key -> { @@ -121,7 +121,7 @@ public class IndexSearcherWrapperTests extends ESTestCase { } }; final ConcurrentHashMap cache = new ConcurrentHashMap<>(); - try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) { + try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher, s -> {}, logger)) { try (Engine.Searcher wrap = 
wrapper.wrap(engineSearcher)) { ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), key -> { cache.remove(key); @@ -151,7 +151,7 @@ public class IndexSearcherWrapperTests extends ESTestCase { assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); searcher.setSimilarity(iwc.getSimilarity()); IndexSearcherWrapper wrapper = new IndexSearcherWrapper(); - try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) { + try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher, logger)) { final Engine.Searcher wrap = wrapper.wrap(engineSearcher); assertSame(wrap, engineSearcher); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 64e3f481ff4..487ac7e0694 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -315,8 +315,8 @@ public class IndexShardTests extends IndexShardTestCase { // expected } try { - indexShard.acquireReplicaOperationPermit(indexShard.getPendingPrimaryTerm(), SequenceNumbers.UNASSIGNED_SEQ_NO, null, - ThreadPool.Names.WRITE, ""); + indexShard.acquireReplicaOperationPermit(indexShard.getPendingPrimaryTerm(), SequenceNumbers.UNASSIGNED_SEQ_NO, + randomNonNegativeLong(), null, ThreadPool.Names.WRITE, ""); fail("we should not be able to increment anymore"); } catch (IndexShardClosedException e) { // expected @@ -327,7 +327,7 @@ public class IndexShardTests extends IndexShardTestCase { IndexShard indexShard = newShard(false); expectThrows(IndexShardNotStartedException.class, () -> indexShard.acquireReplicaOperationPermit(indexShard.getPendingPrimaryTerm() + randomIntBetween(1, 100), - SequenceNumbers.UNASSIGNED_SEQ_NO, null, ThreadPool.Names.WRITE, "")); + SequenceNumbers.UNASSIGNED_SEQ_NO, randomNonNegativeLong(), null, ThreadPool.Names.WRITE, "")); closeShards(indexShard); } @@ -351,6 +351,7 @@ public class IndexShardTests extends IndexShardTestCase { indexShard.acquireReplicaOperationPermit( indexShard.getPendingPrimaryTerm(), indexShard.getGlobalCheckpoint(), + indexShard.getMaxSeqNoOfUpdatesOrDeletes(), new ActionListener() { @Override public void onResponse(Releasable releasable) { @@ -484,7 +485,6 @@ public class IndexShardTests extends IndexShardTestCase { final Result result = indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED)); final int maxSeqNo = result.maxSeqNo; - final boolean gap = result.gap; // promote the replica final ShardRouting replicaRouting = indexShard.routingEntry(); @@ -602,7 +602,7 @@ public class IndexShardTests extends IndexShardTestCase { if (Assertions.ENABLED && indexShard.routingEntry().isRelocationTarget() == false) { assertThat(expectThrows(AssertionError.class, () -> indexShard.acquireReplicaOperationPermit(primaryTerm, - indexShard.getGlobalCheckpoint(), new ActionListener() { + indexShard.getGlobalCheckpoint(), indexShard.getMaxSeqNoOfUpdatesOrDeletes(), new ActionListener() { @Override public void onResponse(Releasable releasable) { fail(); @@ -628,7 +628,8 @@ public class IndexShardTests extends IndexShardTestCase { private Releasable acquireReplicaOperationPermitBlockingly(IndexShard indexShard, long opPrimaryTerm) throws ExecutionException, InterruptedException { PlainActionFuture fut = new PlainActionFuture<>(); - indexShard.acquireReplicaOperationPermit(opPrimaryTerm, 
indexShard.getGlobalCheckpoint(), fut, ThreadPool.Names.WRITE, ""); + indexShard.acquireReplicaOperationPermit(opPrimaryTerm, indexShard.getGlobalCheckpoint(), + randomNonNegativeLong(), fut, ThreadPool.Names.WRITE, ""); return fut.get(); } @@ -712,8 +713,8 @@ public class IndexShardTests extends IndexShardTestCase { } }; - indexShard.acquireReplicaOperationPermit(primaryTerm - 1, SequenceNumbers.UNASSIGNED_SEQ_NO, onLockAcquired, - ThreadPool.Names.WRITE, ""); + indexShard.acquireReplicaOperationPermit(primaryTerm - 1, SequenceNumbers.UNASSIGNED_SEQ_NO, + randomNonNegativeLong(), onLockAcquired, ThreadPool.Names.WRITE, ""); assertFalse(onResponse.get()); assertTrue(onFailure.get()); @@ -785,6 +786,7 @@ public class IndexShardTests extends IndexShardTestCase { indexShard.acquireReplicaOperationPermit( newPrimaryTerm, newGlobalCheckPoint, + randomNonNegativeLong(), listener, ThreadPool.Names.SAME, ""); } catch (Exception e) { @@ -836,6 +838,22 @@ public class IndexShardTests extends IndexShardTestCase { closeShards(indexShard); } + public void testAcquireReplicaPermitAdvanceMaxSeqNoOfUpdates() throws Exception { + IndexShard replica = newStartedShard(false); + assertThat(replica.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(SequenceNumbers.NO_OPS_PERFORMED)); + long currentMaxSeqNoOfUpdates = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, Long.MAX_VALUE); + replica.advanceMaxSeqNoOfUpdatesOrDeletes(currentMaxSeqNoOfUpdates); + + long newMaxSeqNoOfUpdates = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, Long.MAX_VALUE); + PlainActionFuture fut = new PlainActionFuture<>(); + replica.acquireReplicaOperationPermit(replica.operationPrimaryTerm, replica.getGlobalCheckpoint(), + newMaxSeqNoOfUpdates, fut, ThreadPool.Names.WRITE, ""); + try (Releasable ignored = fut.actionGet()) { + assertThat(replica.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(Math.max(currentMaxSeqNoOfUpdates, newMaxSeqNoOfUpdates))); + } + closeShards(replica); + } + public void testGlobalCheckpointSync() throws IOException { // create the primary shard with a callback that sets a boolean when the global checkpoint sync is invoked final ShardId shardId = new ShardId("index", "_na_", 0); @@ -896,27 +914,24 @@ public class IndexShardTests extends IndexShardTestCase { closeShards(replicaShard, primaryShard); } - public void testRestoreLocalCheckpointTrackerFromTranslogOnPromotion() throws IOException, InterruptedException { + public void testRestoreLocalHistoryFromTranslogOnPromotion() throws IOException, InterruptedException { final IndexShard indexShard = newStartedShard(false); final int operations = 1024 - scaledRandomIntBetween(0, 1024); indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED)); final long maxSeqNo = indexShard.seqNoStats().getMaxSeqNo(); - final long globalCheckpointOnReplica = SequenceNumbers.UNASSIGNED_SEQ_NO; - randomIntBetween( - Math.toIntExact(SequenceNumbers.UNASSIGNED_SEQ_NO), - Math.toIntExact(indexShard.getLocalCheckpoint())); + final long globalCheckpointOnReplica = randomLongBetween(SequenceNumbers.UNASSIGNED_SEQ_NO, indexShard.getLocalCheckpoint()); indexShard.updateGlobalCheckpointOnReplica(globalCheckpointOnReplica, "test"); - final int globalCheckpoint = - randomIntBetween( - Math.toIntExact(SequenceNumbers.UNASSIGNED_SEQ_NO), - Math.toIntExact(indexShard.getLocalCheckpoint())); - + final long globalCheckpoint = randomLongBetween(SequenceNumbers.UNASSIGNED_SEQ_NO, indexShard.getLocalCheckpoint()); + final long currentMaxSeqNoOfUpdates = 
indexShard.getMaxSeqNoOfUpdatesOrDeletes(); + final long maxSeqNoOfUpdatesOrDeletes = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, maxSeqNo); + final Set docsBeforeRollback = getShardDocUIDs(indexShard); final CountDownLatch latch = new CountDownLatch(1); indexShard.acquireReplicaOperationPermit( indexShard.getPendingPrimaryTerm() + 1, globalCheckpoint, + maxSeqNoOfUpdatesOrDeletes, new ActionListener() { @Override public void onResponse(Releasable releasable) { @@ -932,6 +947,9 @@ public class IndexShardTests extends IndexShardTestCase { ThreadPool.Names.SAME, ""); latch.await(); + assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(Collections.max( + Arrays.asList(maxSeqNoOfUpdatesOrDeletes, globalCheckpoint, globalCheckpointOnReplica)) + )); final ShardRouting newRouting = indexShard.routingEntry().moveActiveReplicaToPrimary(); final CountDownLatch resyncLatch = new CountDownLatch(1); @@ -946,6 +964,10 @@ public class IndexShardTests extends IndexShardTestCase { resyncLatch.await(); assertThat(indexShard.getLocalCheckpoint(), equalTo(maxSeqNo)); assertThat(indexShard.seqNoStats().getMaxSeqNo(), equalTo(maxSeqNo)); + assertThat(getShardDocUIDs(indexShard), equalTo(docsBeforeRollback)); + assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(Collections.max( + Arrays.asList(currentMaxSeqNoOfUpdates, maxSeqNoOfUpdatesOrDeletes, globalCheckpoint, globalCheckpointOnReplica)) + )); closeShard(indexShard, false); } @@ -965,9 +987,11 @@ public class IndexShardTests extends IndexShardTestCase { final boolean shouldRollback = Math.max(globalCheckpoint, globalCheckpointOnReplica) < indexShard.seqNoStats().getMaxSeqNo() && indexShard.seqNoStats().getMaxSeqNo() != SequenceNumbers.NO_OPS_PERFORMED; final Engine beforeRollbackEngine = indexShard.getEngine(); + final long newMaxSeqNoOfUpdates = randomLongBetween(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), Long.MAX_VALUE); indexShard.acquireReplicaOperationPermit( indexShard.pendingPrimaryTerm + 1, globalCheckpoint, + newMaxSeqNoOfUpdates, new ActionListener() { @Override public void onResponse(final Releasable releasable) { @@ -994,6 +1018,7 @@ public class IndexShardTests extends IndexShardTestCase { } else { assertThat(indexShard.getEngine(), sameInstance(beforeRollbackEngine)); } + assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(newMaxSeqNoOfUpdates)); // ensure that after the local checkpoint throw back and indexing again, the local checkpoint advances final Result result = indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(indexShard.getLocalCheckpoint())); assertThat(indexShard.getLocalCheckpoint(), equalTo((long) result.localCheckpoint)); @@ -1020,6 +1045,7 @@ public class IndexShardTests extends IndexShardTestCase { indexShard.acquireReplicaOperationPermit( primaryTerm + increment, indexShard.getGlobalCheckpoint(), + randomNonNegativeLong(), new ActionListener() { @Override public void onResponse(Releasable releasable) { @@ -1632,6 +1658,7 @@ public class IndexShardTests extends IndexShardTestCase { * - If flush and then recover from the existing store, delete #1 will be removed while index #0 is still retained and replayed. 
*/ final IndexShard shard = newStartedShard(false); + shard.advanceMaxSeqNoOfUpdatesOrDeletes(1); // manually advance msu for this delete shard.applyDeleteOperationOnReplica(1, 2, "_doc", "id"); shard.getEngine().rollTranslogGeneration(); // isolate the delete in it's own generation shard.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, @@ -1701,6 +1728,7 @@ public class IndexShardTests extends IndexShardTestCase { assertThat(newShard.getLocalCheckpoint(), equalTo(totalOps - 1L)); assertThat(newShard.getReplicationTracker().getTrackedLocalCheckpointForShard(newShard.routingEntry().allocationId().getId()) .getLocalCheckpoint(), equalTo(totalOps - 1L)); + assertThat(newShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(totalOps - 1L)); assertDocCount(newShard, totalOps); assertThat(newShard.getHistoryUUID(), equalTo(historyUUID)); closeShards(newShard); @@ -1751,7 +1779,7 @@ public class IndexShardTests extends IndexShardTestCase { public void testRecoverFromStoreWithNoOps() throws IOException { final IndexShard shard = newStartedShard(true); indexDoc(shard, "_doc", "0"); - Engine.IndexResult test = indexDoc(shard, "_doc", "1"); + indexDoc(shard, "_doc", "1"); // start a replica shard and index the second doc final IndexShard otherShard = newStartedShard(false); updateMappings(otherShard, shard.indexSettings().getIndexMetaData()); @@ -2196,8 +2224,10 @@ public class IndexShardTests extends IndexShardTestCase { new RecoveryTarget(shard, discoveryNode, recoveryListener, aLong -> { }) { @Override - public long indexTranslogOperations(List operations, int totalTranslogOps) throws IOException { - final long localCheckpoint = super.indexTranslogOperations(operations, totalTranslogOps); + public long indexTranslogOperations(List operations, int totalTranslogOps, long maxSeenAutoIdTimestamp, + long maxSeqNoOfUpdatesOrDeletes) throws IOException { + final long localCheckpoint = super.indexTranslogOperations( + operations, totalTranslogOps, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes); assertFalse(replica.isSyncNeeded()); return localCheckpoint; } @@ -2303,8 +2333,10 @@ public class IndexShardTests extends IndexShardTestCase { new RecoveryTarget(shard, discoveryNode, recoveryListener, aLong -> { }) { @Override - public long indexTranslogOperations(List operations, int totalTranslogOps) throws IOException { - final long localCheckpoint = super.indexTranslogOperations(operations, totalTranslogOps); + public long indexTranslogOperations(List operations, int totalTranslogOps, + long maxAutoIdTimestamp, long maxSeqNoOfUpdatesOrDeletes) throws IOException { + final long localCheckpoint = super.indexTranslogOperations( + operations, totalTranslogOps, maxAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes); // Shard should now be active since we did recover: assertTrue(replica.isActive()); return localCheckpoint; @@ -2350,8 +2382,10 @@ public class IndexShardTests extends IndexShardTestCase { } @Override - public long indexTranslogOperations(List operations, int totalTranslogOps) throws IOException { - final long localCheckpoint = super.indexTranslogOperations(operations, totalTranslogOps); + public long indexTranslogOperations(List operations, int totalTranslogOps, + long maxAutoIdTimestamp, long maxSeqNoOfUpdatesOrDeletes) throws IOException { + final long localCheckpoint = super.indexTranslogOperations( + operations, totalTranslogOps, maxAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes); assertListenerCalled.accept(replica); return localCheckpoint; } @@ -2842,12 +2876,10 @@ 
public class IndexShardTests extends IndexShardTestCase { class Result { private final int localCheckpoint; private final int maxSeqNo; - private final boolean gap; - Result(final int localCheckpoint, final int maxSeqNo, final boolean gap) { + Result(final int localCheckpoint, final int maxSeqNo) { this.localCheckpoint = localCheckpoint; this.maxSeqNo = maxSeqNo; - this.gap = gap; } } @@ -2886,7 +2918,7 @@ public class IndexShardTests extends IndexShardTestCase { } assert localCheckpoint == indexShard.getLocalCheckpoint(); assert !gap || (localCheckpoint != max); - return new Result(localCheckpoint, max, gap); + return new Result(localCheckpoint, max); } /** A dummy repository for testing which just needs restore overridden */ @@ -3434,6 +3466,7 @@ public class IndexShardTests extends IndexShardTestCase { assertThat(getShardDocUIDs(shard), equalTo(docBelowGlobalCheckpoint)); assertThat(shard.seqNoStats().getMaxSeqNo(), equalTo(globalCheckpoint)); assertThat(shard.translogStats().estimatedNumberOfOperations(), equalTo(translogStats.estimatedNumberOfOperations())); + assertThat(shard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(globalCheckpoint)); closeShard(shard, false); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java index 29b16ca28f4..28e625b34df 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java @@ -76,7 +76,7 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase { // Index doc but not advance local checkpoint. shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, SourceToParse.source(shard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON), - IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + randomBoolean() ? IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP : randomNonNegativeLong(), true); } long globalCheckPoint = numDocs > 0 ? 
randomIntBetween(0, numDocs - 1) : 0; @@ -105,6 +105,8 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase { .findFirst() .isPresent(), is(false)); + + assertThat(resyncRequest.getMaxSeenAutoIdTimestampOnPrimary(), equalTo(shard.getMaxSeenAutoIdTimestamp())); } if (syncNeeded && globalCheckPoint < numDocs - 1) { if (shard.indexSettings.isSoftDeleteEnabled()) { @@ -208,10 +210,18 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase { assertEquals(status.hashCode(), sameStatus.hashCode()); switch (randomInt(3)) { - case 0: task.setPhase("otherPhase"); break; - case 1: task.setResyncedOperations(task.getResyncedOperations() + 1); break; - case 2: task.setSkippedOperations(task.getSkippedOperations() + 1); break; - case 3: task.setTotalOperations(task.getTotalOperations() + 1); break; + case 0: + task.setPhase("otherPhase"); + break; + case 1: + task.setResyncedOperations(task.getResyncedOperations() + 1); + break; + case 2: + task.setSkippedOperations(task.getSkippedOperations() + 1); + break; + case 3: + task.setTotalOperations(task.getTotalOperations() + 1); + break; } PrimaryReplicaSyncer.ResyncTask.Status differentStatus = task.getStatus(); diff --git a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index 2492ab4cd8a..25f6bb75cc8 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -44,7 +44,6 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.engine.InternalEngine; -import org.elasticsearch.index.fieldvisitor.SingleFieldsVisitor; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParsedDocument; @@ -127,6 +126,7 @@ public class RefreshListenersTests extends ESTestCase { new NoneCircuitBreakerService(), () -> SequenceNumbers.NO_OPS_PERFORMED, () -> primaryTerm, EngineTestCase.tombstoneDocSupplier()); engine = new InternalEngine(config); + engine.initializeMaxSeqNoOfUpdatesOrDeletes(); engine.recoverFromTranslog((e, s) -> 0, Long.MAX_VALUE); listeners.setCurrentRefreshLocationSupplier(engine::getTranslogLastWriteLocation); } @@ -323,9 +323,9 @@ public class RefreshListenersTests extends ESTestCase { try (Engine.GetResult getResult = engine.get(get, engine::acquireSearcher)) { assertTrue("document not found", getResult.exists()); assertEquals(iteration, getResult.version()); - SingleFieldsVisitor visitor = new SingleFieldsVisitor("test"); - getResult.docIdAndVersion().reader.document(getResult.docIdAndVersion().docId, visitor); - assertEquals(Arrays.asList(testFieldValue), visitor.fields().get("test")); + org.apache.lucene.document.Document document = + getResult.docIdAndVersion().reader.document(getResult.docIdAndVersion().docId); + assertEquals(new String[] {testFieldValue}, document.getValues("test")); } } catch (Exception t) { throw new RuntimeException("failure on the [" + iteration + "] iteration of thread [" + threadId + "]", t); diff --git a/server/src/test/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java b/server/src/test/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java index dc3be31734d..3c52497539e 100644 --- 
a/server/src/test/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java @@ -293,11 +293,9 @@ public class RemoveCorruptedShardDataCommandIT extends ESIntegTestCase { indexRandom(false, false, false, Arrays.asList(builders)); Set translogDirs = getDirs(indexName, ShardPath.TRANSLOG_FOLDER_NAME); - // that's only for 6.x branch for bwc with elasticsearch-translog - final boolean translogOnly = randomBoolean(); - final RemoveCorruptedShardDataCommand command = new RemoveCorruptedShardDataCommand(translogOnly); - final MockTerminal terminal = new MockTerminal(); - final OptionParser parser = command.getParser(); + RemoveCorruptedShardDataCommand command = new RemoveCorruptedShardDataCommand(); + MockTerminal terminal = new MockTerminal(); + OptionParser parser = command.getParser(); if (randomBoolean() && numDocsToTruncate > 0) { // flush the replica, so it will have more docs than what the primary will have diff --git a/server/src/test/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandTests.java b/server/src/test/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandTests.java index 25330eff328..6e34bb03860 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandTests.java @@ -178,10 +178,10 @@ public class RemoveCorruptedShardDataCommandTests extends IndexShardTestCase { } else { assertThat(e.getMessage(), containsString("aborted by user")); } + } finally { + logger.info("--> output:\n{}", t.getOutput()); } - logger.info("--> output:\n{}", t.getOutput()); - if (corruptSegments == false) { // run command without dry-run diff --git a/server/src/test/java/org/elasticsearch/index/shard/StoreRecoveryTests.java b/server/src/test/java/org/elasticsearch/index/shard/StoreRecoveryTests.java index 9a889801c6c..7d63286c44e 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/StoreRecoveryTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/StoreRecoveryTests.java @@ -39,12 +39,12 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.engine.InternalEngine; +import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -118,7 +118,7 @@ public class StoreRecoveryTests extends ESTestCase { final Map userData = segmentCommitInfos.getUserData(); assertThat(userData.get(SequenceNumbers.MAX_SEQ_NO), equalTo(Long.toString(maxSeqNo))); assertThat(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY), equalTo(Long.toString(maxSeqNo))); - assertThat(userData.get(InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID), equalTo(Long.toString(maxUnsafeAutoIdTimestamp))); + assertThat(userData.get(Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID), equalTo(Long.toString(maxUnsafeAutoIdTimestamp))); for (SegmentCommitInfo info : 
segmentCommitInfos) { // check that we didn't merge assertEquals("all sources must be flush", info.info.getDiagnostics().get("source"), "flush"); @@ -142,7 +142,6 @@ public class StoreRecoveryTests extends ESTestCase { } else { indexSort = null; } - int id = 0; IndexWriterConfig iwc = newIndexWriterConfig() .setMergePolicy(NoMergePolicy.INSTANCE) .setOpenMode(IndexWriterConfig.OpenMode.CREATE); @@ -179,7 +178,7 @@ public class StoreRecoveryTests extends ESTestCase { final Map userData = segmentCommitInfos.getUserData(); assertThat(userData.get(SequenceNumbers.MAX_SEQ_NO), equalTo(Long.toString(maxSeqNo))); assertThat(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY), equalTo(Long.toString(maxSeqNo))); - assertThat(userData.get(InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID), equalTo(Long.toString(maxUnsafeAutoIdTimestamp))); + assertThat(userData.get(Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID), equalTo(Long.toString(maxUnsafeAutoIdTimestamp))); for (SegmentCommitInfo info : segmentCommitInfos) { // check that we didn't merge assertEquals("all sources must be flush", info.info.getDiagnostics().get("source"), "flush"); diff --git a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java index 584ce9b0642..6546e6ebc8c 100644 --- a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -364,10 +364,8 @@ public class StoreTests extends ESTestCase { } - final long luceneChecksum; try (IndexInput indexInput = dir.openInput("lucene_checksum.bin", IOContext.DEFAULT)) { assertEquals(luceneFileLength, indexInput.length()); - luceneChecksum = CodecUtil.retrieveChecksum(indexInput); } dir.close(); @@ -663,7 +661,6 @@ public class StoreTests extends ESTestCase { if (randomBoolean()) { store.cleanupAndVerify("test", firstMeta); String[] strings = store.directory().listAll(); - int numChecksums = 0; int numNotFound = 0; for (String file : strings) { if (file.startsWith("extra")) { @@ -679,7 +676,6 @@ public class StoreTests extends ESTestCase { } else { store.cleanupAndVerify("test", secondMeta); String[] strings = store.directory().listAll(); - int numChecksums = 0; int numNotFound = 0; for (String file : strings) { if (file.startsWith("extra")) { diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index a0e0c481e5f..9d18845a05e 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -184,7 +184,7 @@ public class TranslogTests extends ESTestCase { markCurrentGenAsCommitted(translog); } - private void commit(Translog translog, long genToRetain, long genToCommit) throws IOException { + private long commit(Translog translog, long genToRetain, long genToCommit) throws IOException { final TranslogDeletionPolicy deletionPolicy = translog.getDeletionPolicy(); deletionPolicy.setTranslogGenerationOfLastCommit(genToCommit); deletionPolicy.setMinTranslogGenerationForRecovery(genToRetain); @@ -192,6 +192,7 @@ public class TranslogTests extends ESTestCase { translog.trimUnreferencedReaders(); assertThat(minGenRequired, equalTo(translog.getMinFileGeneration())); assertFilePresences(translog); + return minGenRequired; } @Override @@ -638,12 +639,8 @@ public class TranslogTests extends ESTestCase { 
         assertTrue(Files.exists(translogDir.resolve(Translog.getFilename(1))));
         translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[]{1}));
         translog.close();
-        try {
-            Translog.Snapshot snapshot = translog.newSnapshot();
-            fail("translog is closed");
-        } catch (AlreadyClosedException ex) {
-            assertEquals(ex.getMessage(), "translog is already closed");
-        }
+        AlreadyClosedException ex = expectThrows(AlreadyClosedException.class, () -> translog.newSnapshot());
+        assertEquals(ex.getMessage(), "translog is already closed");
     }

     public void testSnapshotFromMinGen() throws Exception {
@@ -844,7 +841,7 @@
         try (Translog translog = openTranslog(config, uuid)) {
             try (Translog.Snapshot snapshot = translog.newSnapshot()) {
-                for (Location loc : locations) {
+                for (int i = 0; i < locations.size(); i++) {
                     snapshot.next();
                 }
             }
@@ -870,7 +867,7 @@
         AtomicInteger truncations = new AtomicInteger(0);
         try (Translog.Snapshot snap = translog.newSnapshot()) {
-            for (Translog.Location location : locations) {
+            for (int i = 0; i < locations.size(); i++) {
                 try {
                     assertNotNull(snap.next());
                 } catch (EOFException e) {
@@ -2377,6 +2374,7 @@
         }

+        @Override
         public int write(ByteBuffer src) throws IOException {
             if (fail.fail()) {
                 if (partialWrite) {
@@ -2485,14 +2483,9 @@
         // don't copy the new file
         Files.createFile(config.getTranslogPath().resolve("translog-" + (read.generation + 1) + ".tlog"));
-        try {
-            Translog tlog = new Translog(config, translog.getTranslogUUID(), translog.getDeletionPolicy(), () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get);
-            fail("file already exists?");
-        } catch (TranslogException ex) {
-            // all is well
-            assertEquals(ex.getMessage(), "failed to create new translog file");
-            assertEquals(ex.getCause().getClass(), FileAlreadyExistsException.class);
-        }
+        TranslogException ex = expectThrows(TranslogException.class, () -> new Translog(config, translog.getTranslogUUID(), translog.getDeletionPolicy(), () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get));
+        assertEquals(ex.getMessage(), "failed to create new translog file");
+        assertEquals(ex.getCause().getClass(), FileAlreadyExistsException.class);
     }

     public void testRecoverWithUnbackedNextGenAndFutureFile() throws IOException {
@@ -2520,14 +2513,10 @@
             tlog.add(new Translog.Index("test", "" + 1, 1, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
         }
-        try {
-            Translog tlog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get);
-            fail("file already exists?");
-        } catch (TranslogException ex) {
-            // all is well
-            assertEquals(ex.getMessage(), "failed to create new translog file");
-            assertEquals(ex.getCause().getClass(), FileAlreadyExistsException.class);
-        }
+        TranslogException ex = expectThrows(TranslogException.class,
+            () -> new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get));
+        assertEquals(ex.getMessage(), "failed to create new translog file");
+        assertEquals(ex.getCause().getClass(), FileAlreadyExistsException.class);
     }

     /**
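The three try/fail/catch blocks removed above all follow the same shape, which `expectThrows` collapses into one line while also returning the exception for follow-up assertions. A minimal, self-contained sketch of the idiom (plain JUnit; `parsePositive` is a made-up method for illustration):

--------------------------------------
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows; // JUnit 4.13+; ESTestCase inherits an equivalent expectThrows from LuceneTestCase

public class ExpectThrowsExample {

    static int parsePositive(String s) {
        int value = Integer.parseInt(s);
        if (value <= 0) {
            throw new IllegalArgumentException("value must be positive");
        }
        return value;
    }

    public static void main(String[] args) {
        // Replaces: try { parsePositive("-1"); fail(...); } catch (IllegalArgumentException e) { ... }
        IllegalArgumentException e =
                assertThrows(IllegalArgumentException.class, () -> parsePositive("-1"));
        assertEquals("value must be positive", e.getMessage());
        System.out.println("caught as expected: " + e.getMessage());
    }
}
--------------------------------------

Unlike the hand-rolled version, a missing exception fails with a descriptive error rather than a bare fail() message, and the returned instance lets the test assert directly on the message and cause, as the new hunks do.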
@@ -3054,6 +3043,31 @@
         misbehavingTranslog.callCloseOnTragicEvent();
     }

+    public void testMaxSeqNo() throws Exception {
+        Map<Long, Long> maxSeqNoPerGeneration = new HashMap<>();
+        for (int iterations = between(1, 10), i = 0; i < iterations; i++) {
+            long startSeqNo = randomLongBetween(0, Integer.MAX_VALUE);
+            List<Long> seqNos = LongStream.range(startSeqNo, startSeqNo + randomInt(100)).boxed().collect(Collectors.toList());
+            Randomness.shuffle(seqNos);
+            for (long seqNo : seqNos) {
+                if (frequently()) {
+                    translog.add(new Translog.Index("test", "id", seqNo, primaryTerm.get(), new byte[]{1}));
+                    maxSeqNoPerGeneration.compute(translog.currentFileGeneration(),
+                        (key, existing) -> existing == null ? seqNo : Math.max(existing, seqNo));
+                }
+            }
+            translog.rollGeneration();
+        }
+        translog.sync();
+        assertThat(translog.getMaxSeqNo(),
+            equalTo(maxSeqNoPerGeneration.isEmpty() ? SequenceNumbers.NO_OPS_PERFORMED : Collections.max(maxSeqNoPerGeneration.values())));
+        long minRetainedGen = commit(translog, randomLongBetween(1, translog.currentFileGeneration()), translog.currentFileGeneration());
+        long expectedMaxSeqNo = maxSeqNoPerGeneration.entrySet().stream()
+            .filter(e -> e.getKey() >= minRetainedGen).mapToLong(e -> e.getValue())
+            .max().orElse(SequenceNumbers.NO_OPS_PERFORMED);
+        assertThat(translog.getMaxSeqNo(), equalTo(expectedMaxSeqNo));
+    }
+
     static class SortedSnapshot implements Translog.Snapshot {
         private final Translog.Snapshot snapshot;
         private List<Translog.Operation> operations = null;
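`testMaxSeqNo` above records the highest sequence number seen in each translog generation and, after trimming, expects the overall maximum taken only over the generations that are still retained. The bookkeeping itself is a small map-and-reduce; a standalone sketch with hypothetical (generation, seqNo) pairs:

--------------------------------------
import java.util.HashMap;
import java.util.Map;

public class MaxSeqNoPerGenerationExample {
    public static void main(String[] args) {
        // Hypothetical (generation, seqNo) pairs, in arrival order.
        long[][] ops = {{1, 5}, {1, 9}, {2, 3}, {2, 12}, {3, 7}};

        // Highest seqNo observed in each generation.
        Map<Long, Long> maxSeqNoPerGeneration = new HashMap<>();
        for (long[] op : ops) {
            maxSeqNoPerGeneration.merge(op[0], op[1], Math::max);
        }

        // After trimming, only generations >= minRetainedGen may contribute.
        long minRetainedGen = 2;
        long maxSeqNo = maxSeqNoPerGeneration.entrySet().stream()
                .filter(e -> e.getKey() >= minRetainedGen)
                .mapToLong(e -> e.getValue())
                .max()
                .orElse(-1L); // -1 plays the role of SequenceNumbers.NO_OPS_PERFORMED
        System.out.println(maxSeqNo); // 12: generation 1's maximum (9) has been trimmed away
    }
}
--------------------------------------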
diff --git a/server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java b/server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java
index 01d7dc2a535..e5a23b155e8 100644
--- a/server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java
@@ -295,7 +295,6 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase {
             .put("indices.memory.index_buffer_size", "4mb").build());
         IndexShard shard0 = test.getShard(0);
         IndexShard shard1 = test.getShard(1);
-        IndexShard shard2 = test.getShard(2);
         controller.simulateIndexing(shard0);
         controller.simulateIndexing(shard0);
         controller.simulateIndexing(shard0);
diff --git a/server/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/server/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
index f814f4c227a..2094c20c890 100644
--- a/server/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
+++ b/server/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
@@ -121,54 +121,6 @@ public class AnalyzeActionIT extends ESIntegTestCase {
         assertThat(analyzeResponse.getTokens().get(0).getPositionLength(), equalTo(1));
     }

-    public void testAnalyzeWithNonDefaultPostionLength() throws Exception {
-        assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
-            .setSettings(Settings.builder().put(indexSettings())
-                .put("index.analysis.filter.syns.type", "synonym")
-                .putList("index.analysis.filter.syns.synonyms", "wtf, what the fudge")
-                .put("index.analysis.analyzer.custom_syns.tokenizer", "standard")
-                .putList("index.analysis.analyzer.custom_syns.filter", "lowercase", "syns")));
-        ensureGreen();
-
-        AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("say what the fudge").setIndex("test").setAnalyzer("custom_syns").get();
-        assertThat(analyzeResponse.getTokens().size(), equalTo(5));
-
-        AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0);
-        assertThat(token.getTerm(), equalTo("say"));
-        assertThat(token.getPosition(), equalTo(0));
-        assertThat(token.getStartOffset(), equalTo(0));
-        assertThat(token.getEndOffset(), equalTo(3));
-        assertThat(token.getPositionLength(), equalTo(1));
-
-        token = analyzeResponse.getTokens().get(1);
-        assertThat(token.getTerm(), equalTo("what"));
-        assertThat(token.getPosition(), equalTo(1));
-        assertThat(token.getStartOffset(), equalTo(4));
-        assertThat(token.getEndOffset(), equalTo(8));
-        assertThat(token.getPositionLength(), equalTo(1));
-
-        token = analyzeResponse.getTokens().get(2);
-        assertThat(token.getTerm(), equalTo("wtf"));
-        assertThat(token.getPosition(), equalTo(1));
-        assertThat(token.getStartOffset(), equalTo(4));
-        assertThat(token.getEndOffset(), equalTo(18));
-        assertThat(token.getPositionLength(), equalTo(3));
-
-        token = analyzeResponse.getTokens().get(3);
-        assertThat(token.getTerm(), equalTo("the"));
-        assertThat(token.getPosition(), equalTo(2));
-        assertThat(token.getStartOffset(), equalTo(9));
-        assertThat(token.getEndOffset(), equalTo(12));
-        assertThat(token.getPositionLength(), equalTo(1));
-
-        token = analyzeResponse.getTokens().get(4);
-        assertThat(token.getTerm(), equalTo("fudge"));
-        assertThat(token.getPosition(), equalTo(3));
-        assertThat(token.getStartOffset(), equalTo(13));
-        assertThat(token.getEndOffset(), equalTo(18));
-        assertThat(token.getPositionLength(), equalTo(1));
-    }
-
     public void testAnalyzerWithFieldOrTypeTests() throws Exception {
         assertAcked(prepareCreate("test").addAlias(new Alias("alias")));
         ensureGreen();
diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java b/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java
index c68e4870aae..e3c15ceda1d 100644
--- a/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java
+++ b/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java
@@ -324,7 +324,6 @@ public abstract class AbstractIndicesClusterStateServiceTestCase extends ESTestC
      * Mock for {@link IndexShard}
      */
    protected class MockIndexShard implements IndicesClusterStateService.Shard {
-        private volatile long clusterStateVersion;
        private volatile ShardRouting shardRouting;
        private volatile RecoveryState recoveryState;
        private volatile Set<String> inSyncAllocationIds;
@@ -372,7 +371,6 @@ public abstract class AbstractIndicesClusterStateServiceTestCase extends ESTestC
             this.shardRouting = shardRouting;
             if (shardRouting.primary()) {
                 term = newPrimaryTerm;
-                this.clusterStateVersion = applyingClusterStateVersion;
                 this.inSyncAllocationIds = inSyncAllocationIds;
                 this.routingTable = routingTable;
             }
diff --git a/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java
index 4483ce0d606..ea23ae6308e 100644
--- a/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java
+++ b/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java
@@ -239,7 +239,8 @@ public class FlushIT extends ESIntegTestCase {

     private void indexDoc(Engine engine, String id) throws IOException {
         final ParsedDocument doc = InternalEngineTests.createParsedDoc(id, null);
-        final Engine.IndexResult indexResult = engine.index(new Engine.Index(new Term("_id", Uid.encodeId(doc.id())), 1L, doc));
+        final Engine.IndexResult indexResult = engine.index(new Engine.Index(new Term("_id", Uid.encodeId(doc.id())), doc,
+            engine.getLocalCheckpoint() + 1, 1L, 1L, null, Engine.Operation.Origin.REPLICA, randomLong(), -1L, false));
         assertThat(indexResult.getFailure(),
nullValue()); } diff --git a/server/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java b/server/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java index af9bf9910ec..2e54490ed78 100644 --- a/server/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/server/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -74,7 +74,6 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); int recCount = randomIntBetween(200, 600); - int numberOfTypes = randomIntBetween(1, 5); List indexRequests = new ArrayList<>(); for (int rec = 0; rec < recCount; rec++) { String type = "type"; diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 6a6970675eb..c24ba2f8612 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -57,7 +57,6 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.junit.annotations.TestLogging; -import org.elasticsearch.test.store.MockFSDirectoryService; import org.elasticsearch.test.store.MockFSIndexStore; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.test.transport.StubbableTransport; @@ -247,7 +246,6 @@ public class IndexRecoveryIT extends ESIntegTestCase { validateIndexRecoveryState(nodeBRecoveryState.getIndex()); } - @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/32686") @TestLogging( "_root:DEBUG," + "org.elasticsearch.cluster.service:TRACE," @@ -550,7 +548,6 @@ public class IndexRecoveryIT extends ESIntegTestCase { final Settings nodeSettings = Settings.builder() .put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(), "100ms") .put(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.getKey(), "1s") - .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING.getKey(), false) // restarted recoveries will delete temp files and write them again .build(); // start a master node internalCluster().startNode(nodeSettings); diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index 0f7a72aacf3..7791e51445a 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -207,11 +207,12 @@ public class RecoverySourceHandlerTests extends ESTestCase { public Translog.Operation next() throws IOException { return operations.get(counter++); } - }); + }, randomNonNegativeLong(), randomNonNegativeLong()); final int expectedOps = (int) (endingSeqNo - startingSeqNo + 1); assertThat(result.totalOperations, equalTo(expectedOps)); final ArgumentCaptor shippedOpsCaptor = ArgumentCaptor.forClass(List.class); - verify(recoveryTarget).indexTranslogOperations(shippedOpsCaptor.capture(), ArgumentCaptor.forClass(Integer.class).capture()); + verify(recoveryTarget).indexTranslogOperations(shippedOpsCaptor.capture(), 
ArgumentCaptor.forClass(Integer.class).capture(), + ArgumentCaptor.forClass(Long.class).capture(), ArgumentCaptor.forClass(Long.class).capture()); List shippedOps = new ArrayList<>(); for (List list: shippedOpsCaptor.getAllValues()) { shippedOps.addAll(list); @@ -249,7 +250,7 @@ public class RecoverySourceHandlerTests extends ESTestCase { } while (op != null && opsToSkip.contains(op)); return op; } - })); + }, randomNonNegativeLong(), randomNonNegativeLong())); } } @@ -420,7 +421,8 @@ public class RecoverySourceHandlerTests extends ESTestCase { } @Override - long phase2(long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, Translog.Snapshot snapshot) throws IOException { + long phase2(long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, Translog.Snapshot snapshot, + long maxSeenAutoIdTimestamp, long maxSeqNoOfUpdatesOrDeletes) { phase2Called.set(true); return SequenceNumbers.UNASSIGNED_SEQ_NO; } diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java index 45535e19672..2a53c79448d 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java @@ -127,6 +127,7 @@ public class RecoveryTests extends ESIndexLevelReplicationTestCase { final String indexName = orgReplica.shardId().getIndexName(); // delete #1 + orgReplica.advanceMaxSeqNoOfUpdatesOrDeletes(1); // manually advance msu for this delete orgReplica.applyDeleteOperationOnReplica(1, 2, "type", "id"); getTranslog(orgReplica).rollGeneration(); // isolate the delete in it's own generation // index #0 @@ -190,6 +191,7 @@ public class RecoveryTests extends ESIndexLevelReplicationTestCase { final String indexName = orgReplica.shardId().getIndexName(); // delete #1 + orgReplica.advanceMaxSeqNoOfUpdatesOrDeletes(1); // manually advance msu for this delete orgReplica.applyDeleteOperationOnReplica(1, 2, "type", "id"); orgReplica.flush(new FlushRequest().force(true)); // isolate delete#1 in its own translog generation and lucene segment // index #0 diff --git a/server/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/server/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index 7e99ccbbe61..f9028a51a3c 100644 --- a/server/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/server/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -94,7 +94,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { } public void testIndexCleanup() throws Exception { - final String masterNode = internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false)); + internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false)); final String node_1 = internalCluster().startNode(Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false)); final String node_2 = internalCluster().startNode(Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false)); logger.info("--> creating index [test] with one shard and on replica"); @@ -325,7 +325,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { } public void testShardActiveElsewhereDoesNotDeleteAnother() throws Exception { - final String masterNode = internalCluster().startMasterOnlyNode(); + internalCluster().startMasterOnlyNode(); final List nodes = 
internalCluster().startDataOnlyNodes(4); final String node1 = nodes.get(0); diff --git a/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java index 12b4078ddf8..c7d4dfa4e68 100644 --- a/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java @@ -19,13 +19,6 @@ package org.elasticsearch.ingest; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CompletableFuture; -import java.util.function.Consumer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; @@ -34,6 +27,14 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; import org.elasticsearch.test.ESTestCase; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.function.Consumer; + import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; @@ -52,7 +53,8 @@ public class ConditionalProcessorTests extends ESTestCase { Script.DEFAULT_SCRIPT_LANG, Collections.singletonMap( scriptName, ctx -> trueValue.equals(ctx.get(conditionalField)) - ) + ), + Collections.emptyMap() ) ), new HashMap<>(ScriptModule.CORE_CONTEXTS) @@ -120,7 +122,8 @@ public class ConditionalProcessorTests extends ESTestCase { } return false; } - ) + ), + Collections.emptyMap() ) ), new HashMap<>(ScriptModule.CORE_CONTEXTS) diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index e3f52f35b79..4de39349dc5 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -19,16 +19,6 @@ package org.elasticsearch.ingest; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.ExecutorService; -import java.util.function.BiConsumer; -import java.util.function.Consumer; import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; @@ -59,13 +49,22 @@ import org.hamcrest.CustomTypeSafeMatcher; import org.mockito.ArgumentMatcher; import org.mockito.invocation.InvocationOnMock; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ExecutorService; +import java.util.function.BiConsumer; +import java.util.function.Consumer; + import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static 
org.hamcrest.Matchers.sameInstance;
@@ -769,16 +768,14 @@ public class IngestServiceTests extends ESTestCase {
         previousClusterState = clusterState;
         clusterState = IngestService.innerPut(putRequest, clusterState);
         ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));
-        final Map<String, PipelineConfiguration> configurationMap = new HashMap<>();
-        configurationMap.put("_id1", new PipelineConfiguration("_id1", new BytesArray("{}"), XContentType.JSON));
-        configurationMap.put("_id2", new PipelineConfiguration("_id2", new BytesArray("{}"), XContentType.JSON));
-        ingestService.updatePipelineStats(new IngestMetadata(configurationMap));
+
         @SuppressWarnings("unchecked")
         final BiConsumer<IndexRequest, Exception> failureHandler = mock(BiConsumer.class);
         @SuppressWarnings("unchecked")
         final Consumer<Exception> completionHandler = mock(Consumer.class);
         final IndexRequest indexRequest = new IndexRequest("_index");
         indexRequest.setPipeline("_id1");
+        indexRequest.source(randomAlphaOfLength(10), randomAlphaOfLength(10));
         ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {});
         final IngestStats afterFirstRequestStats = ingestService.stats();
         assertThat(afterFirstRequestStats.getStatsPerPipeline().size(), equalTo(2));
@@ -793,23 +790,60 @@ public class IngestServiceTests extends ESTestCase {
         assertThat(afterSecondRequestStats.getStatsPerPipeline().get("_id1").getIngestCount(), equalTo(1L));
         assertThat(afterSecondRequestStats.getStatsPerPipeline().get("_id2").getIngestCount(), equalTo(1L));
         assertThat(afterSecondRequestStats.getTotalStats().getIngestCount(), equalTo(2L));
+
+        //update cluster state and ensure that new stats are added to old stats
+        putRequest = new PutPipelineRequest("_id1",
+            new BytesArray("{\"processors\": [{\"mock\" : {}}, {\"mock\" : {}}]}"), XContentType.JSON);
+        previousClusterState = clusterState;
+        clusterState = IngestService.innerPut(putRequest, clusterState);
+        ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));
+        indexRequest.setPipeline("_id1");
+        ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {});
+        final IngestStats afterThirdRequestStats = ingestService.stats();
+        assertThat(afterThirdRequestStats.getStatsPerPipeline().size(), equalTo(2));
+        assertThat(afterThirdRequestStats.getStatsPerPipeline().get("_id1").getIngestCount(), equalTo(2L));
+        assertThat(afterThirdRequestStats.getStatsPerPipeline().get("_id2").getIngestCount(), equalTo(1L));
+        assertThat(afterThirdRequestStats.getTotalStats().getIngestCount(), equalTo(3L));
+    }
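The new assertions above pin down that per-pipeline counters accumulate across a pipeline redefinition (the count for `_id1` goes from 1 to 2) rather than resetting when the cluster state changes. A minimal sketch of one way to get that behavior, keeping the counters outside the pipeline objects so a rebuilt pipeline reuses its counter (hypothetical classes, not the actual IngestService internals):

--------------------------------------
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.LongAdder;

public class CumulativePipelineStats {

    // Keyed by pipeline id; survives pipeline reconfiguration.
    private final Map<String, LongAdder> ingestCounts = new ConcurrentHashMap<>();

    void onPipelineConfigured(String pipelineId) {
        // computeIfAbsent keeps an existing counter when a pipeline is redefined.
        ingestCounts.computeIfAbsent(pipelineId, id -> new LongAdder());
    }

    void onDocumentIngested(String pipelineId) {
        ingestCounts.computeIfAbsent(pipelineId, id -> new LongAdder()).increment();
    }

    long ingestCount(String pipelineId) {
        LongAdder count = ingestCounts.get(pipelineId);
        return count == null ? 0L : count.sum();
    }

    public static void main(String[] args) {
        CumulativePipelineStats stats = new CumulativePipelineStats();
        stats.onPipelineConfigured("_id1");
        stats.onDocumentIngested("_id1");
        stats.onPipelineConfigured("_id1"); // redefined; the counter survives
        stats.onDocumentIngested("_id1");
        System.out.println(stats.ingestCount("_id1")); // 2, not 1
    }
}
--------------------------------------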
-    // issue: https://github.com/elastic/elasticsearch/issues/18126
-    public void testUpdatingStatsWhenRemovingPipelineWorks() {
-        IngestService ingestService = createWithProcessors();
-        Map<String, PipelineConfiguration> configurationMap = new HashMap<>();
-        configurationMap.put("_id1", new PipelineConfiguration("_id1", new BytesArray("{}"), XContentType.JSON));
-        configurationMap.put("_id2", new PipelineConfiguration("_id2", new BytesArray("{}"), XContentType.JSON));
-        ingestService.updatePipelineStats(new IngestMetadata(configurationMap));
-        assertThat(ingestService.stats().getStatsPerPipeline(), hasKey("_id1"));
-        assertThat(ingestService.stats().getStatsPerPipeline(), hasKey("_id2"));
+    public void testExecuteWithDrop() {
+        Map<String, Processor.Factory> factories = new HashMap<>();
+        factories.put("drop", new DropProcessor.Factory());
+        factories.put("mock", (processorFactories, tag, config) -> new Processor() {
+            @Override
+            public IngestDocument execute(final IngestDocument ingestDocument) {
+                throw new AssertionError("Document should have been dropped but reached this processor");
+            }
-        configurationMap = new HashMap<>();
-        configurationMap.put("_id3", new PipelineConfiguration("_id3", new BytesArray("{}"), XContentType.JSON));
-        ingestService.updatePipelineStats(new IngestMetadata(configurationMap));
-        assertThat(ingestService.stats().getStatsPerPipeline(), not(hasKey("_id1")));
-        assertThat(ingestService.stats().getStatsPerPipeline(), not(hasKey("_id2")));
+            @Override
+            public String getType() {
+                return null;
+            }
+
+            @Override
+            public String getTag() {
+                return null;
+            }
+        });
+        IngestService ingestService = createWithProcessors(factories);
+        PutPipelineRequest putRequest = new PutPipelineRequest("_id",
+            new BytesArray("{\"processors\": [{\"drop\" : {}}, {\"mock\" : {}}]}"), XContentType.JSON);
+        ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty
+        ClusterState previousClusterState = clusterState;
+        clusterState = IngestService.innerPut(putRequest, clusterState);
+        ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));
+        final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()).setPipeline("_id");
+        @SuppressWarnings("unchecked")
+        final BiConsumer<IndexRequest, Exception> failureHandler = mock(BiConsumer.class);
+        @SuppressWarnings("unchecked")
+        final Consumer<Exception> completionHandler = mock(Consumer.class);
+        @SuppressWarnings("unchecked")
+        final Consumer<IndexRequest> dropHandler = mock(Consumer.class);
+        ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, dropHandler);
+        verify(failureHandler, never()).accept(any(), any());
+        verify(completionHandler, times(1)).accept(null);
+        verify(dropHandler, times(1)).accept(indexRequest);
+    }

     private IngestDocument eqIndexTypeId(final Map<String, Object> source) {
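`testExecuteWithDrop` relies on two observable effects: processors after the `drop` processor never run (the mock would throw an AssertionError), and the request is reported to the drop handler instead of the failure or completion paths. The contract is easiest to see in isolation; a compact sketch where a processor returning null means "discard this document" (hypothetical types, not the DropProcessor implementation):

--------------------------------------
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.UnaryOperator;

public class DropShortCircuitExample {

    // Runs processors in order; a null result short-circuits the chain.
    static Map<String, Object> execute(Map<String, Object> doc,
                                       List<UnaryOperator<Map<String, Object>>> processors,
                                       Consumer<Map<String, Object>> dropHandler) {
        for (UnaryOperator<Map<String, Object>> processor : processors) {
            Map<String, Object> result = processor.apply(doc);
            if (result == null) {
                dropHandler.accept(doc); // report the drop; later processors never run
                return null;
            }
            doc = result;
        }
        return doc;
    }

    public static void main(String[] args) {
        UnaryOperator<Map<String, Object>> drop = d -> null;
        UnaryOperator<Map<String, Object>> mustNotRun = d -> {
            throw new AssertionError("document should have been dropped");
        };
        Map<String, Object> out = execute(Collections.singletonMap("field", "value"),
                Arrays.asList(drop, mustNotRun), d -> System.out.println("dropped: " + d));
        System.out.println(out); // null, and no AssertionError was thrown
    }
}
--------------------------------------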
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/PipelineProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java
similarity index 59%
rename from modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/PipelineProcessorTests.java
rename to server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java
index 6e18bac40d4..018ded346d4 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/PipelineProcessorTests.java
+++ b/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java
@@ -16,21 +16,19 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.ingest.common;
+package org.elasticsearch.ingest;

+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.test.ESTestCase;
+
+import java.time.Clock;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.CompletableFuture;

-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.ingest.CompoundProcessor;
-import org.elasticsearch.ingest.IngestDocument;
-import org.elasticsearch.ingest.IngestService;
-import org.elasticsearch.ingest.Pipeline;
-import org.elasticsearch.ingest.Processor;
-import org.elasticsearch.ingest.RandomDocumentPicks;
-import org.elasticsearch.test.ESTestCase;
+import static org.hamcrest.CoreMatchers.equalTo;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -110,7 +108,7 @@ public class PipelineProcessorTests extends ESTestCase {
             () -> factory.create(Collections.emptyMap(), null, outerConfig).execute(testIngestDocument)
         );
         assertEquals(
-            "Recursive invocation of pipeline [inner] detected.", e.getRootCause().getMessage()
+            "Cycle detected for pipeline: inner", e.getRootCause().getMessage()
         );
     }
@@ -129,4 +127,81 @@
         outerProc.execute(testIngestDocument);
         outerProc.execute(testIngestDocument);
     }
+
+    public void testPipelineProcessorWithPipelineChain() throws Exception {
+        String pipeline1Id = "pipeline1";
+        String pipeline2Id = "pipeline2";
+        String pipeline3Id = "pipeline3";
+        IngestService ingestService = mock(IngestService.class);
+        PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
+
+        Map<String, Object> pipeline1ProcessorConfig = new HashMap<>();
+        pipeline1ProcessorConfig.put("pipeline", pipeline2Id);
+        PipelineProcessor pipeline1Processor = factory.create(Collections.emptyMap(), null, pipeline1ProcessorConfig);
+
+        Map<String, Object> pipeline2ProcessorConfig = new HashMap<>();
+        pipeline2ProcessorConfig.put("pipeline", pipeline3Id);
+        PipelineProcessor pipeline2Processor = factory.create(Collections.emptyMap(), null, pipeline2ProcessorConfig);
+
+        Clock clock = mock(Clock.class);
+        when(clock.millis()).thenReturn(0L).thenReturn(0L);
+        Pipeline pipeline1 = new Pipeline(
+            pipeline1Id, null, null, new CompoundProcessor(pipeline1Processor), clock
+        );
+
+        String key1 = randomAlphaOfLength(10);
+        clock = mock(Clock.class);
+        when(clock.millis()).thenReturn(0L).thenReturn(3L);
+        Pipeline pipeline2 = new Pipeline(
+            pipeline2Id, null, null, new CompoundProcessor(true,
+                Arrays.asList(
+                    new TestProcessor(ingestDocument -> {
+                        ingestDocument.setFieldValue(key1, randomInt());
+                    }),
+                    pipeline2Processor),
+                Collections.emptyList()),
+            clock
+        );
+        clock = mock(Clock.class);
+        when(clock.millis()).thenReturn(0L).thenReturn(2L);
+        Pipeline pipeline3 = new Pipeline(
+            pipeline3Id, null, null, new CompoundProcessor(
+                new TestProcessor(ingestDocument -> {
+                    throw new RuntimeException("error");
+                })), clock
+        );
+        when(ingestService.getPipeline(pipeline1Id)).thenReturn(pipeline1);
+        when(ingestService.getPipeline(pipeline2Id)).thenReturn(pipeline2);
+        when(ingestService.getPipeline(pipeline3Id)).thenReturn(pipeline3);
+
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+        //start the chain
+        ingestDocument.executePipeline(pipeline1);
+        assertNotNull(ingestDocument.getSourceAndMetadata().get(key1));
+
+        //check the stats
+        IngestStats.Stats pipeline1Stats = pipeline1.getMetrics().createStats();
+ IngestStats.Stats pipeline2Stats = pipeline2.getMetrics().createStats(); + IngestStats.Stats pipeline3Stats = pipeline3.getMetrics().createStats(); + + //current + assertThat(pipeline1Stats.getIngestCurrent(), equalTo(0L)); + assertThat(pipeline2Stats.getIngestCurrent(), equalTo(0L)); + assertThat(pipeline3Stats.getIngestCurrent(), equalTo(0L)); + + //count + assertThat(pipeline1Stats.getIngestCount(), equalTo(1L)); + assertThat(pipeline2Stats.getIngestCount(), equalTo(1L)); + assertThat(pipeline3Stats.getIngestCount(), equalTo(1L)); + + //time + assertThat(pipeline1Stats.getIngestTimeInMillis(), equalTo(0L)); + assertThat(pipeline2Stats.getIngestTimeInMillis(), equalTo(3L)); + assertThat(pipeline3Stats.getIngestTimeInMillis(), equalTo(2L)); + + //failure + assertThat(pipeline1Stats.getIngestFailedCount(), equalTo(0L)); + assertThat(pipeline2Stats.getIngestFailedCount(), equalTo(0L)); + assertThat(pipeline3Stats.getIngestFailedCount(), equalTo(1L)); + } } diff --git a/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java new file mode 100644 index 00000000000..7a7f9b77372 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java @@ -0,0 +1,315 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ingest.SimulateProcessorResult; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.IdentityHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.ingest.CompoundProcessor.ON_FAILURE_MESSAGE_FIELD; +import static org.elasticsearch.ingest.CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD; +import static org.elasticsearch.ingest.CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD; +import static org.elasticsearch.ingest.TrackingResultProcessor.decorate; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class TrackingResultProcessorTests extends ESTestCase { + + private IngestDocument ingestDocument; + private List resultList; + private Set pipelinesSeen; + + @Before + public void init() { + ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); + resultList = new ArrayList<>(); + pipelinesSeen = Collections.newSetFromMap(new IdentityHashMap<>()); + } + + public void testActualProcessor() throws Exception { + TestProcessor actualProcessor = new TestProcessor(ingestDocument -> {}); + TrackingResultProcessor trackingProcessor = new TrackingResultProcessor(false, actualProcessor, resultList); + trackingProcessor.execute(ingestDocument); + + SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); + + assertThat(actualProcessor.getInvokedCounter(), equalTo(1)); + assertThat(resultList.size(), equalTo(1)); + + assertThat(resultList.get(0).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); + assertThat(resultList.get(0).getFailure(), nullValue()); + assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedResult.getProcessorTag())); + } + + public void testActualCompoundProcessorWithoutOnFailure() throws Exception { + RuntimeException exception = new RuntimeException("processor failed"); + TestProcessor testProcessor = new TestProcessor(ingestDocument -> { throw exception; }); + CompoundProcessor actualProcessor = new CompoundProcessor(testProcessor); + CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList, pipelinesSeen); + + try { + trackingProcessor.execute(ingestDocument); + fail("processor should throw exception"); + } catch (ElasticsearchException e) { + assertThat(e.getRootCause().getMessage(), equalTo(exception.getMessage())); + } + + SimulateProcessorResult expectedFirstResult = new SimulateProcessorResult(testProcessor.getTag(), ingestDocument); + assertThat(testProcessor.getInvokedCounter(), equalTo(1)); + assertThat(resultList.size(), equalTo(1)); + assertThat(resultList.get(0).getIngestDocument(), nullValue()); + assertThat(resultList.get(0).getFailure(), equalTo(exception)); + assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedFirstResult.getProcessorTag())); + } + + public void testActualCompoundProcessorWithOnFailure() throws Exception { + RuntimeException exception = new RuntimeException("fail"); + TestProcessor 
failProcessor = new TestProcessor("fail", "test", ingestDocument -> { throw exception; }); + TestProcessor onFailureProcessor = new TestProcessor("success", "test", ingestDocument -> {}); + CompoundProcessor actualProcessor = new CompoundProcessor(false, + Arrays.asList(new CompoundProcessor(false, + Arrays.asList(failProcessor, onFailureProcessor), + Arrays.asList(onFailureProcessor, failProcessor))), + Arrays.asList(onFailureProcessor)); + CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList, pipelinesSeen); + trackingProcessor.execute(ingestDocument); + + SimulateProcessorResult expectedFailResult = new SimulateProcessorResult(failProcessor.getTag(), ingestDocument); + SimulateProcessorResult expectedSuccessResult = new SimulateProcessorResult(onFailureProcessor.getTag(), ingestDocument); + + assertThat(failProcessor.getInvokedCounter(), equalTo(2)); + assertThat(onFailureProcessor.getInvokedCounter(), equalTo(2)); + assertThat(resultList.size(), equalTo(4)); + + assertThat(resultList.get(0).getIngestDocument(), nullValue()); + assertThat(resultList.get(0).getFailure(), equalTo(exception)); + assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedFailResult.getProcessorTag())); + + Map metadata = resultList.get(1).getIngestDocument().getIngestMetadata(); + assertThat(metadata.get(ON_FAILURE_MESSAGE_FIELD), equalTo("fail")); + assertThat(metadata.get(ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("test")); + assertThat(metadata.get(ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("fail")); + assertThat(resultList.get(1).getFailure(), nullValue()); + assertThat(resultList.get(1).getProcessorTag(), equalTo(expectedSuccessResult.getProcessorTag())); + + assertThat(resultList.get(2).getIngestDocument(), nullValue()); + assertThat(resultList.get(2).getFailure(), equalTo(exception)); + assertThat(resultList.get(2).getProcessorTag(), equalTo(expectedFailResult.getProcessorTag())); + + metadata = resultList.get(3).getIngestDocument().getIngestMetadata(); + assertThat(metadata.get(ON_FAILURE_MESSAGE_FIELD), equalTo("fail")); + assertThat(metadata.get(ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("test")); + assertThat(metadata.get(ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("fail")); + assertThat(resultList.get(3).getFailure(), nullValue()); + assertThat(resultList.get(3).getProcessorTag(), equalTo(expectedSuccessResult.getProcessorTag())); + } + + public void testActualCompoundProcessorWithIgnoreFailure() throws Exception { + RuntimeException exception = new RuntimeException("processor failed"); + TestProcessor testProcessor = new TestProcessor(ingestDocument -> { throw exception; }); + CompoundProcessor actualProcessor = new CompoundProcessor(true, Collections.singletonList(testProcessor), + Collections.emptyList()); + CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList, pipelinesSeen); + + trackingProcessor.execute(ingestDocument); + + SimulateProcessorResult expectedResult = new SimulateProcessorResult(testProcessor.getTag(), ingestDocument); + assertThat(testProcessor.getInvokedCounter(), equalTo(1)); + assertThat(resultList.size(), equalTo(1)); + assertThat(resultList.get(0).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); + assertThat(resultList.get(0).getFailure(), sameInstance(exception)); + assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedResult.getProcessorTag())); + } + + public void testActualPipelineProcessor() throws Exception { + String pipelineId = "pipeline1"; + IngestService ingestService = 
mock(IngestService.class); + Map pipelineConfig = new HashMap<>(); + pipelineConfig.put("pipeline", pipelineId); + PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); + + String key1 = randomAlphaOfLength(10); + String key2 = randomAlphaOfLength(10); + String key3 = randomAlphaOfLength(10); + + Pipeline pipeline = new Pipeline( + pipelineId, null, null, new CompoundProcessor( + new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue(key1, randomInt()); }), + new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue(key2, randomInt()); }), + new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key3, randomInt()); })) + ); + when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline); + + PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig); + CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor); + + CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList, pipelinesSeen); + + trackingProcessor.execute(ingestDocument); + + SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); + + verify(ingestService).getPipeline(pipelineId); + assertThat(resultList.size(), equalTo(3)); + + assertTrue(resultList.get(0).getIngestDocument().hasField(key1)); + assertFalse(resultList.get(0).getIngestDocument().hasField(key2)); + assertFalse(resultList.get(0).getIngestDocument().hasField(key3)); + + assertTrue(resultList.get(1).getIngestDocument().hasField(key1)); + assertTrue(resultList.get(1).getIngestDocument().hasField(key2)); + assertFalse(resultList.get(1).getIngestDocument().hasField(key3)); + + assertThat(resultList.get(2).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); + assertThat(resultList.get(2).getFailure(), nullValue()); + assertThat(resultList.get(2).getProcessorTag(), nullValue()); + } + + public void testActualPipelineProcessorWithHandledFailure() throws Exception { + RuntimeException exception = new RuntimeException("processor failed"); + + String pipelineId = "pipeline1"; + IngestService ingestService = mock(IngestService.class); + Map pipelineConfig = new HashMap<>(); + pipelineConfig.put("pipeline", pipelineId); + PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); + + String key1 = randomAlphaOfLength(10); + String key2 = randomAlphaOfLength(10); + String key3 = randomAlphaOfLength(10); + + Pipeline pipeline = new Pipeline( + pipelineId, null, null, new CompoundProcessor( + new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key1, randomInt()); }), + new CompoundProcessor( + false, + Collections.singletonList(new TestProcessor(ingestDocument -> { throw exception; })), + Collections.singletonList(new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key2, randomInt()); })) + ), + new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key3, randomInt()); })) + ); + when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline); + + PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig); + CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor); + + CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList, pipelinesSeen); + + trackingProcessor.execute(ingestDocument); + + SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); + + 
verify(ingestService).getPipeline(pipelineId); + assertThat(resultList.size(), equalTo(4)); + + assertTrue(resultList.get(0).getIngestDocument().hasField(key1)); + assertFalse(resultList.get(0).getIngestDocument().hasField(key2)); + assertFalse(resultList.get(0).getIngestDocument().hasField(key3)); + + //failed processor + assertNull(resultList.get(1).getIngestDocument()); + assertThat(resultList.get(1).getFailure().getMessage(), equalTo(exception.getMessage())); + + assertTrue(resultList.get(2).getIngestDocument().hasField(key1)); + assertTrue(resultList.get(2).getIngestDocument().hasField(key2)); + assertFalse(resultList.get(2).getIngestDocument().hasField(key3)); + + assertThat(resultList.get(3).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); + assertThat(resultList.get(3).getFailure(), nullValue()); + assertThat(resultList.get(3).getProcessorTag(), nullValue()); + } + + public void testActualPipelineProcessorWithCycle() throws Exception { + String pipelineId = "pipeline1"; + IngestService ingestService = mock(IngestService.class); + Map pipelineConfig = new HashMap<>(); + pipelineConfig.put("pipeline", pipelineId); + PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); + + PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig); + Pipeline pipeline = new Pipeline( + pipelineId, null, null, new CompoundProcessor(pipelineProcessor) + ); + when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline); + + CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor); + + IllegalStateException exception = expectThrows(IllegalStateException.class, + () -> decorate(actualProcessor, resultList, pipelinesSeen)); + assertThat(exception.getMessage(), equalTo("Cycle detected for pipeline: pipeline1")); + } + + + public void testActualPipelineProcessorRepeatedInvocation() throws Exception { + String pipelineId = "pipeline1"; + IngestService ingestService = mock(IngestService.class); + Map pipelineConfig = new HashMap<>(); + pipelineConfig.put("pipeline", pipelineId); + PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); + + String key1 = randomAlphaOfLength(10); + PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig); + Pipeline pipeline = new Pipeline( + pipelineId, null, null, new CompoundProcessor( + new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key1, randomInt()); })) + ); + when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline); + + CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor, pipelineProcessor); + + CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList, pipelinesSeen); + + trackingProcessor.execute(ingestDocument); + + SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); + + verify(ingestService, times(2)).getPipeline(pipelineId); + assertThat(resultList.size(), equalTo(2)); + + assertThat(resultList.get(0).getIngestDocument(), not(equalTo(expectedResult.getIngestDocument()))); + assertThat(resultList.get(0).getFailure(), nullValue()); + assertThat(resultList.get(0).getProcessorTag(), nullValue()); + + assertThat(resultList.get(1).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); + assertThat(resultList.get(1).getFailure(), nullValue()); + assertThat(resultList.get(1).getProcessorTag(), nullValue()); + + //each invocation updates key1 with a random int + 
assertNotEquals(resultList.get(0).getIngestDocument().getSourceAndMetadata().get(key1), + resultList.get(1).getIngestDocument().getSourceAndMetadata().get(key1)); + } + +} diff --git a/server/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java b/server/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java index f3e86c532d5..4431f1ead80 100644 --- a/server/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java @@ -42,7 +42,10 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase { public void testEmptySettingsAreOkay() throws InterruptedException { AtomicBoolean scheduled = new AtomicBoolean(); execute(Settings.EMPTY, - (command, interval, name) -> { scheduled.set(true); return new MockCancellable(); }, + (command, interval, name) -> { + scheduled.set(true); + return new MockCancellable(); + }, () -> assertTrue(scheduled.get())); } @@ -50,7 +53,10 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase { Settings settings = Settings.builder().put("monitor.jvm.gc.enabled", "false").build(); AtomicBoolean scheduled = new AtomicBoolean(); execute(settings, - (command, interval, name) -> { scheduled.set(true); return new MockCancellable(); }, + (command, interval, name) -> { + scheduled.set(true); + return new MockCancellable(); + }, () -> assertFalse(scheduled.get())); } diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java index 906ecf23205..50bcf594926 100644 --- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java @@ -31,16 +31,16 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData.Assignment; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask; +import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestParams; +import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestPersistentTasksExecutor; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData.Assignment; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask; -import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestParams; -import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestPersistentTasksExecutor; import org.junit.After; import org.junit.Before; @@ -334,13 +334,11 @@ public class PersistentTasksNodeServiceTests extends ESTestCase { private final PersistentTaskParams params; private final AllocatedPersistentTask task; private final PersistentTaskState state; - private final PersistentTasksExecutor holder; - Execution(PersistentTaskParams params, AllocatedPersistentTask task, PersistentTaskState state, PersistentTasksExecutor holder) { + Execution(PersistentTaskParams params, 
diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java
index 906ecf23205..50bcf594926 100644
--- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java
@@ -31,16 +31,16 @@
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.routing.RoutingTable;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.persistent.PersistentTasksCustomMetaData.Assignment;
+import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask;
+import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestParams;
+import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestPersistentTasksExecutor;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.tasks.TaskManager;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.persistent.PersistentTasksCustomMetaData.Assignment;
-import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask;
-import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestParams;
-import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestPersistentTasksExecutor;
 import org.junit.After;
 import org.junit.Before;
@@ -334,13 +334,11 @@ public class PersistentTasksNodeServiceTests extends ESTestCase {
         private final PersistentTaskParams params;
         private final AllocatedPersistentTask task;
         private final PersistentTaskState state;
-        private final PersistentTasksExecutor holder;
-        Execution(PersistentTaskParams params, AllocatedPersistentTask task, PersistentTaskState state, PersistentTasksExecutor holder) {
+        Execution(PersistentTaskParams params,
+                  AllocatedPersistentTask task, PersistentTaskState state) {
             this.params = params;
             this.task = task;
             this.state = state;
-            this.holder = holder;
         }
     }
@@ -356,7 +354,7 @@ public class PersistentTasksNodeServiceTests extends ESTestCase {
                                  final PersistentTaskState state,
                                  final AllocatedPersistentTask task,
                                  final PersistentTasksExecutor executor) {
-            executions.add(new Execution(params, task, state, executor));
+            executions.add(new Execution(params, task, state));
         }
 
         public Execution get(int i) {
diff --git a/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java b/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java
index 50035e1027b..6624d4eb8de 100644
--- a/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java
+++ b/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java
@@ -148,7 +148,7 @@ public class FullRollingRestartIT extends ESIntegTestCase {
         }
         internalCluster().restartRandomDataNode();
         ensureGreen();
-        ClusterState afterState = client().admin().cluster().prepareState().get().getState();
+        client().admin().cluster().prepareState().get().getState();
 
         recoveryResponse = client().admin().indices().prepareRecoveries("test").get();
         for (RecoveryState recoveryState : recoveryResponse.shardRecoveryStates().get("test")) {
diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java
index 05c9746aa49..28537022e3f 100644
--- a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java
+++ b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java
@@ -54,7 +54,6 @@ public class RepositoriesServiceIT extends ESIntegTestCase {
         final Client client = client();
         final RepositoriesService repositoriesService =
             cluster.getDataOrMasterNodeInstances(RepositoriesService.class).iterator().next();
-        final Settings settings = cluster.getDefaultSettings();
 
         final Settings.Builder repoSettings = Settings.builder().put("location", randomRepoPath());
diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
index cbf55428971..d35a8b5d249 100644
--- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
@@ -212,7 +212,6 @@ public class RestControllerTests extends ESTestCase {
         };
         final RestController restController = new RestController(Settings.EMPTY, Collections.emptySet(), wrapper, null, circuitBreakerService, usageService);
-        final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
         restController.dispatchRequest(new FakeRestRequest.Builder(xContentRegistry()).build(), null, null, Optional.of(handler));
         assertTrue(wrapperCalled.get());
         assertFalse(handlerCalled.get());
diff --git a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java
index ebe8ae00ac0..e5e8bce6d6d 100644
--- a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java
@@ -40,7 +40,6 @@ import java.util.stream.Collectors;
 
 import static org.hamcrest.CoreMatchers.notNullValue;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.is;
-import static org.mockito.Mockito.mock;
 
 public class RestHttpResponseHeadersTests extends ESTestCase {
@@ -114,7 +113,6 @@ public class RestHttpResponseHeadersTests extends ESTestCase {
 
         // Send the request and verify the response status code
         FakeRestChannel restChannel = new FakeRestChannel(restRequest, false, 1);
-        NodeClient client = mock(NodeClient.class);
         restController.dispatchRequest(restRequest, restChannel, new ThreadContext(Settings.EMPTY));
 
         assertThat(restChannel.capturedResponse().status().getStatus(), is(405));
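The test file added below stubs the ValidateQueryAction transport action, so only REST-layer behaviour is exercised. As a compact restatement of the three cases it covers (the request bodies are the tests' own; the comments summarize the FakeRestChannel assertions each test makes):

    // Restates the cases exercised by RestValidateQueryActionTests below.
    final class ValidateQueryCases {
        // Forwarded to the stubbed transport action; no REST-layer response is captured.
        static final String VALID     = "{\"query\":{\"bool\":{\"must\":{\"term\":{\"user\":\"kimchy\"}}}}}";
        // Rejected at the REST layer; the captured response contains {"valid":false}.
        static final String EMPTY     = "{\"query\":{}}";
        // Rejected at the REST layer; the captured response contains {"valid":false}.
        static final String MALFORMED = "{malformed_json}";
    }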
diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java
new file mode 100644
index 00000000000..980a2c2e34e
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.rest.action.admin.indices;
+
+import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.TransportAction;
+import org.elasticsearch.client.node.NodeClient;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.rest.RestController;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.search.AbstractSearchTestCase;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.tasks.TaskManager;
+import org.elasticsearch.test.rest.FakeRestChannel;
+import org.elasticsearch.test.rest.FakeRestRequest;
+import org.elasticsearch.threadpool.TestThreadPool;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.usage.UsageService;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static java.util.Collections.emptyMap;
+import static java.util.Collections.emptySet;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+
+public class RestValidateQueryActionTests extends AbstractSearchTestCase {
+
+    private static ThreadPool threadPool = new TestThreadPool(RestValidateQueryActionTests.class.getName());
+    private static NodeClient client = new NodeClient(Settings.EMPTY, threadPool);
+
+    private static UsageService usageService = new UsageService(Settings.EMPTY);
+    private static RestController controller = new RestController(Settings.EMPTY, emptySet(), null,
+        client, null, usageService);
+    private static RestValidateQueryAction action = new RestValidateQueryAction(Settings.EMPTY, controller);
+
+    /**
+     * Configures {@link NodeClient} to stub {@link ValidateQueryAction} transport action.
+     * <p>
+     * This lower level of validation is out of the scope of this test.
+     */
+    @BeforeClass
+    public static void stubValidateQueryAction() {
+        final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet());
+
+        final TransportAction transportAction = new TransportAction(Settings.EMPTY, ValidateQueryAction.NAME,
+            new ActionFilters(Collections.emptySet()), taskManager) {
+            @Override
+            protected void doExecute(Task task, ActionRequest request, ActionListener listener) {
+            }
+        };
+
+        final Map actions = new HashMap<>();
+        actions.put(ValidateQueryAction.INSTANCE, transportAction);
+
+        client.initialize(actions, () -> "local", null);
+    }
+
+    @AfterClass
+    public static void terminateThreadPool() throws InterruptedException {
+        terminate(threadPool);
+
+        threadPool = null;
+        client = null;
+
+        usageService = null;
+        controller = null;
+        action = null;
+    }
+
+    public void testRestValidateQueryAction() throws Exception {
+        // GIVEN a valid query
+        final String content = "{\"query\":{\"bool\":{\"must\":{\"term\":{\"user\":\"kimchy\"}}}}}";
+
+        final RestRequest request = createRestRequest(content);
+        final FakeRestChannel channel = new FakeRestChannel(request, true, 0);
+
+        // WHEN
+        action.handleRequest(request, channel, client);
+
+        // THEN query is valid (i.e. not marked as invalid)
+        assertThat(channel.responses().get(), equalTo(0));
+        assertThat(channel.errors().get(), equalTo(0));
+        assertNull(channel.capturedResponse());
+    }
+
+    public void testRestValidateQueryAction_emptyQuery() throws Exception {
+        // GIVEN an empty (i.e. invalid) query wrapped into a valid JSON
+        final String content = "{\"query\":{}}";
+
+        final RestRequest request = createRestRequest(content);
+        final FakeRestChannel channel = new FakeRestChannel(request, true, 0);
+
+        // WHEN
+        action.handleRequest(request, channel, client);
+
+        // THEN query is marked as invalid
+        assertThat(channel.responses().get(), equalTo(1));
+        assertThat(channel.errors().get(), equalTo(0));
+        assertThat(channel.capturedResponse().content().utf8ToString(), containsString("{\"valid\":false}"));
+    }
+
+    public void testRestValidateQueryAction_malformedQuery() throws Exception {
+        // GIVEN an invalid query due to a malformed JSON
+        final String content = "{malformed_json}";
+
+        final RestRequest request = createRestRequest(content);
+        final FakeRestChannel channel = new FakeRestChannel(request, true, 0);
+
+        // WHEN
+        action.handleRequest(request, channel, client);
+
+        // THEN query is marked as invalid
+        assertThat(channel.responses().get(), equalTo(1));
+        assertThat(channel.errors().get(), equalTo(0));
+        assertThat(channel.capturedResponse().content().utf8ToString(), containsString("{\"valid\":false}"));
+    }
+
+    private RestRequest createRestRequest(String content) {
+        return new FakeRestRequest.Builder(xContentRegistry())
+            .withPath("index1/type1/_validate/query")
+            .withParams(emptyMap())
+            .withContent(new BytesArray(content), XContentType.JSON)
+            .build();
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
index ea8b6a92234..fcb868c0e0e 100644
--- a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
@@ -66,12 +66,12 @@ public class ScriptServiceTests extends ESTestCase {
             scripts.put(i + "+" + i, p -> null); // only care about compilation, not execution
         }
         scripts.put("script", p -> null);
-
scriptEngine = new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, scripts); + scriptEngine = new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, scripts, Collections.emptyMap()); //prevent duplicates using map contexts = new HashMap<>(ScriptModule.CORE_CONTEXTS); engines = new HashMap<>(); engines.put(scriptEngine.getType(), scriptEngine); - engines.put("test", new MockScriptEngine("test", scripts)); + engines.put("test", new MockScriptEngine("test", scripts, Collections.emptyMap())); logger.info("--> setup script service"); } diff --git a/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java b/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java index 79d5c67bc78..d628561e2c6 100644 --- a/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java +++ b/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java @@ -35,7 +35,6 @@ public class StoredScriptSourceTests extends AbstractSerializingTestCase { + + private NamedWriteableRegistry namedWriteableRegistry; + private NamedXContentRegistry namedXContentRegistry; + + @Before + public void setUp() throws Exception { + super.setUp(); + + // register aggregations as NamedWriteable + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList()); + namedWriteableRegistry = new NamedWriteableRegistry(searchModule.getNamedWriteables()); + namedXContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents()); + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return namedWriteableRegistry; + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return namedXContentRegistry; + } + + @Override + protected Builder doParseInstance(XContentParser parser) throws IOException { + // parseAggregators expects to be already inside the xcontent object + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + AggregatorFactories.Builder builder = AggregatorFactories.parseAggregators(parser); + return builder; + } + + @Override + protected Builder createTestInstance() { + AggregatorFactories.Builder builder = new AggregatorFactories.Builder(); + + // ensure that the unlikely does not happen: 2 aggs share the same name + Set names = new HashSet<>(); + for (int i = 0; i < randomIntBetween(1, 20); ++i) { + AggregationBuilder aggBuilder = getRandomAggregation(); + if (names.add(aggBuilder.getName())) { + builder.addAggregator(aggBuilder); + } + } + + for (int i = 0; i < randomIntBetween(0, 20); ++i) { + PipelineAggregationBuilder aggBuilder = getRandomPipelineAggregation(); + if (names.add(aggBuilder.getName())) { + builder.addPipelineAggregator(aggBuilder); + } + } + + return builder; + } + + @Override + protected Reader instanceReader() { + return AggregatorFactories.Builder::new; + } + + public void testUnorderedEqualsSubSet() { + Set names = new HashSet<>(); + List aggBuilders = new ArrayList<>(); + + while (names.size() < 2) { + AggregationBuilder aggBuilder = getRandomAggregation(); + + if (names.add(aggBuilder.getName())) { + aggBuilders.add(aggBuilder); + } + } + + AggregatorFactories.Builder builder1 = new AggregatorFactories.Builder(); + AggregatorFactories.Builder builder2 = new AggregatorFactories.Builder(); + + builder1.addAggregator(aggBuilders.get(0)); + builder1.addAggregator(aggBuilders.get(1)); + builder2.addAggregator(aggBuilders.get(1)); + + assertFalse(builder1.equals(builder2)); + assertFalse(builder2.equals(builder1)); + assertNotEquals(builder1.hashCode(), 
builder2.hashCode()); + + builder2.addAggregator(aggBuilders.get(0)); + assertTrue(builder1.equals(builder2)); + assertTrue(builder2.equals(builder1)); + assertEquals(builder1.hashCode(), builder2.hashCode()); + + builder1.addPipelineAggregator(getRandomPipelineAggregation()); + assertFalse(builder1.equals(builder2)); + assertFalse(builder2.equals(builder1)); + assertNotEquals(builder1.hashCode(), builder2.hashCode()); + } + + private static AggregationBuilder getRandomAggregation() { + // just a couple of aggregations, sufficient for the purpose of this test + final int randomAggregatorPoolSize = 4; + switch (randomIntBetween(1, randomAggregatorPoolSize)) { + case 1: + return AggregationBuilders.avg(randomAlphaOfLengthBetween(3, 10)); + case 2: + return AggregationBuilders.min(randomAlphaOfLengthBetween(3, 10)); + case 3: + return AggregationBuilders.max(randomAlphaOfLengthBetween(3, 10)); + case 4: + return AggregationBuilders.sum(randomAlphaOfLengthBetween(3, 10)); + } + + // never reached + return null; + } + + private static PipelineAggregationBuilder getRandomPipelineAggregation() { + // just 1 type of pipeline agg, sufficient for the purpose of this test + String name = randomAlphaOfLengthBetween(3, 20); + String bucketsPath = randomAlphaOfLengthBetween(3, 20); + PipelineAggregationBuilder builder = new CumulativeSumPipelineAggregationBuilder(name, bucketsPath); + return builder; + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java index 642092507fe..731156457a4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java @@ -41,7 +41,7 @@ import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptP import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.ESTestCase; -import java.util.List; +import java.util.Collection; import java.util.Random; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -74,7 +74,7 @@ public class AggregatorFactoriesTests extends ESTestCase { public void testGetAggregatorFactories_returnsUnmodifiableList() { AggregatorFactories.Builder builder = new AggregatorFactories.Builder().addAggregator(AggregationBuilders.avg("foo")); - List aggregatorFactories = builder.getAggregatorFactories(); + Collection aggregatorFactories = builder.getAggregatorFactories(); assertThat(aggregatorFactories.size(), equalTo(1)); expectThrows(UnsupportedOperationException.class, () -> aggregatorFactories.add(AggregationBuilders.avg("bar"))); } @@ -82,7 +82,7 @@ public class AggregatorFactoriesTests extends ESTestCase { public void testGetPipelineAggregatorFactories_returnsUnmodifiableList() { AggregatorFactories.Builder builder = new AggregatorFactories.Builder().addPipelineAggregator( PipelineAggregatorBuilders.avgBucket("foo", "path1")); - List pipelineAggregatorFactories = builder.getPipelineAggregatorFactories(); + Collection pipelineAggregatorFactories = builder.getPipelineAggregatorFactories(); assertThat(pipelineAggregatorFactories.size(), equalTo(1)); expectThrows(UnsupportedOperationException.class, () -> pipelineAggregatorFactories.add(PipelineAggregatorBuilders.avgBucket("bar", "path2"))); @@ -269,10 +269,10 @@ public class AggregatorFactoriesTests extends ESTestCase { AggregatorFactories.Builder rewritten = builder .rewrite(new 
QueryRewriteContext(xContentRegistry, null, null, () -> 0L)); assertNotSame(builder, rewritten); - List aggregatorFactories = rewritten.getAggregatorFactories(); + Collection aggregatorFactories = rewritten.getAggregatorFactories(); assertEquals(1, aggregatorFactories.size()); - assertThat(aggregatorFactories.get(0), instanceOf(FilterAggregationBuilder.class)); - FilterAggregationBuilder rewrittenFilterAggBuilder = (FilterAggregationBuilder) aggregatorFactories.get(0); + assertThat(aggregatorFactories.iterator().next(), instanceOf(FilterAggregationBuilder.class)); + FilterAggregationBuilder rewrittenFilterAggBuilder = (FilterAggregationBuilder) aggregatorFactories.iterator().next(); assertNotSame(filterAggBuilder, rewrittenFilterAggBuilder); assertNotEquals(filterAggBuilder, rewrittenFilterAggBuilder); // Check the filter was rewritten from a wrapper query to a terms query diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java index c7bbcfc1477..ea25e86b3a2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java @@ -121,7 +121,7 @@ public abstract class BasePipelineAggregationTestCase 0L))); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java index fcdbc81c0c6..fc0a749f064 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java @@ -41,7 +41,7 @@ public class GeoDistanceRangeTests extends BaseAggregationTestCase @Override protected RangeAggregationBuilder createTestAggregatorBuilder() { int numRanges = randomIntBetween(1, 10); - RangeAggregationBuilder factory = new RangeAggregationBuilder("foo"); + RangeAggregationBuilder factory = new RangeAggregationBuilder(randomAlphaOfLengthBetween(3, 10)); for (int i = 0; i < numRanges; i++) { String key = null; if (randomBoolean()) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerTests.java index e4de490f6b2..5a9a28baf82 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerTests.java @@ -26,7 +26,7 @@ public class SamplerTests extends BaseAggregationTestCase testSearchCase(query, timestamps, + expectThrows(TooManyBucketsException.class, () -> testSearchCase(query, timestamps, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD), histogram -> {}, 2)); - exc = expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, + expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD), histogram -> {}, 2)); - exc = expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, + expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, 
aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD).minDocCount(0L), histogram -> {}, 100)); - exc = expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, + expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)) .field(DATE_FIELD) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java index ecd8868aabd..5148b0b8575 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java @@ -41,7 +41,7 @@ public class DateHistogramTests extends BaseAggregationTestCase ((List) script.get("states")).size())); + Collections.singletonMap(REDUCE_SCRIPT_NAME, script -> ((List) script.get("states")).size()), + Collections.emptyMap()); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java index 3e97ec94f6b..2cee3548aba 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -79,7 +79,7 @@ public class InternalTopHitsTests extends InternalAggregationTestCase usedDocIds = new HashSet<>(); @@ -112,7 +112,8 @@ public class InternalTopHitsTests extends InternalAggregationTestCase inputs) { SearchHits actualHits = reduced.getHits(); List> allHits = new ArrayList<>(); - float maxScore = Float.MIN_VALUE; + float maxScore = Float.NEGATIVE_INFINITY; long totalHits = 0; for (int input = 0; input < inputs.size(); input++) { SearchHits internalHits = inputs.get(input).getHits(); @@ -199,7 +200,7 @@ public class InternalTopHitsTests extends InternalAggregationTestCase engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); ScriptService scriptService = new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); return new QueryShardContext(0, mapperService.getIndexSettings(), null, null, mapperService, null, scriptService, diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java index e2db3ac2fb4..cdaa860eef1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java @@ -23,7 +23,7 @@ public class StatsTests extends AbstractNumericMetricTestCase client() .prepareSearch("idx") .addAggregation( terms("terms") @@ -451,21 +450,18 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase { .extendedBounds(minRandomValue, maxRandomValue) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum") - .sigma(-1.0))).execute().actionGet(); - fail("Illegal sigma was provided but no exception was thrown."); - } catch 
(Exception e) {
-            Throwable cause = ExceptionsHelper.unwrapCause(e);
-            if (cause == null) {
-                throw e;
-            } else if (cause instanceof SearchPhaseExecutionException) {
-                SearchPhaseExecutionException spee = (SearchPhaseExecutionException) e;
-                Throwable rootCause = spee.getRootCause();
-                if (!(rootCause instanceof IllegalArgumentException)) {
-                    throw e;
-                }
-            } else if (!(cause instanceof IllegalArgumentException)) {
-                throw e;
+                        .sigma(-1.0))).execute().actionGet());
+        Throwable cause = ExceptionsHelper.unwrapCause(ex);
+        if (cause == null) {
+            throw ex;
+        } else if (cause instanceof SearchPhaseExecutionException) {
+            SearchPhaseExecutionException spee = (SearchPhaseExecutionException) ex;
+            Throwable rootCause = spee.getRootCause();
+            if (!(rootCause instanceof IllegalArgumentException)) {
+                throw ex;
             }
+        } else if (!(cause instanceof IllegalArgumentException)) {
+            throw ex;
         }
     }
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java
index c3075da8271..4841c5e596a 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java
@@ -21,15 +21,26 @@ package org.elasticsearch.search.aggregations.pipeline;
 
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.search.aggregations.AggregationBuilders;
 import org.elasticsearch.search.aggregations.BucketOrder;
 import org.elasticsearch.search.aggregations.bucket.filter.Filter;
+import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
 import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
+import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.Sum;
+import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue;
+import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketPipelineAggregationBuilder;
 import org.elasticsearch.test.ESIntegTestCase;
 
 import java.util.ArrayList;
@@ -475,4 +486,56 @@ public class MaxBucketIT extends ESIntegTestCase {
         assertThat(maxBucketValue.value(), equalTo(maxTermsValue));
         assertThat(maxBucketValue.keys(), equalTo(maxTermsKeys.toArray(new String[maxTermsKeys.size()])));
     }
+
+    /**
+     * https://github.com/elastic/elasticsearch/issues/33514
+     *
+     * This bug manifests as the max_bucket agg ("peak") being added to the response twice, because
+     * the pipeline agg is run twice. This makes invalid JSON and breaks conversion to maps.
+     * The bug was caused by an UnmappedTerms being chosen as the first reduction target. UnmappedTerms
+     * delegated reduction to the first non-unmapped agg, which would reduce and run pipeline aggs. But then
+     * execution returns to the UnmappedTerms and _it_ runs pipelines as well, doubling up on the values.
+     *
+     * Applies to any pipeline agg, not just max.
+     */
+    public void testFieldIsntWrittenOutTwice() throws Exception {
+        // you need to add an additional index with no fields in order to trigger this (or potentially a shard)
+        // so that there is an UnmappedTerms in the list to reduce.
+        createIndex("foo_1");
+
+        XContentBuilder builder = jsonBuilder().startObject().startObject("properties")
+            .startObject("@timestamp").field("type", "date").endObject()
+            .startObject("license").startObject("properties")
+            .startObject("count").field("type", "long").endObject()
+            .startObject("partnumber").field("type", "text").startObject("fields").startObject("keyword")
+            .field("type", "keyword").field("ignore_above", 256)
+            .endObject().endObject().endObject()
+            .endObject().endObject().endObject().endObject();
+        assertAcked(client().admin().indices().prepareCreate("foo_2")
+            .addMapping("doc", builder).get());
+
+        XContentBuilder docBuilder = jsonBuilder().startObject()
+            .startObject("license").field("partnumber", "foobar").field("count", 2).endObject()
+            .field("@timestamp", "2018-07-08T08:07:00.599Z")
+            .endObject();
+
+        client().prepareIndex("foo_2", "doc").setSource(docBuilder).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get();
+
+        client().admin().indices().prepareRefresh();
+
+        TermsAggregationBuilder groupByLicenseAgg = AggregationBuilders.terms("group_by_license_partnumber")
+            .field("license.partnumber.keyword");
+        MaxBucketPipelineAggregationBuilder peakPipelineAggBuilder =
+            PipelineAggregatorBuilders.maxBucket("peak", "licenses_per_day>total_licenses");
+        SumAggregationBuilder sumAggBuilder = AggregationBuilders.sum("total_licenses").field("license.count");
+        DateHistogramAggregationBuilder licensePerDayBuilder =
+            AggregationBuilders.dateHistogram("licenses_per_day").field("@timestamp").dateHistogramInterval(DateHistogramInterval.DAY);
+        licensePerDayBuilder.subAggregation(sumAggBuilder);
+        groupByLicenseAgg.subAggregation(licensePerDayBuilder);
+        groupByLicenseAgg.subAggregation(peakPipelineAggBuilder);
+
+        SearchResponse response = client().prepareSearch("foo_*").setSize(0).addAggregation(groupByLicenseAgg).get();
+        BytesReference bytes = XContentHelper.toXContent(response, XContentType.JSON, false);
+        XContentHelper.convertToMap(bytes, false, XContentType.JSON);
+    }
 }
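As a sketch of the bug shape the comment above describes (hypothetical names, not the real InternalAggregation reduce API): when reduction is delegated, pipeline aggregators must only be applied once on the way out.

    // Hypothetical illustration of the double-reduction bug; names and types
    // are simplified and do not match Elasticsearch's internal API.
    final class DoubleReduceSketch {
        Object reduce(java.util.List<Object> aggs) {
            // UnmappedTerms-style delegation: the delegate already ran the
            // pipeline aggs while producing its reduced result...
            Object reduced = delegateToFirstMapped(aggs);
            // ...so running them again here emits "peak" a second time, which
            // is the duplicated-key JSON that testFieldIsntWrittenOutTwice
            // above guards against.
            return runPipelines(reduced);
        }

        private Object delegateToFirstMapped(java.util.List<Object> aggs) {
            return runPipelines(aggs.get(0)); // pipelines applied once, correctly
        }

        private Object runPipelines(Object agg) {
            return agg; // stand-in for pipeline aggregator execution
        }
    }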
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/AvgBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/AvgBucketTests.java
index 223cbb231ea..c504aa3f461 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/AvgBucketTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/AvgBucketTests.java
@@ -26,9 +26,9 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg.AvgBucketPipelineAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValueType;
 
-import java.util.ArrayList;
 import java.util.Collections;
-import java.util.List;
+import java.util.HashSet;
+import java.util.Set;
 
 public class AvgBucketTests extends 
AbstractBucketMetricsTestCase { @@ -40,27 +40,27 @@ public class AvgBucketTests extends AbstractBucketMetricsTestCase aggBuilders = new ArrayList<>(); + final Set aggBuilders = new HashSet<>(); aggBuilders.add(singleBucketAgg); aggBuilders.add(multiBucketAgg); // First try to point to a non-existent agg final AvgBucketPipelineAggregationBuilder builder = new AvgBucketPipelineAggregationBuilder("name", "invalid_agg>metric"); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> builder.validate(null, aggBuilders, Collections.emptyList())); + () -> builder.validate(null, aggBuilders, Collections.emptySet())); assertEquals(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " aggregation does not exist for aggregation [name]: invalid_agg>metric", ex.getMessage()); // Now try to point to a single bucket agg AvgBucketPipelineAggregationBuilder builder2 = new AvgBucketPipelineAggregationBuilder("name", "global>metric"); - ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptyList())); + ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptySet())); assertEquals("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName() + " for buckets path: global>metric", ex.getMessage()); // Now try to point to a valid multi-bucket agg (no exception should be thrown) AvgBucketPipelineAggregationBuilder builder3 = new AvgBucketPipelineAggregationBuilder("name", "terms>metric"); - builder3.validate(null, aggBuilders, Collections.emptyList()); + builder3.validate(null, aggBuilders, Collections.emptySet()); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java index d1775687000..43303205b46 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java @@ -28,9 +28,9 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; -import java.util.ArrayList; import java.util.Collections; -import java.util.List; +import java.util.HashSet; +import java.util.Set; import static org.hamcrest.Matchers.equalTo; @@ -65,7 +65,7 @@ public class ExtendedStatsBucketTests extends AbstractBucketMetricsTestCase aggBuilders = new ArrayList<>(); + final Set aggBuilders = new HashSet<>(); aggBuilders.add(singleBucketAgg); aggBuilders.add(multiBucketAgg); @@ -73,13 +73,13 @@ public class ExtendedStatsBucketTests extends AbstractBucketMetricsTestCasemetric"); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> builder.validate(null, aggBuilders, Collections.emptyList())); + () -> builder.validate(null, aggBuilders, Collections.emptySet())); assertEquals(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " aggregation does not exist for aggregation [name]: invalid_agg>metric", ex.getMessage()); // Now try to point to a single bucket agg 
ExtendedStatsBucketPipelineAggregationBuilder builder2 = new ExtendedStatsBucketPipelineAggregationBuilder("name", "global>metric"); - ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptyList())); + ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptySet())); assertEquals("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName() + " for buckets path: global>metric", ex.getMessage()); @@ -87,6 +87,6 @@ public class ExtendedStatsBucketTests extends AbstractBucketMetricsTestCasemetric"); - builder3.validate(null, aggBuilders, Collections.emptyList()); + builder3.validate(null, aggBuilders, Collections.emptySet()); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MaxBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MaxBucketTests.java index a8e78a31f95..cbf31130d38 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MaxBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MaxBucketTests.java @@ -26,9 +26,9 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; -import java.util.ArrayList; import java.util.Collections; -import java.util.List; +import java.util.HashSet; +import java.util.Set; public class MaxBucketTests extends AbstractBucketMetricsTestCase { @@ -40,20 +40,20 @@ public class MaxBucketTests extends AbstractBucketMetricsTestCase aggBuilders = new ArrayList<>(); + final Set aggBuilders = new HashSet<>(); aggBuilders.add(singleBucketAgg); aggBuilders.add(multiBucketAgg); // First try to point to a non-existent agg final MaxBucketPipelineAggregationBuilder builder = new MaxBucketPipelineAggregationBuilder("name", "invalid_agg>metric"); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> builder.validate(null, aggBuilders, Collections.emptyList())); + () -> builder.validate(null, aggBuilders, Collections.emptySet())); assertEquals(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " aggregation does not exist for aggregation [name]: invalid_agg>metric", ex.getMessage()); // Now try to point to a single bucket agg MaxBucketPipelineAggregationBuilder builder2 = new MaxBucketPipelineAggregationBuilder("name", "global>metric"); - ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptyList())); + ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptySet())); assertEquals("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName() + " for buckets path: global>metric", ex.getMessage()); @@ -61,7 +61,7 @@ public class MaxBucketTests extends AbstractBucketMetricsTestCasemetric"); - builder3.validate(null, aggBuilders, Collections.emptyList()); + builder3.validate(null, aggBuilders, Collections.emptySet()); } } diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MinBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MinBucketTests.java index 21efed4a5cf..eca1db24ff7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MinBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MinBucketTests.java @@ -26,9 +26,9 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; -import java.util.ArrayList; import java.util.Collections; -import java.util.List; +import java.util.HashSet; +import java.util.Set; public class MinBucketTests extends AbstractBucketMetricsTestCase { @@ -40,20 +40,20 @@ public class MinBucketTests extends AbstractBucketMetricsTestCase aggBuilders = new ArrayList<>(); + final Set aggBuilders = new HashSet<>(); aggBuilders.add(singleBucketAgg); aggBuilders.add(multiBucketAgg); // First try to point to a non-existent agg final MinBucketPipelineAggregationBuilder builder = new MinBucketPipelineAggregationBuilder("name", "invalid_agg>metric"); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> builder.validate(null, aggBuilders, Collections.emptyList())); + () -> builder.validate(null, aggBuilders, Collections.emptySet())); assertEquals(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " aggregation does not exist for aggregation [name]: invalid_agg>metric", ex.getMessage()); // Now try to point to a single bucket agg MinBucketPipelineAggregationBuilder builder2 = new MinBucketPipelineAggregationBuilder("name", "global>metric"); - ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptyList())); + ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptySet())); assertEquals("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName() + " for buckets path: global>metric", ex.getMessage()); @@ -61,7 +61,7 @@ public class MinBucketTests extends AbstractBucketMetricsTestCasemetric"); - builder3.validate(null, aggBuilders, Collections.emptyList()); + builder3.validate(null, aggBuilders, Collections.emptySet()); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/PercentilesBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/PercentilesBucketTests.java index 4851c969722..a6040aaf9f6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/PercentilesBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/PercentilesBucketTests.java @@ -28,9 +28,9 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; -import java.util.ArrayList; import java.util.Collections; -import java.util.List; +import java.util.HashSet; +import java.util.Set; import static 
org.hamcrest.Matchers.equalTo; @@ -69,7 +69,7 @@ public class PercentilesBucketTests extends AbstractBucketMetricsTestCase aggBuilders = new ArrayList<>(); + final Set aggBuilders = new HashSet<>(); aggBuilders.add(singleBucketAgg); aggBuilders.add(multiBucketAgg); @@ -77,13 +77,13 @@ public class PercentilesBucketTests extends AbstractBucketMetricsTestCasemetric"); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> builder.validate(null, aggBuilders, Collections.emptyList())); + () -> builder.validate(null, aggBuilders, Collections.emptySet())); assertEquals(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " aggregation does not exist for aggregation [name]: invalid_agg>metric", ex.getMessage()); // Now try to point to a single bucket agg PercentilesBucketPipelineAggregationBuilder builder2 = new PercentilesBucketPipelineAggregationBuilder("name", "global>metric"); - ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptyList())); + ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptySet())); assertEquals("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName() + " for buckets path: global>metric", ex.getMessage()); @@ -91,6 +91,6 @@ public class PercentilesBucketTests extends AbstractBucketMetricsTestCasemetric"); - builder3.validate(null, aggBuilders, Collections.emptyList()); + builder3.validate(null, aggBuilders, Collections.emptySet()); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/StatsBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/StatsBucketTests.java index 7611d7b07b3..bcd90778136 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/StatsBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/StatsBucketTests.java @@ -26,9 +26,9 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; -import java.util.ArrayList; import java.util.Collections; -import java.util.List; +import java.util.HashSet; +import java.util.Set; public class StatsBucketTests extends AbstractBucketMetricsTestCase { @@ -41,20 +41,20 @@ public class StatsBucketTests extends AbstractBucketMetricsTestCase aggBuilders = new ArrayList<>(); + final Set aggBuilders = new HashSet<>(); aggBuilders.add(singleBucketAgg); aggBuilders.add(multiBucketAgg); // First try to point to a non-existent agg final StatsBucketPipelineAggregationBuilder builder = new StatsBucketPipelineAggregationBuilder("name", "invalid_agg>metric"); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> builder.validate(null, aggBuilders, Collections.emptyList())); + () -> builder.validate(null, aggBuilders, Collections.emptySet())); assertEquals(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " aggregation does not exist for aggregation [name]: invalid_agg>metric", ex.getMessage()); // Now try to point to a single bucket agg StatsBucketPipelineAggregationBuilder builder2 = new StatsBucketPipelineAggregationBuilder("name", 
"global>metric"); - ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptyList())); + ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptySet())); assertEquals("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName() + " for buckets path: global>metric", ex.getMessage()); @@ -62,7 +62,7 @@ public class StatsBucketTests extends AbstractBucketMetricsTestCasemetric"); - builder3.validate(null, aggBuilders, Collections.emptyList()); + builder3.validate(null, aggBuilders, Collections.emptySet()); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/SumBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/SumBucketTests.java index 62fc1f97797..be6c7f92342 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/SumBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/SumBucketTests.java @@ -26,9 +26,9 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; -import java.util.ArrayList; import java.util.Collections; -import java.util.List; +import java.util.HashSet; +import java.util.Set; public class SumBucketTests extends AbstractBucketMetricsTestCase { @@ -40,20 +40,20 @@ public class SumBucketTests extends AbstractBucketMetricsTestCase aggBuilders = new ArrayList<>(); + final Set aggBuilders = new HashSet<>(); aggBuilders.add(singleBucketAgg); aggBuilders.add(multiBucketAgg); // First try to point to a non-existent agg final SumBucketPipelineAggregationBuilder builder = new SumBucketPipelineAggregationBuilder("name", "invalid_agg>metric"); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> builder.validate(null, aggBuilders, Collections.emptyList())); + () -> builder.validate(null, aggBuilders, Collections.emptySet())); assertEquals(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " aggregation does not exist for aggregation [name]: invalid_agg>metric", ex.getMessage()); // Now try to point to a single bucket agg SumBucketPipelineAggregationBuilder builder2 = new SumBucketPipelineAggregationBuilder("name", "global>metric"); - ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptyList())); + ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptySet())); assertEquals("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName() + " for buckets path: global>metric", ex.getMessage()); @@ -61,7 +61,7 @@ public class SumBucketTests extends AbstractBucketMetricsTestCasemetric"); - builder3.validate(null, aggBuilders, Collections.emptyList()); + builder3.validate(null, aggBuilders, Collections.emptySet()); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java index d14f93b7a51..41bbf053ff1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; import org.elasticsearch.common.collect.EvictingQueue; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; @@ -411,7 +412,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts","_count") .window(windowSize) @@ -459,7 +460,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(windowSize) @@ -507,7 +508,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(windowSize) @@ -555,7 +556,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(windowSize) @@ -604,7 +605,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(windowSize) @@ -708,7 +709,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(randomMetric("the_metric", VALUE_FIELD)) .subAggregation(movingAvg("movavg_counts", "the_metric") .window(0) @@ -746,7 +747,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(randomMetric("the_metric", 
VALUE_FIELD)) .subAggregation(movingAvg("movavg_counts", "_count") .window(-10) @@ -810,7 +811,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(randomMetric("the_metric", VALUE_FIELD)) .subAggregation(movingAvg("movavg_counts", "the_metric") .window(windowSize) @@ -831,7 +832,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(randomMetric("the_metric", VALUE_FIELD)) .subAggregation(movingAvg("movavg_counts", "the_metric") .window(windowSize) @@ -846,13 +847,13 @@ public class MovAvgIT extends ESIntegTestCase { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34046") public void testHoltWintersNotEnoughData() { - try { - SearchResponse response = client() - .prepareSearch("idx").setTypes("type") + Client client = client(); + expectThrows(SearchPhaseExecutionException.class, () -> client.prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(10) @@ -864,11 +865,7 @@ public class MovAvgIT extends ESIntegTestCase { .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder() .alpha(alpha).beta(beta).gamma(gamma).period(20).seasonalityType(seasonalityType)) .gapPolicy(gapPolicy)) - ).execute().actionGet(); - } catch (SearchPhaseExecutionException e) { - // All good - } - + ).execute().actionGet()); } public void testTwoMovAvgsWithPredictions() { @@ -982,23 +979,19 @@ public class MovAvgIT extends ESIntegTestCase { } } + @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/34046") public void testBadModelParams() { - try { - SearchResponse response = client() + expectThrows(SearchPhaseExecutionException.class, () -> client() .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(10) .modelBuilder(randomModelBuilder(100)) .gapPolicy(gapPolicy)) - ).execute().actionGet(); - } catch (SearchPhaseExecutionException e) { - // All good - } - + ).execute().actionGet()); } public void testHoltWintersMinimization() { @@ -1006,7 +999,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(windowSize) @@ -1092,7 +1085,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * 
(numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(numBuckets) @@ -1146,7 +1139,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(numBuckets) @@ -1164,7 +1157,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(numBuckets) @@ -1194,7 +1187,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(numBuckets) diff --git a/server/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java b/server/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java index 0912236e018..20f73b5903d 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.fetch; - +import org.apache.logging.log4j.LogManager; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; @@ -30,7 +30,6 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.termvectors.TermVectorsService; @@ -146,7 +145,7 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase { } hitField.getValues().add(tv); } catch (IOException e) { - ESLoggerFactory.getLogger(FetchSubPhasePluginIT.class.getName()).info("Swallowed exception", e); + LogManager.getLogger(FetchSubPhasePluginIT.class).info("Swallowed exception", e); } } } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 50f2261c722..46bca911e9c 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -710,9 +710,11 @@ public class HighlightBuilderTests extends ESTestCase { switch (randomIntBetween(0, 2)) { // change settings that only exists on top level case 0: - mutation.useExplicitFieldOrder(!original.useExplicitFieldOrder()); break; + mutation.useExplicitFieldOrder(!original.useExplicitFieldOrder()); + break; case 1: - mutation.encoder(original.encoder() + 
randomAlphaOfLength(2)); break; + mutation.encoder(original.encoder() + randomAlphaOfLength(2)); + break; case 2: if (randomBoolean()) { // add another field diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 069c72c10b4..5120abfbdc6 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -44,7 +44,6 @@ import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.IdsQueryBuilder; import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.MultiMatchQueryBuilder; -import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; @@ -84,7 +83,6 @@ import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery; import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.existsQuery; import static org.elasticsearch.index.query.QueryBuilders.fuzzyQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchPhrasePrefixQuery; import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; @@ -1475,117 +1473,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); } - public void testPhrasePrefix() throws IOException { - Builder builder = Settings.builder() - .put(indexSettings()) - .put("index.analysis.analyzer.synonym.tokenizer", "standard") - .putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase") - .put("index.analysis.filter.synonym.type", "synonym") - .putList("index.analysis.filter.synonym.synonyms", "quick => fast"); - - assertAcked(prepareCreate("first_test_index").setSettings(builder.build()).addMapping("type1", type1TermVectorMapping())); - - ensureGreen(); - - client().prepareIndex("first_test_index", "type1", "0").setSource( - "field0", "The quick brown fox jumps over the lazy dog", - "field1", "The quick brown fox jumps over the lazy dog").get(); - client().prepareIndex("first_test_index", "type1", "1").setSource("field1", - "The quick browse button is a fancy thing, right bro?").get(); - refresh(); - logger.info("--> highlighting and searching on field0"); - - SearchSourceBuilder source = searchSource() - .query(matchPhrasePrefixQuery("field0", "bro")) - .highlighter(highlight().field("field0").order("score").preTags("").postTags("")); - SearchResponse searchResponse = client().search(searchRequest("first_test_index").source(source)).actionGet(); - - assertHighlight(searchResponse, 0, "field0", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); - - source = searchSource() - .query(matchPhrasePrefixQuery("field0", "quick bro")) - .highlighter(highlight().field("field0").order("score").preTags("").postTags("")); - - searchResponse = client().search(searchRequest("first_test_index").source(source)).actionGet(); - assertHighlight(searchResponse, 0, 
"field0", 0, 1, - equalTo("The quick brown fox jumps over the lazy dog")); - - logger.info("--> highlighting and searching on field1"); - source = searchSource() - .query(boolQuery() - .should(matchPhrasePrefixQuery("field1", "test")) - .should(matchPhrasePrefixQuery("field1", "bro")) - ) - .highlighter(highlight().field("field1").order("score").preTags("").postTags("")); - - searchResponse = client().search(searchRequest("first_test_index").source(source)).actionGet(); - assertThat(searchResponse.getHits().totalHits, equalTo(2L)); - for (int i = 0; i < 2; i++) { - assertHighlight(searchResponse, i, "field1", 0, 1, anyOf( - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"))); - } - - source = searchSource() - .query(matchPhrasePrefixQuery("field1", "quick bro")) - .highlighter(highlight().field("field1").order("score").preTags("").postTags("")); - - searchResponse = client().search(searchRequest("first_test_index").source(source)).actionGet(); - - assertHighlight(searchResponse, 0, "field1", 0, 1, anyOf( - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"))); - assertHighlight(searchResponse, 1, "field1", 0, 1, anyOf( - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"))); - - assertAcked(prepareCreate("second_test_index").setSettings(builder.build()).addMapping("doc", - "field4", "type=text,term_vector=with_positions_offsets,analyzer=synonym", - "field3", "type=text,analyzer=synonym")); - // with synonyms - client().prepareIndex("second_test_index", "doc", "0").setSource( - "type", "type2", - "field4", "The quick brown fox jumps over the lazy dog", - "field3", "The quick brown fox jumps over the lazy dog").get(); - client().prepareIndex("second_test_index", "doc", "1").setSource( - "type", "type2", - "field4", "The quick browse button is a fancy thing, right bro?").get(); - client().prepareIndex("second_test_index", "doc", "2").setSource( - "type", "type2", - "field4", "a quick fast blue car").get(); - refresh(); - - source = searchSource().postFilter(termQuery("type", "type2")).query(matchPhrasePrefixQuery("field3", "fast bro")) - .highlighter(highlight().field("field3").order("score").preTags("").postTags("")); - - searchResponse = client().search(searchRequest("second_test_index").source(source)).actionGet(); - - assertHighlight(searchResponse, 0, "field3", 0, 1, - equalTo("The quick brown fox jumps over the lazy dog")); - - logger.info("--> highlighting and searching on field4"); - source = searchSource().postFilter(termQuery("type", "type2")).query(matchPhrasePrefixQuery("field4", "the fast bro")) - .highlighter(highlight().field("field4").order("score").preTags("").postTags("")); - searchResponse = client().search(searchRequest("second_test_index").source(source)).actionGet(); - - assertHighlight(searchResponse, 0, "field4", 0, 1, anyOf( - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"))); - assertHighlight(searchResponse, 1, "field4", 0, 1, anyOf( - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"))); - - logger.info("--> highlighting and searching on field4"); - source = searchSource().postFilter(termQuery("type", "type2")) - .query(matchPhrasePrefixQuery("field4", "a fast quick blue ca")) - 
.highlighter(highlight().field("field4").order("score").preTags("").postTags("")); - searchResponse = client().search(searchRequest("second_test_index").source(source)).actionGet(); - - assertHighlight(searchResponse, 0, "field4", 0, 1, - anyOf(equalTo("a quick fast blue car"), - equalTo("a quick fast blue car"))); - } - public void testPlainHighlightDifferentFragmenter() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "tags", "type=text")); @@ -2919,46 +2806,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertThat(field.getFragments()[0].string(), equalTo("brown")); } - public void testSynonyms() throws IOException { - Builder builder = Settings.builder() - .put(indexSettings()) - .put("index.analysis.analyzer.synonym.tokenizer", "standard") - .putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase") - .put("index.analysis.filter.synonym.type", "synonym") - .putList("index.analysis.filter.synonym.synonyms", "fast,quick"); - assertAcked(prepareCreate("test").setSettings(builder.build()) - .addMapping("type1", "field1", - "type=text,term_vector=with_positions_offsets,search_analyzer=synonym," + - "analyzer=standard,index_options=offsets")); - ensureGreen(); - - client().prepareIndex("test", "type1", "0").setSource( - "field1", "The quick brown fox jumps over the lazy dog").get(); - refresh(); - for (String highlighterType : ALL_TYPES) { - logger.info("--> highlighting (type=" + highlighterType + ") and searching on field1"); - SearchSourceBuilder source = searchSource() - .query(matchQuery("field1", "quick brown fox").operator(Operator.AND)) - .highlighter( - highlight() - .field("field1") - .order("score") - .preTags("") - .postTags("") - .highlighterType(highlighterType)); - SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field1", 0, 1, - equalTo("The quick brown fox jumps over the lazy dog")); - - source = searchSource() - .query(matchQuery("field1", "fast brown fox").operator(Operator.AND)) - .highlighter(highlight().field("field1").order("score").preTags("").postTags("")); - searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field1", 0, 1, - equalTo("The quick brown fox jumps over the lazy dog")); - } - } public void testHighlightQueryRewriteDatesWithNow() throws Exception { assertAcked(client().admin().indices().prepareCreate("index-1").addMapping("type", "d", "type=date", diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index fe50aaf9b73..aa9d9c4b87e 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -156,13 +156,9 @@ public class QueryRescorerIT extends ESIntegTestCase { public void testMoreDocs() throws Exception { Builder builder = Settings.builder(); - builder.put("index.analysis.analyzer.synonym.tokenizer", "standard"); - builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase"); - builder.put("index.analysis.filter.synonym.type", "synonym"); - builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street"); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - 
.startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym") + .startObject("field1").field("type", "text").field("analyzer", "whitespace") .endObject().endObject().endObject().endObject(); assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", mapping) @@ -234,13 +230,9 @@ public class QueryRescorerIT extends ESIntegTestCase { // Tests a rescore window smaller than number of hits: public void testSmallRescoreWindow() throws Exception { Builder builder = Settings.builder(); - builder.put("index.analysis.analyzer.synonym.tokenizer", "standard"); - builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase"); - builder.put("index.analysis.filter.synonym.type", "synonym"); - builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street"); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym") + .startObject("field1").field("type", "text").field("analyzer", "whitespace") .endObject().endObject().endObject().endObject(); assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", mapping) @@ -306,13 +298,9 @@ public class QueryRescorerIT extends ESIntegTestCase { // Tests a rescorer that penalizes the scores: public void testRescorerMadeScoresWorse() throws Exception { Builder builder = Settings.builder(); - builder.put("index.analysis.analyzer.synonym.tokenizer", "standard"); - builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase"); - builder.put("index.analysis.filter.synonym.type", "synonym"); - builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street"); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym") + .startObject("field1").field("type", "text").field("analyzer", "whitespace") .endObject().endObject().endObject().endObject(); assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", mapping) diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index 8d2f7cd6993..ce6c00f359f 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.geo; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; @@ -42,7 +43,6 @@ import org.elasticsearch.common.geo.builders.LineStringBuilder; import org.elasticsearch.common.geo.builders.MultiPolygonBuilder; import org.elasticsearch.common.geo.builders.PointBuilder; import org.elasticsearch.common.geo.builders.PolygonBuilder; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -471,8 +471,7 @@ public class GeoFilterIT extends ESIntegTestCase { return true; } catch 
(UnsupportedSpatialOperation e) { final SpatialOperation finalRelation = relation; - ESLoggerFactory - .getLogger(GeoFilterIT.class.getName()) + LogManager.getLogger(GeoFilterIT.class) .info(() -> new ParameterizedMessage("Unsupported spatial operation {}", finalRelation), e); return false; } diff --git a/server/src/test/java/org/elasticsearch/search/lookup/LeafFieldsLookupTests.java b/server/src/test/java/org/elasticsearch/search/lookup/LeafFieldsLookupTests.java index 1195893a28a..e3ae802baba 100644 --- a/server/src/test/java/org/elasticsearch/search/lookup/LeafFieldsLookupTests.java +++ b/server/src/test/java/org/elasticsearch/search/lookup/LeafFieldsLookupTests.java @@ -31,7 +31,6 @@ import org.junit.Before; import java.util.Collections; import java.util.List; -import static org.mockito.AdditionalAnswers.returnsFirstArg; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyInt; import static org.mockito.Matchers.anyObject; @@ -48,7 +47,9 @@ public class LeafFieldsLookupTests extends ESTestCase { MappedFieldType fieldType = mock(MappedFieldType.class); when(fieldType.name()).thenReturn("field"); - when(fieldType.valueForDisplay(anyObject())).then(returnsFirstArg()); + // Add 10 when valueForDisplay is called so it is easy to be sure it *was* called + when(fieldType.valueForDisplay(anyObject())).then(invocation -> + (Double) invocation.getArguments()[0] + 10); MapperService mapperService = mock(MapperService.class); when(mapperService.fullName("field")).thenReturn(fieldType); @@ -77,7 +78,7 @@ public class LeafFieldsLookupTests extends ESTestCase { List values = fieldLookup.getValues(); assertNotNull(values); assertEquals(1, values.size()); - assertEquals(2.718, values.get(0)); + assertEquals(12.718, values.get(0)); } public void testLookupWithFieldAlias() { @@ -87,6 +88,6 @@ public class LeafFieldsLookupTests extends ESTestCase { List values = fieldLookup.getValues(); assertNotNull(values); assertEquals(1, values.size()); - assertEquals(2.718, values.get(0)); + assertEquals(12.718, values.get(0)); } } diff --git a/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java b/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java index dedd0f03664..607133ea8f2 100644 --- a/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java +++ b/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.client.Client; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; @@ -213,7 +212,6 @@ public class MoreLikeThisIT extends ESIntegTestCase { } public void testMoreLikeThisIssue2197() throws Exception { - Client client = client(); String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("bar") .startObject("properties") .endObject() diff --git a/server/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java b/server/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java index 51bc5cc4e24..92bf4d6acad 100644 --- 
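LeafFieldsLookupTests above swaps Mockito's returnsFirstArg() for an Answer that perturbs its argument: echoing the input back cannot prove valueForDisplay was actually invoked, whereas adding 10 makes the call observable in the asserted values (2.718 becomes 12.718). The general shape of such a stub, on a hypothetical interface:

import static org.mockito.Matchers.anyDouble;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

class FormatterStubExample {
    interface Formatter {
        double valueForDisplay(double stored);
    }

    static Formatter stubbedFormatter() {
        Formatter formatter = mock(Formatter.class);
        // The Answer transforms the argument instead of returning it verbatim,
        // so assertions can tell "method applied" apart from "raw value leaked".
        when(formatter.valueForDisplay(anyDouble())).then(invocation ->
                (Double) invocation.getArguments()[0] + 10);
        return formatter;
    }
}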
a/server/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java +++ b/server/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -360,10 +361,12 @@ public class AggregationProfilerIT extends ESIntegTestCase { assertThat(histoBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L)); assertThat(histoAggResult.getProfiledChildren().size(), equalTo(2)); - ProfileResult tagsAggResult = histoAggResult.getProfiledChildren().get(0); + Map histoAggResultSubAggregations = histoAggResult.getProfiledChildren().stream() + .collect(Collectors.toMap(ProfileResult::getLuceneDescription, s -> s)); + + ProfileResult tagsAggResult = histoAggResultSubAggregations.get("tags"); assertThat(tagsAggResult, notNullValue()); assertThat(tagsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getSimpleName())); - assertThat(tagsAggResult.getLuceneDescription(), equalTo("tags")); assertThat(tagsAggResult.getTime(), greaterThan(0L)); Map tagsBreakdown = tagsAggResult.getTimeBreakdown(); assertThat(tagsBreakdown, notNullValue()); @@ -377,10 +380,12 @@ public class AggregationProfilerIT extends ESIntegTestCase { assertThat(tagsBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L)); assertThat(tagsAggResult.getProfiledChildren().size(), equalTo(2)); - ProfileResult avgAggResult = tagsAggResult.getProfiledChildren().get(0); + Map tagsAggResultSubAggregations = tagsAggResult.getProfiledChildren().stream() + .collect(Collectors.toMap(ProfileResult::getLuceneDescription, s -> s)); + + ProfileResult avgAggResult = tagsAggResultSubAggregations.get("avg"); assertThat(avgAggResult, notNullValue()); assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); - assertThat(avgAggResult.getLuceneDescription(), equalTo("avg")); assertThat(avgAggResult.getTime(), greaterThan(0L)); Map avgBreakdown = tagsAggResult.getTimeBreakdown(); assertThat(avgBreakdown, notNullValue()); @@ -394,10 +399,9 @@ public class AggregationProfilerIT extends ESIntegTestCase { assertThat(avgBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L)); assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0)); - ProfileResult maxAggResult = tagsAggResult.getProfiledChildren().get(1); + ProfileResult maxAggResult = tagsAggResultSubAggregations.get("max"); assertThat(maxAggResult, notNullValue()); assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator")); - assertThat(maxAggResult.getLuceneDescription(), equalTo("max")); assertThat(maxAggResult.getTime(), greaterThan(0L)); Map maxBreakdown = tagsAggResult.getTimeBreakdown(); assertThat(maxBreakdown, notNullValue()); @@ -411,10 +415,9 @@ public class AggregationProfilerIT extends ESIntegTestCase { assertThat(maxBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L)); assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0)); - ProfileResult stringsAggResult = histoAggResult.getProfiledChildren().get(1); + ProfileResult stringsAggResult = histoAggResultSubAggregations.get("strings"); assertThat(stringsAggResult, notNullValue()); assertThat(stringsAggResult.getQueryName(), 
equalTo(GlobalOrdinalsStringTermsAggregator.class.getSimpleName())); - assertThat(stringsAggResult.getLuceneDescription(), equalTo("strings")); assertThat(stringsAggResult.getTime(), greaterThan(0L)); Map stringsBreakdown = stringsAggResult.getTimeBreakdown(); assertThat(stringsBreakdown, notNullValue()); @@ -428,10 +431,12 @@ public class AggregationProfilerIT extends ESIntegTestCase { assertThat(stringsBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L)); assertThat(stringsAggResult.getProfiledChildren().size(), equalTo(3)); - avgAggResult = stringsAggResult.getProfiledChildren().get(0); + Map stringsAggResultSubAggregations = stringsAggResult.getProfiledChildren().stream() + .collect(Collectors.toMap(ProfileResult::getLuceneDescription, s -> s)); + + avgAggResult = stringsAggResultSubAggregations.get("avg"); assertThat(avgAggResult, notNullValue()); assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); - assertThat(avgAggResult.getLuceneDescription(), equalTo("avg")); assertThat(avgAggResult.getTime(), greaterThan(0L)); avgBreakdown = stringsAggResult.getTimeBreakdown(); assertThat(avgBreakdown, notNullValue()); @@ -445,10 +450,9 @@ public class AggregationProfilerIT extends ESIntegTestCase { assertThat(avgBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L)); assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0)); - maxAggResult = stringsAggResult.getProfiledChildren().get(1); + maxAggResult = stringsAggResultSubAggregations.get("max"); assertThat(maxAggResult, notNullValue()); assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator")); - assertThat(maxAggResult.getLuceneDescription(), equalTo("max")); assertThat(maxAggResult.getTime(), greaterThan(0L)); maxBreakdown = stringsAggResult.getTimeBreakdown(); assertThat(maxBreakdown, notNullValue()); @@ -462,7 +466,7 @@ public class AggregationProfilerIT extends ESIntegTestCase { assertThat(maxBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L)); assertThat(maxAggResult.getProfiledChildren().size(), equalTo(0)); - tagsAggResult = stringsAggResult.getProfiledChildren().get(2); + tagsAggResult = stringsAggResultSubAggregations.get("tags"); assertThat(tagsAggResult, notNullValue()); assertThat(tagsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getSimpleName())); assertThat(tagsAggResult.getLuceneDescription(), equalTo("tags")); @@ -479,10 +483,12 @@ public class AggregationProfilerIT extends ESIntegTestCase { assertThat(tagsBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L)); assertThat(tagsAggResult.getProfiledChildren().size(), equalTo(2)); - avgAggResult = tagsAggResult.getProfiledChildren().get(0); + tagsAggResultSubAggregations = tagsAggResult.getProfiledChildren().stream() + .collect(Collectors.toMap(ProfileResult::getLuceneDescription, s -> s)); + + avgAggResult = tagsAggResultSubAggregations.get("avg"); assertThat(avgAggResult, notNullValue()); assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); - assertThat(avgAggResult.getLuceneDescription(), equalTo("avg")); assertThat(avgAggResult.getTime(), greaterThan(0L)); avgBreakdown = tagsAggResult.getTimeBreakdown(); assertThat(avgBreakdown, notNullValue()); @@ -496,10 +502,9 @@ public class AggregationProfilerIT extends ESIntegTestCase { assertThat(avgBreakdown.get(AggregationTimingType.REDUCE.toString()), equalTo(0L)); assertThat(avgAggResult.getProfiledChildren().size(), equalTo(0)); - maxAggResult = tagsAggResult.getProfiledChildren().get(1); + 
maxAggResult = tagsAggResultSubAggregations.get("max"); assertThat(maxAggResult, notNullValue()); assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator")); - assertThat(maxAggResult.getLuceneDescription(), equalTo("max")); assertThat(maxAggResult.getTime(), greaterThan(0L)); maxBreakdown = tagsAggResult.getTimeBreakdown(); assertThat(maxBreakdown, notNullValue()); diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java index ba58a79953b..b7a9c8cb69a 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java @@ -82,7 +82,7 @@ public class QueryProfilerTests extends ESTestCase { } reader = w.getReader(); w.close(); - Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader)); + Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader), null); searcher = new ContextIndexSearcher(engineSearcher, IndexSearcher.getDefaultQueryCache(), MAYBE_CACHE_POLICY); } @@ -363,7 +363,7 @@ public class QueryProfilerTests extends ESTestCase { public void testApproximations() throws IOException { QueryProfiler profiler = new QueryProfiler(); - Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader)); + Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader), logger); // disable query caching since we want to test approximations, which won't // be exposed on a cached entry ContextIndexSearcher searcher = new ContextIndexSearcher(engineSearcher, null, MAYBE_CACHE_POLICY); diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java b/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java index f2d69fc377d..8a09e5a919a 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java @@ -20,16 +20,13 @@ package org.elasticsearch.search.query; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.Operator; -import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -48,7 +45,6 @@ import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -252,92 +248,6 @@ public 
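The AggregationProfilerIT changes above stop indexing into getProfiledChildren() by position and instead key each level's children by getLuceneDescription(), so the assertions no longer depend on the order in which the profiler happens to report sub-aggregations. Reduced to plain collections (the names here are stand-ins, not the ProfileResult API):

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

class ChildrenByName {
    static void example() {
        List<String> children = Arrays.asList("tags", "avg", "max"); // arbitrary order
        // Collectors.toMap throws on duplicate keys, which is acceptable here
        // because sub-aggregation names are unique within a parent.
        Map<String, String> byName = children.stream()
                .collect(Collectors.toMap(Function.identity(), s -> s));
        assert byName.containsKey("max"); // order-independent lookup
    }
}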
class QueryStringIT extends ESIntegTestCase { containsString("unit [D] not supported for date math [-2D]")); } - private void setupIndexWithGraph(String index) throws Exception { - CreateIndexRequestBuilder builder = prepareCreate(index).setSettings( - Settings.builder() - .put(indexSettings()) - .put("index.analysis.filter.graphsyns.type", "synonym_graph") - .putList("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz") - .put("index.analysis.analyzer.lower_graphsyns.type", "custom") - .put("index.analysis.analyzer.lower_graphsyns.tokenizer", "standard") - .putList("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns") - ); - - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject(index).startObject("properties") - .startObject("field").field("type", "text").endObject().endObject().endObject().endObject(); - - assertAcked(builder.addMapping(index, mapping)); - ensureGreen(); - - List builders = new ArrayList<>(); - builders.add(client().prepareIndex(index, index, "1").setSource("field", "say wtf happened foo")); - builders.add(client().prepareIndex(index, index, "2").setSource("field", "bar baz what the fudge man")); - builders.add(client().prepareIndex(index, index, "3").setSource("field", "wtf")); - builders.add(client().prepareIndex(index, index, "4").setSource("field", "what is the name for fudge")); - builders.add(client().prepareIndex(index, index, "5").setSource("field", "bar two three")); - builders.add(client().prepareIndex(index, index, "6").setSource("field", "bar baz two three")); - - indexRandom(true, false, builders); - } - - public void testGraphQueries() throws Exception { - String index = "graph_test_index"; - setupIndexWithGraph(index); - - // phrase - SearchResponse searchResponse = client().prepareSearch(index).setQuery( - QueryBuilders.queryStringQuery("\"foo two three\"") - .defaultField("field") - .analyzer("lower_graphsyns")).get(); - - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "6"); - - // and - searchResponse = client().prepareSearch(index).setQuery( - QueryBuilders.queryStringQuery("say what the fudge") - .defaultField("field") - .defaultOperator(Operator.AND) - .autoGenerateSynonymsPhraseQuery(false) - .analyzer("lower_graphsyns")).get(); - - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "1"); - - // or - searchResponse = client().prepareSearch(index).setQuery( - QueryBuilders.queryStringQuery("three what the fudge foo") - .defaultField("field") - .defaultOperator(Operator.OR) - .autoGenerateSynonymsPhraseQuery(false) - .analyzer("lower_graphsyns")).get(); - - assertHitCount(searchResponse, 6L); - assertSearchHits(searchResponse, "1", "2", "3", "4", "5", "6"); - - // min should match - searchResponse = client().prepareSearch(index).setQuery( - QueryBuilders.queryStringQuery("three what the fudge foo") - .defaultField("field") - .defaultOperator(Operator.OR) - .autoGenerateSynonymsPhraseQuery(false) - .analyzer("lower_graphsyns") - .minimumShouldMatch("80%")).get(); - - assertHitCount(searchResponse, 3L); - assertSearchHits(searchResponse, "1", "2", "6"); - - // multi terms synonyms phrase - searchResponse = client().prepareSearch(index).setQuery( - QueryBuilders.queryStringQuery("what the fudge") - .defaultField("field") - .defaultOperator(Operator.AND) - .analyzer("lower_graphsyns")) - .get(); - assertHitCount(searchResponse, 3L); - assertSearchHits(searchResponse, "1", "2", "3"); - } - public void testLimitOnExpandedFields() 
throws Exception { XContentBuilder builder = jsonBuilder(); builder.startObject(); diff --git a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 860c3e074f3..6068f890259 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.query; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.util.English; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; @@ -349,98 +348,6 @@ public class SearchQueryIT extends ESIntegTestCase { assertThirdHit(searchResponse, hasId("2")); } - public void testCommonTermsQueryStackedTokens() throws Exception { - assertAcked(prepareCreate("test") - .setSettings(Settings.builder() - .put(indexSettings()) - .put(SETTING_NUMBER_OF_SHARDS,1) - .put("index.analysis.filter.syns.type","synonym") - .putList("index.analysis.filter.syns.synonyms","quick,fast") - .put("index.analysis.analyzer.syns.tokenizer","standard") - .put("index.analysis.analyzer.syns.filter","syns") - ) - .addMapping("type1", "field1", "type=text,analyzer=syns", "field2", "type=text,analyzer=syns")); - - indexRandom(true, client().prepareIndex("test", "type1", "3").setSource("field1", "quick lazy huge brown pidgin", "field2", "the quick lazy huge brown fox jumps over the tree"), - client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox"), - client().prepareIndex("test", "type1", "2").setSource("field1", "the quick lazy huge brown fox jumps over the tree") ); - - SearchResponse searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the fast brown").cutoffFrequency(3).lowFreqOperator(Operator.OR)).get(); - assertHitCount(searchResponse, 3L); - assertFirstHit(searchResponse, hasId("1")); - assertSecondHit(searchResponse, hasId("2")); - assertThirdHit(searchResponse, hasId("3")); - - searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the fast brown").cutoffFrequency(3).lowFreqOperator(Operator.AND)).get(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L)); - assertFirstHit(searchResponse, hasId("1")); - assertSecondHit(searchResponse, hasId("2")); - - // Default - searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the fast brown").cutoffFrequency(3)).get(); - assertHitCount(searchResponse, 3L); - assertFirstHit(searchResponse, hasId("1")); - assertSecondHit(searchResponse, hasId("2")); - assertThirdHit(searchResponse, hasId("3")); - - - searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the fast huge fox").lowFreqMinimumShouldMatch("3")).get(); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("2")); - - searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the fast lazy fox brown").cutoffFrequency(1).highFreqMinimumShouldMatch("5")).get(); - assertHitCount(searchResponse, 2L); - assertFirstHit(searchResponse, hasId("2")); - assertSecondHit(searchResponse, hasId("1")); - - searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the fast lazy fox 
brown").cutoffFrequency(1).highFreqMinimumShouldMatch("6")).get(); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("2")); - - // Default - searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the fast lazy fox brown").cutoffFrequency(1)).get(); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("2")); - - searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the quick brown").cutoffFrequency(3).analyzer("stop")).get(); - assertHitCount(searchResponse, 3L); - // stop drops "the" since its a stopword - assertFirstHit(searchResponse, hasId("1")); - assertSecondHit(searchResponse, hasId("3")); - assertThirdHit(searchResponse, hasId("2")); - - // try the same with match query - searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the fast brown").cutoffFrequency(3).operator(Operator.AND)).get(); - assertHitCount(searchResponse, 2L); - assertFirstHit(searchResponse, hasId("1")); - assertSecondHit(searchResponse, hasId("2")); - - searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the fast brown").cutoffFrequency(3).operator(Operator.OR)).get(); - assertHitCount(searchResponse, 3L); - assertFirstHit(searchResponse, hasId("1")); - assertSecondHit(searchResponse, hasId("2")); - assertThirdHit(searchResponse, hasId("3")); - - searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the fast brown").cutoffFrequency(3).operator(Operator.AND).analyzer("stop")).get(); - assertHitCount(searchResponse, 3L); - // stop drops "the" since its a stopword - assertFirstHit(searchResponse, hasId("1")); - assertSecondHit(searchResponse, hasId("3")); - assertThirdHit(searchResponse, hasId("2")); - - searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the fast brown").cutoffFrequency(3).minimumShouldMatch("3")).get(); - assertHitCount(searchResponse, 2L); - assertFirstHit(searchResponse, hasId("1")); - assertSecondHit(searchResponse, hasId("2")); - - // try the same with multi match query - searchResponse = client().prepareSearch().setQuery(multiMatchQuery("the fast brown", "field1", "field2").cutoffFrequency(3).operator(Operator.AND)).get(); - assertHitCount(searchResponse, 3L); - assertFirstHit(searchResponse, hasId("3")); - assertSecondHit(searchResponse, hasId("1")); - assertThirdHit(searchResponse, hasId("2")); - } - public void testQueryStringAnalyzedWildcard() throws Exception { createIndex("test"); @@ -1535,69 +1442,6 @@ public class SearchQueryIT extends ESIntegTestCase { assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("field\\*:/value[01]/")).get(), 1); } - // see #3881 - for extensive description of the issue - public void testMatchQueryWithSynonyms() throws IOException { - CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder() - .put(indexSettings()) - .put("index.analysis.analyzer.index.type", "custom") - .put("index.analysis.analyzer.index.tokenizer", "standard") - .put("index.analysis.analyzer.index.filter", "lowercase") - .put("index.analysis.analyzer.search.type", "custom") - .put("index.analysis.analyzer.search.tokenizer", "standard") - .putList("index.analysis.analyzer.search.filter", "lowercase", "synonym") - .put("index.analysis.filter.synonym.type", "synonym") - .putList("index.analysis.filter.synonym.synonyms", "fast, quick")); - assertAcked(builder.addMapping("test", "text", "type=text,analyzer=index,search_analyzer=search")); - - 
client().prepareIndex("test", "test", "1").setSource("text", "quick brown fox").get(); - refresh(); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "quick").operator(Operator.AND)).get(); - assertHitCount(searchResponse, 1); - searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "quick brown").operator(Operator.AND)).get(); - assertHitCount(searchResponse, 1); - searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "fast").operator(Operator.AND)).get(); - assertHitCount(searchResponse, 1); - - client().prepareIndex("test", "test", "2").setSource("text", "fast brown fox").get(); - refresh(); - searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "quick").operator(Operator.AND)).get(); - assertHitCount(searchResponse, 2); - searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "quick brown").operator(Operator.AND)).get(); - assertHitCount(searchResponse, 2); - } - - public void testQueryStringWithSynonyms() throws IOException { - CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder() - .put(indexSettings()) - .put("index.analysis.analyzer.index.type", "custom") - .put("index.analysis.analyzer.index.tokenizer", "standard") - .put("index.analysis.analyzer.index.filter", "lowercase") - .put("index.analysis.analyzer.search.type", "custom") - .put("index.analysis.analyzer.search.tokenizer", "standard") - .putList("index.analysis.analyzer.search.filter", "lowercase", "synonym") - .put("index.analysis.filter.synonym.type", "synonym") - .putList("index.analysis.filter.synonym.synonyms", "fast, quick")); - assertAcked(builder.addMapping("test", "text", "type=text,analyzer=index,search_analyzer=search")); - - client().prepareIndex("test", "test", "1").setSource("text", "quick brown fox").get(); - refresh(); - - SearchResponse searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("quick").defaultField("text").defaultOperator(Operator.AND)).get(); - assertHitCount(searchResponse, 1); - searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("quick brown").defaultField("text").defaultOperator(Operator.AND)).get(); - assertHitCount(searchResponse, 1); - searchResponse = client().prepareSearch().setQuery(queryStringQuery("fast").defaultField("text").defaultOperator(Operator.AND)).get(); - assertHitCount(searchResponse, 1); - - client().prepareIndex("test", "test", "2").setSource("text", "fast brown fox").get(); - refresh(); - - searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("quick").defaultField("text").defaultOperator(Operator.AND)).get(); - assertHitCount(searchResponse, 2); - searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("quick brown").defaultField("text").defaultOperator(Operator.AND)).get(); - assertHitCount(searchResponse, 2); - } - // see #3797 public void testMultiMatchLenientIssue3797() { createIndex("test"); @@ -1744,7 +1588,7 @@ public class SearchQueryIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(0).getId(), is("3")); // When we use long values, it means we have ms since epoch UTC based so we don't apply any transformation - Exception e = expectThrows(SearchPhaseExecutionException.class, () -> + expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("test") .setQuery(QueryBuilders.rangeQuery("date").from(1388534400000L).to(1388537940999L).timeZone("+01:00")) .get()); diff --git 
a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index 700b3949fac..24621a12d39 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -201,12 +201,10 @@ public class QueryRescorerBuilderTests extends ESTestCase { rescoreBuilder.setRescoreQueryWeight(randomFloat()); rescoreBuilder.setScoreMode(QueryRescoreMode.Max); - QueryRescoreContext rescoreContext = (QueryRescoreContext) rescoreBuilder.buildContext(mockShardContext); QueryRescorerBuilder rescoreRewritten = rescoreBuilder.rewrite(mockShardContext); assertEquals(rescoreRewritten.getQueryWeight(), rescoreBuilder.getQueryWeight(), 0.01f); assertEquals(rescoreRewritten.getRescoreQueryWeight(), rescoreBuilder.getRescoreQueryWeight(), 0.01f); assertEquals(rescoreRewritten.getScoreMode(), rescoreBuilder.getScoreMode()); - } /** diff --git a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index 2285af3ec46..e301e8c11c3 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -84,7 +84,7 @@ public abstract class AbstractSortTestCase> extends EST .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); Map, Object>> scripts = Collections.singletonMap(MOCK_SCRIPT_NAME, p -> null); - ScriptEngine engine = new MockScriptEngine(MockScriptEngine.NAME, scripts); + ScriptEngine engine = new MockScriptEngine(MockScriptEngine.NAME, scripts, Collections.emptyMap()); scriptService = new ScriptService(baseSettings, Collections.singletonMap(engine.getType(), engine), ScriptModule.CORE_CONTEXTS); SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList()); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index ca21cbc86ca..52893a3c032 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -522,28 +522,6 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { assertSuggestions("b", "The Beatles"); } - public void testThatSynonymsWork() throws Exception { - Settings.Builder settingsBuilder = Settings.builder() - .put("analysis.analyzer.suggest_analyzer_synonyms.type", "custom") - .put("analysis.analyzer.suggest_analyzer_synonyms.tokenizer", "standard") - .putList("analysis.analyzer.suggest_analyzer_synonyms.filter", "lowercase", "my_synonyms") - .put("analysis.filter.my_synonyms.type", "synonym") - .putList("analysis.filter.my_synonyms.synonyms", "foo,renamed"); - completionMappingBuilder.searchAnalyzer("suggest_analyzer_synonyms").indexAnalyzer("suggest_analyzer_synonyms"); - createIndexAndMappingAndSettings(settingsBuilder.build(), completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foo Fighters").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - // get suggestions for renamed - assertSuggestions("r", "Foo Fighters"); - } - 
public void testThatUpgradeToMultiFieldsWorks() throws Exception { final XContentBuilder mapping = jsonBuilder() .startObject() diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java index 995a2c10fe5..98ed6a4a598 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java @@ -972,11 +972,8 @@ public class SuggestSearchIT extends ESIntegTestCase { assertSuggestionSize(searchSuggest, 0, 25480, "title"); // Just to prove that we've run through a ton of options suggest.size(1); - long start = System.currentTimeMillis(); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", suggest); - long total = System.currentTimeMillis() - start; assertSuggestion(searchSuggest, 0, 0, "title", "united states house of representatives elections in washington 2006"); - // assertThat(total, lessThan(1000L)); // Takes many seconds without fix - just for debugging } public void testSuggestWithFieldAlias() throws Exception { @@ -1168,7 +1165,7 @@ public class SuggestSearchIT extends ESIntegTestCase { .endObject() .endObject()); - PhraseSuggestionBuilder in = suggest.collateQuery(filterStr); + suggest.collateQuery(filterStr); try { searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion); fail("Post filter error has been swallowed"); @@ -1186,7 +1183,6 @@ public class SuggestSearchIT extends ESIntegTestCase { .endObject()); - PhraseSuggestionBuilder phraseSuggestWithNoParams = suggest.collateQuery(collateWithParams); try { searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion); fail("Malformed query (lack of additional params) should fail"); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index 88e6ce64666..f7423d3f55a 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -45,7 +45,6 @@ import static org.hamcrest.Matchers.instanceOf; public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTestCase { private static final String[] SHUFFLE_PROTECTED_FIELDS = new String[] { CompletionSuggestionBuilder.CONTEXTS_FIELD.getPreferredName() }; - private static final Map> contextMap = new HashMap<>(); private static String categoryContextName; private static String geoQueryContextName; private static List> contextMappings = new ArrayList<>(); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 632a1ecbee1..96ddfc44dba 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -2118,17 +2118,14 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .put("compress", randomBoolean()) .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES))); - 
logger.info("--> create test index with synonyms search analyzer"); + logger.info("--> create test index with case-preserving search analyzer"); Settings.Builder indexSettings = Settings.builder() .put(indexSettings()) .put(SETTING_NUMBER_OF_REPLICAS, between(0, 1)) .put(INDEX_REFRESH_INTERVAL_SETTING.getKey(), "10s") .put("index.analysis.analyzer.my_analyzer.type", "custom") - .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard") - .putList("index.analysis.analyzer.my_analyzer.filter", "lowercase", "my_synonym") - .put("index.analysis.filter.my_synonym.type", "synonym") - .put("index.analysis.filter.my_synonym.synonyms", "foo => bar"); + .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard"); assertAcked(prepareCreate("test-idx", 2, indexSettings)); @@ -2137,12 +2134,13 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas final int numdocs = randomIntBetween(10, 100); IndexRequestBuilder[] builders = new IndexRequestBuilder[numdocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test-idx", "type1", Integer.toString(i)).setSource("field1", "bar " + i); + builders[i] = client().prepareIndex("test-idx", "type1", Integer.toString(i)).setSource("field1", "Foo bar " + i); } indexRandom(true, builders); flushAndRefresh(); assertHitCount(client.prepareSearch("test-idx").setSize(0).setQuery(matchQuery("field1", "foo")).get(), numdocs); + assertHitCount(client.prepareSearch("test-idx").setSize(0).setQuery(matchQuery("field1", "Foo")).get(), 0); assertHitCount(client.prepareSearch("test-idx").setSize(0).setQuery(matchQuery("field1", "bar")).get(), numdocs); logger.info("--> snapshot it"); @@ -2195,9 +2193,8 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas // Make sure that number of shards didn't change assertThat(getSettingsResponse.getSetting("test-idx", SETTING_NUMBER_OF_SHARDS), equalTo("" + numberOfShards)); assertThat(getSettingsResponse.getSetting("test-idx", "index.analysis.analyzer.my_analyzer.type"), equalTo("standard")); - assertThat(getSettingsResponse.getSetting("test-idx", "index.analysis.filter.my_synonym.type"), nullValue()); - assertHitCount(client.prepareSearch("test-idx").setSize(0).setQuery(matchQuery("field1", "foo")).get(), 0); + assertHitCount(client.prepareSearch("test-idx").setSize(0).setQuery(matchQuery("field1", "Foo")).get(), numdocs); assertHitCount(client.prepareSearch("test-idx").setSize(0).setQuery(matchQuery("field1", "bar")).get(), numdocs); logger.info("--> delete the index and recreate it while deleting all index settings"); @@ -2217,7 +2214,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas // Make sure that number of shards didn't change assertThat(getSettingsResponse.getSetting("test-idx", SETTING_NUMBER_OF_SHARDS), equalTo("" + numberOfShards)); - assertHitCount(client.prepareSearch("test-idx").setSize(0).setQuery(matchQuery("field1", "foo")).get(), 0); + assertHitCount(client.prepareSearch("test-idx").setSize(0).setQuery(matchQuery("field1", "Foo")).get(), numdocs); assertHitCount(client.prepareSearch("test-idx").setSize(0).setQuery(matchQuery("field1", "bar")).get(), numdocs); } @@ -2819,7 +2816,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas Predicate isRestorableIndex = index -> corruptedIndex.getName().equals(index) == false; - RestoreSnapshotResponse restoreSnapshotResponse = client().admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap") + 
client().admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap") .setIndices(nbDocsPerIndex.keySet().stream().filter(isRestorableIndex).toArray(String[]::new)) .setRestoreGlobalState(randomBoolean()) .setWaitForCompletion(true) diff --git a/server/src/test/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilderTests.java b/server/src/test/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilderTests.java index 836193423f1..ef5f87b940c 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilderTests.java @@ -25,10 +25,10 @@ import static org.hamcrest.CoreMatchers.containsString; public class AutoQueueAdjustingExecutorBuilderTests extends ESThreadPoolTestCase { - public void testValidatingMinMaxSettings() throws Exception { + public void testValidatingMinMaxSettings() { Settings settings = Settings.builder() - .put("thread_pool.search.min_queue_size", randomIntBetween(30, 100)) - .put("thread_pool.search.max_queue_size", randomIntBetween(1,25)) + .put("thread_pool.test.min_queue_size", randomIntBetween(30, 100)) + .put("thread_pool.test.max_queue_size", randomIntBetween(1,25)) .build(); try { new AutoQueueAdjustingExecutorBuilder(settings, "test", 1, 15, 1, 100, 10); @@ -36,6 +36,70 @@ public class AutoQueueAdjustingExecutorBuilderTests extends ESThreadPoolTestCase } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("Failed to parse value")); } + + settings = Settings.builder() + .put("thread_pool.test.min_queue_size", 10) + .put("thread_pool.test.max_queue_size", 9) + .build(); + try { + new AutoQueueAdjustingExecutorBuilder(settings, "test", 1, 15, 1, 100, 2000).getSettings(settings); + fail("should have thrown an exception"); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "Failed to parse value [10] for setting [thread_pool.test.min_queue_size] must be <= 9"); + } + + settings = Settings.builder() + .put("thread_pool.test.min_queue_size", 11) + .put("thread_pool.test.max_queue_size", 10) + .build(); + try { + new AutoQueueAdjustingExecutorBuilder(settings, "test", 1, 15, 1, 100, 2000).getSettings(settings); + fail("should have thrown an exception"); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "Failed to parse value [11] for setting [thread_pool.test.min_queue_size] must be <= 10"); + } + + settings = Settings.builder() + .put("thread_pool.test.min_queue_size", 101) + .build(); + try { + new AutoQueueAdjustingExecutorBuilder(settings, "test", 1, 15, 100, 100, 2000).getSettings(settings); + fail("should have thrown an exception"); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "Failed to parse value [101] for setting [thread_pool.test.min_queue_size] must be <= 100"); + } + + settings = Settings.builder() + .put("thread_pool.test.max_queue_size", 99) + .build(); + try { + new AutoQueueAdjustingExecutorBuilder(settings, "test", 1, 15, 100, 100, 2000).getSettings(settings); + fail("should have thrown an exception"); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "Failed to parse value [100] for setting [thread_pool.test.min_queue_size] must be <= 99"); + } + } + + public void testSetLowerSettings() { + Settings settings = Settings.builder() + .put("thread_pool.test.min_queue_size", 10) + .put("thread_pool.test.max_queue_size", 10) + .build(); + AutoQueueAdjustingExecutorBuilder test = new 
AutoQueueAdjustingExecutorBuilder(settings, "test", 1, 1000, 1000, 1000, 2000); + AutoQueueAdjustingExecutorBuilder.AutoExecutorSettings s = test.getSettings(settings); + assertEquals(10, s.maxQueueSize); + assertEquals(10, s.minQueueSize); + } + + public void testSetHigherSettings() { + Settings settings = Settings.builder() + .put("thread_pool.test.min_queue_size", 2000) + .put("thread_pool.test.max_queue_size", 3000) + .build(); + AutoQueueAdjustingExecutorBuilder test = new AutoQueueAdjustingExecutorBuilder(settings, "test", 1, 1000, 1000, 1000, 2000); + AutoQueueAdjustingExecutorBuilder.AutoExecutorSettings s = test.getSettings(settings); + assertEquals(3000, s.maxQueueSize); + assertEquals(2000, s.minQueueSize); } } diff --git a/server/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/server/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java index cec5f9b1be2..5f286a5ff0a 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java +++ b/server/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java @@ -55,7 +55,7 @@ public class SimpleThreadPoolIT extends ESIntegTestCase { } } logger.info("pre node threads are {}", preNodeStartThreadNames); - String node = internalCluster().startNode(); + internalCluster().startNode(); logger.info("do some indexing, flushing, optimize, and searches"); int numDocs = randomIntBetween(2, 100); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; diff --git a/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java index ea281f7d9ae..c004ed9b3bc 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java @@ -61,7 +61,6 @@ public class UpdateThreadPoolSettingsTests extends ESThreadPoolTestCase { } public void testWriteThreadPoolsMaxSize() throws InterruptedException { - final String name = Names.WRITE; final int maxSize = 1 + EsExecutors.numberOfProcessors(Settings.EMPTY); final int tooBig = randomIntBetween(1 + maxSize, Integer.MAX_VALUE); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index f5d23c4f3f8..e77180508b6 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -53,6 +53,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.mocksocket.MockServerSocket; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.test.transport.StubbableTransport; import org.elasticsearch.threadpool.TestThreadPool; @@ -834,6 +835,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { } } + @TestLogging("_root:DEBUG, org.elasticsearch.transport:TRACE") public void testCloseWhileConcurrentlyConnecting() throws IOException, InterruptedException, BrokenBarrierException { List knownNodes = new CopyOnWriteArrayList<>(); try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT); diff --git 
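The new AutoQueueAdjustingExecutorBuilderTests cases above pin down the builder's min <= max queue-size constraint together with its exact error message, and verify that explicitly configured sizes may sit below or above the defaults. A condensed, hypothetical form of the rule being exercised (the real validation lives in the builder's setting definitions):

// Hypothetical sketch; names mirror the asserted error message, not production code.
static void validateQueueSizes(String pool, int minQueueSize, int maxQueueSize) {
    if (minQueueSize > maxQueueSize) {
        throw new IllegalArgumentException("Failed to parse value [" + minQueueSize
                + "] for setting [thread_pool." + pool + ".min_queue_size] must be <= " + maxQueueSize);
    }
}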
a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index ac8f578a67a..94ac7e963c1 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.AbstractScopedSettings; import org.elasticsearch.common.settings.ClusterSettings; @@ -61,7 +62,10 @@ import java.util.stream.Collectors; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.startsWith; public class RemoteClusterServiceTests extends ESTestCase { @@ -119,17 +123,19 @@ public class RemoteClusterServiceTests extends ESTestCase { public void testBuildRemoteClustersDynamicConfig() throws Exception { Map>>> map = RemoteClusterService.buildRemoteClustersDynamicConfig( - Settings.builder().put("cluster.remote.foo.seeds", "192.168.0.1:8080") - .put("cluster.remote.bar.seeds", "[::1]:9090") - .put("cluster.remote.boom.seeds", "boom-node1.internal:1000") - .put("cluster.remote.boom.proxy", "foo.bar.com:1234").build()); - assertEquals(3, map.size()); - assertTrue(map.containsKey("foo")); - assertTrue(map.containsKey("bar")); - assertTrue(map.containsKey("boom")); - assertEquals(1, map.get("foo").v2().size()); - assertEquals(1, map.get("bar").v2().size()); - assertEquals(1, map.get("boom").v2().size()); + Settings.builder() + .put("cluster.remote.foo.seeds", "192.168.0.1:8080") + .put("cluster.remote.bar.seeds", "[::1]:9090") + .put("cluster.remote.boom.seeds", "boom-node1.internal:1000") + .put("cluster.remote.boom.proxy", "foo.bar.com:1234") + .put("search.remote.quux.seeds", "quux:9300") + .put("search.remote.quux.proxy", "quux-proxy:19300") + .build()); + assertThat(map.keySet(), containsInAnyOrder(equalTo("foo"), equalTo("bar"), equalTo("boom"), equalTo("quux"))); + assertThat(map.get("foo").v2(), hasSize(1)); + assertThat(map.get("bar").v2(), hasSize(1)); + assertThat(map.get("boom").v2(), hasSize(1)); + assertThat(map.get("quux").v2(), hasSize(1)); DiscoveryNode foo = map.get("foo").v2().get(0).get(); assertEquals("", map.get("foo").v1()); @@ -149,8 +155,42 @@ public class RemoteClusterServiceTests extends ESTestCase { assertEquals(boom.getId(), "boom#boom-node1.internal:1000"); assertEquals("foo.bar.com:1234", map.get("boom").v1()); assertEquals(boom.getVersion(), Version.CURRENT.minimumCompatibilityVersion()); + + DiscoveryNode quux = map.get("quux").v2().get(0).get(); + assertEquals(quux.getAddress(), new TransportAddress(TransportAddress.META_ADDRESS, 0)); + assertEquals("quux", quux.getHostName()); + assertEquals(quux.getId(), "quux#quux:9300"); + assertEquals("quux-proxy:19300", map.get("quux").v1()); + assertEquals(quux.getVersion(), Version.CURRENT.minimumCompatibilityVersion()); + + 
assertSettingDeprecationsAndWarnings(new String[]{"search.remote.quux.seeds", "search.remote.quux.proxy"}); } + public void testBuildRemoteClustersDynamicConfigWithDuplicate() { + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> RemoteClusterService.buildRemoteClustersDynamicConfig( + Settings.builder() + .put("cluster.remote.foo.seeds", "192.168.0.1:8080") + .put("search.remote.foo.seeds", "192.168.0.1:8080") + .build())); + assertThat(e, hasToString(containsString("found duplicate remote cluster configurations for cluster alias [foo]"))); + assertSettingDeprecationsAndWarnings(new String[]{"search.remote.foo.seeds"}); + } + + public void testBuildRemoteClustersDynamicConfigWithDuplicates() { + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> RemoteClusterService.buildRemoteClustersDynamicConfig( + Settings.builder() + .put("cluster.remote.foo.seeds", "192.168.0.1:8080") + .put("search.remote.foo.seeds", "192.168.0.1:8080") + .put("cluster.remote.bar.seeds", "192.168.0.1:8080") + .put("search.remote.bar.seeds", "192.168.0.1:8080") + .build())); + assertThat(e, hasToString(containsString("found duplicate remote cluster configurations for cluster aliases [bar,foo]"))); + assertSettingDeprecationsAndWarnings(new String[]{"search.remote.bar.seeds", "search.remote.foo.seeds"}); + } public void testGroupClusterIndices() throws IOException { List knownNodes = new CopyOnWriteArrayList<>(); @@ -179,10 +219,9 @@ public class RemoteClusterServiceTests extends ESTestCase { Map> perClusterIndices = service.groupClusterIndices(new String[]{"foo:bar", "cluster_1:bar", "cluster_2:foo:bar", "cluster_1:test", "cluster_2:foo*", "foo", "cluster*:baz", "*:boo", "no*match:boo"}, i -> false); - String[] localIndices = perClusterIndices.computeIfAbsent(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, - k -> Collections.emptyList()).toArray(new String[0]); - assertNotNull(perClusterIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY)); - assertArrayEquals(new String[]{"foo:bar", "foo", "no*match:boo"}, localIndices); + List localIndices = perClusterIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + assertNotNull(localIndices); + assertEquals(Arrays.asList("foo:bar", "foo", "no*match:boo"), localIndices); assertEquals(2, perClusterIndices.size()); assertEquals(Arrays.asList("bar", "test", "baz", "boo"), perClusterIndices.get("cluster_1")); assertEquals(Arrays.asList("foo:bar", "foo*", "baz", "boo"), perClusterIndices.get("cluster_2")); @@ -198,6 +237,68 @@ public class RemoteClusterServiceTests extends ESTestCase { } } + public void testGroupIndices() throws IOException { + List knownNodes = new CopyOnWriteArrayList<>(); + try (MockTransportService seedTransport = startTransport("cluster_1_node", knownNodes, Version.CURRENT); + MockTransportService otherSeedTransport = startTransport("cluster_2_node", knownNodes, Version.CURRENT)) { + DiscoveryNode seedNode = seedTransport.getLocalDiscoNode(); + DiscoveryNode otherSeedNode = otherSeedTransport.getLocalDiscoNode(); + knownNodes.add(seedTransport.getLocalDiscoNode()); + knownNodes.add(otherSeedTransport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + + try (MockTransportService transportService = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, + null)) { + transportService.start(); + transportService.acceptIncomingRequests(); + Settings.Builder builder = Settings.builder(); + 
builder.putList("cluster.remote.cluster_1.seeds", seedNode.getAddress().toString()); + builder.putList("cluster.remote.cluster_2.seeds", otherSeedNode.getAddress().toString()); + try (RemoteClusterService service = new RemoteClusterService(builder.build(), transportService)) { + assertFalse(service.isCrossClusterSearchEnabled()); + service.initializeRemoteClusters(); + assertTrue(service.isCrossClusterSearchEnabled()); + assertTrue(service.isRemoteClusterRegistered("cluster_1")); + assertTrue(service.isRemoteClusterRegistered("cluster_2")); + assertFalse(service.isRemoteClusterRegistered("foo")); + { + Map perClusterIndices = service.groupIndices(IndicesOptions.LENIENT_EXPAND_OPEN, + new String[]{"foo:bar", "cluster_1:bar", "cluster_2:foo:bar", "cluster_1:test", "cluster_2:foo*", "foo", + "cluster*:baz", "*:boo", "no*match:boo"}, + i -> false); + assertEquals(3, perClusterIndices.size()); + assertArrayEquals(new String[]{"foo:bar", "foo", "no*match:boo"}, + perClusterIndices.get(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY).indices()); + assertArrayEquals(new String[]{"bar", "test", "baz", "boo"}, perClusterIndices.get("cluster_1").indices()); + assertArrayEquals(new String[]{"foo:bar", "foo*", "baz", "boo"}, perClusterIndices.get("cluster_2").indices()); + } + { + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> + service.groupClusterIndices(new String[]{"foo:bar", "cluster_1:bar", + "cluster_2:foo:bar", "cluster_1:test", "cluster_2:foo*", "foo"}, "cluster_1:bar"::equals)); + assertEquals("Can not filter indices; index cluster_1:bar exists but there is also a remote cluster named:" + + " cluster_1", iae.getMessage()); + } + { + Map perClusterIndices = service.groupIndices(IndicesOptions.LENIENT_EXPAND_OPEN, + new String[]{"cluster_1:bar", "cluster_2:foo*"}, + i -> false); + assertEquals(2, perClusterIndices.size()); + assertArrayEquals(new String[]{"bar"}, perClusterIndices.get("cluster_1").indices()); + assertArrayEquals(new String[]{"foo*"}, perClusterIndices.get("cluster_2").indices()); + } + { + Map perClusterIndices = service.groupIndices(IndicesOptions.LENIENT_EXPAND_OPEN, + Strings.EMPTY_ARRAY, + i -> false); + assertEquals(1, perClusterIndices.size()); + assertArrayEquals(Strings.EMPTY_ARRAY, perClusterIndices.get(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY).indices()); + } + } + } + } + } + public void testIncrementallyAddClusters() throws IOException { List knownNodes = new CopyOnWriteArrayList<>(); try (MockTransportService seedTransport = startTransport("cluster_1_node", knownNodes, Version.CURRENT); diff --git a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java index bc7ef0fd5d2..c6fb1f406cf 100644 --- a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java @@ -223,6 +223,7 @@ public class TcpTransportTests extends ESTestCase { StreamInput streamIn = reference.streamInput(); streamIn.skip(TcpHeader.MARKER_BYTES_SIZE); + @SuppressWarnings("unused") int len = streamIn.readInt(); long requestId = streamIn.readLong(); assertEquals(42, requestId); diff --git a/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java b/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java index 42a61008820..9f2b60c6901 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java +++ 
b/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java @@ -55,7 +55,7 @@ public class TransportLoggerTests extends ESTestCase { } public void testLoggingHandler() throws IOException { - TransportLogger transportLogger = new TransportLogger(Settings.EMPTY); + TransportLogger transportLogger = new TransportLogger(); final String writePattern = ".*\\[length: \\d+" + diff --git a/server/src/test/java/org/elasticsearch/update/UpdateIT.java b/server/src/test/java/org/elasticsearch/update/UpdateIT.java index 85ebf01ef28..70489e5c1de 100644 --- a/server/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/server/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -19,19 +19,6 @@ package org.elasticsearch.update; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Semaphore; -import java.util.concurrent.TimeUnit; -import java.util.function.Function; - import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; @@ -56,6 +43,19 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; + import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; @@ -586,15 +586,13 @@ public class UpdateIT extends ESIntegTestCase { final class UpdateThread extends Thread { final Map failedMap = new HashMap<>(); final int numberOfIds; - final int updatesPerId; final int maxUpdateRequests = numberOfIdsPerThread*numberOfUpdatesPerId; final int maxDeleteRequests = numberOfIdsPerThread*numberOfUpdatesPerId; private final Semaphore updateRequestsOutstanding = new Semaphore(maxUpdateRequests); private final Semaphore deleteRequestsOutstanding = new Semaphore(maxDeleteRequests); - UpdateThread(int numberOfIds, int updatesPerId) { + UpdateThread(int numberOfIds) { this.numberOfIds = numberOfIds; - this.updatesPerId = updatesPerId; } final class UpdateListener implements ActionListener { @@ -725,7 +723,7 @@ public class UpdateIT extends ESIntegTestCase { final List threads = new ArrayList<>(); for (int i = 0; i < numberOfThreads; i++) { - UpdateThread ut = new UpdateThread(numberOfIdsPerThread, numberOfUpdatesPerId); + UpdateThread ut = new UpdateThread(numberOfIdsPerThread); ut.start(); threads.add(ut); } @@ -749,7 +747,7 @@ public class UpdateIT extends ESIntegTestCase { //This means that we add 1 to the expected versions and attempts //All the previous operations should be complete or failed at this point for (int i = 0; i < numberOfIdsPerThread; ++i) { - UpdateResponse ur = client().prepareUpdate("test", "type1", Integer.toString(i)) + 
client().prepareUpdate("test", "type1", Integer.toString(i)) .setScript(fieldIncScript) .setRetryOnConflict(Integer.MAX_VALUE) .setUpsert(jsonBuilder().startObject().field("field", 1).endObject()) diff --git a/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java index cdbc2c702d8..9ef47af29cd 100644 --- a/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java +++ b/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java @@ -180,42 +180,6 @@ public class SimpleValidateQueryIT extends ESIntegTestCase { assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:value1")); } - public void testExplainMatchPhrasePrefix() { - assertAcked(prepareCreate("test").setSettings( - Settings.builder().put(indexSettings()) - .put("index.analysis.filter.syns.type", "synonym") - .putList("index.analysis.filter.syns.synonyms", "one,two") - .put("index.analysis.analyzer.syns.tokenizer", "standard") - .putList("index.analysis.analyzer.syns.filter", "syns") - ).addMapping("test", "field","type=text,analyzer=syns")); - ensureGreen(); - - ValidateQueryResponse validateQueryResponse = client().admin().indices().prepareValidateQuery("test") - .setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "foo")).setExplain(true).get(); - assertThat(validateQueryResponse.isValid(), equalTo(true)); - assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1)); - assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"foo*\"")); - - validateQueryResponse = client().admin().indices().prepareValidateQuery("test") - .setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "foo bar")).setExplain(true).get(); - assertThat(validateQueryResponse.isValid(), equalTo(true)); - assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1)); - assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"foo bar*\"")); - - // Stacked tokens - validateQueryResponse = client().admin().indices().prepareValidateQuery("test") - .setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "one bar")).setExplain(true).get(); - assertThat(validateQueryResponse.isValid(), equalTo(true)); - assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1)); - assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"(one two) bar*\"")); - - validateQueryResponse = client().admin().indices().prepareValidateQuery("test") - .setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "foo one")).setExplain(true).get(); - assertThat(validateQueryResponse.isValid(), equalTo(true)); - assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1)); - assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"foo (one* two*)\"")); - } - public void testExplainWithRewriteValidateQuery() throws Exception { client().admin().indices().prepareCreate("test") .addMapping("type1", "field", "type=text,analyzer=whitespace") diff --git a/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java b/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java index 588118db4ae..e8d9dd0fc2c 100644 --- a/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java +++ 
b/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java @@ -20,8 +20,8 @@ package org.elasticsearch.versioning; import org.apache.lucene.util.TestUtil; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexResponse; @@ -358,7 +358,6 @@ public class SimpleVersioningIT extends ESIntegTestCase { // zero-pad sequential logger.info("--> use zero-padded sequential ids"); ids = new IDSource() { - final int radix = TestUtil.nextInt(random, Character.MIN_RADIX, Character.MAX_RADIX); final String zeroPad = String.format(Locale.ROOT, "%0" + TestUtil.nextInt(random, 4, 20) + "d", 0); int upto; @@ -374,7 +373,6 @@ public class SimpleVersioningIT extends ESIntegTestCase { logger.info("--> use random long ids"); ids = new IDSource() { final int radix = TestUtil.nextInt(random, Character.MIN_RADIX, Character.MAX_RADIX); - int upto; @Override public String next() { @@ -387,8 +385,6 @@ public class SimpleVersioningIT extends ESIntegTestCase { logger.info("--> use zero-padded random long ids"); ids = new IDSource() { final int radix = TestUtil.nextInt(random, Character.MIN_RADIX, Character.MAX_RADIX); - final String zeroPad = String.format(Locale.ROOT, "%015d", 0); - int upto; @Override public String next() { diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 86f7bd903cc..12f0d645d8a 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -477,6 +477,7 @@ public abstract class EngineTestCase extends ESTestCase { } InternalEngine internalEngine = createInternalEngine(indexWriterFactory, localCheckpointTrackerSupplier, seqNoForOperation, config); + internalEngine.initializeMaxSeqNoOfUpdatesOrDeletes(); internalEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); return internalEngine; } @@ -502,7 +503,7 @@ public abstract class EngineTestCase extends ESTestCase { @Nullable final ToLongBiFunction seqNoForOperation, final EngineConfig config) { if (localCheckpointTrackerSupplier == null) { - return new InternalEngine(config) { + return new InternalTestEngine(config) { @Override IndexWriter createWriter(Directory directory, IndexWriterConfig iwc) throws IOException { return (indexWriterFactory != null) ? @@ -518,7 +519,7 @@ public abstract class EngineTestCase extends ESTestCase { } }; } else { - return new InternalEngine(config, localCheckpointTrackerSupplier) { + return new InternalTestEngine(config, localCheckpointTrackerSupplier) { @Override IndexWriter createWriter(Directory directory, IndexWriterConfig iwc) throws IOException { return (indexWriterFactory != null) ? 
@@ -574,11 +575,11 @@ public abstract class EngineTestCase extends ESTestCase { return new BytesArray(string.getBytes(Charset.defaultCharset())); } - protected static Term newUid(String id) { + public static Term newUid(String id) { return new Term("_id", Uid.encodeId(id)); } - protected Term newUid(ParsedDocument doc) { + public static Term newUid(ParsedDocument doc) { return newUid(doc.id()); } @@ -642,7 +643,7 @@ public abstract class EngineTestCase extends ESTestCase { throw new UnsupportedOperationException("unknown version type: " + versionType); } if (randomBoolean()) { - op = new Engine.Index(id, testParsedDocument(docId, null, testDocumentWithTextField(valuePrefix + i), B_1, null), + op = new Engine.Index(id, testParsedDocument(docId, null, testDocumentWithTextField(valuePrefix + i), SOURCE, null), forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO, forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm, version, @@ -733,7 +734,7 @@ public abstract class EngineTestCase extends ESTestCase { } } - protected void concurrentlyApplyOps(List ops, InternalEngine engine) throws InterruptedException { + public static void concurrentlyApplyOps(List ops, InternalEngine engine) throws InterruptedException { Thread[] thread = new Thread[randomIntBetween(3, 5)]; CountDownLatch startGun = new CountDownLatch(thread.length); AtomicInteger offset = new AtomicInteger(-1); @@ -876,7 +877,7 @@ public abstract class EngineTestCase extends ESTestCase { } } - protected MapperService createMapperService(String type) throws IOException { + public static MapperService createMapperService(String type) throws IOException { IndexMetaData indexMetaData = IndexMetaData.builder("test") .settings(Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/InternalTestEngine.java b/test/framework/src/main/java/org/elasticsearch/index/engine/InternalTestEngine.java new file mode 100644 index 00000000000..8c52d57aabc --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/InternalTestEngine.java @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.engine; + +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.index.seqno.LocalCheckpointTracker; +import org.elasticsearch.index.seqno.SequenceNumbers; + +import java.io.IOException; +import java.util.Map; +import java.util.function.BiFunction; + +/** + * An alternative of {@link InternalEngine} that allows tweaking internals to reduce noise in engine tests. 
+ */ +class InternalTestEngine extends InternalEngine { + private final Map idToMaxSeqNo = ConcurrentCollections.newConcurrentMap(); + + InternalTestEngine(EngineConfig engineConfig) { + super(engineConfig); + } + + InternalTestEngine(EngineConfig engineConfig, BiFunction localCheckpointTrackerSupplier) { + super(engineConfig, localCheckpointTrackerSupplier); + } + + @Override + public IndexResult index(Index index) throws IOException { + if (index.seqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO) { + idToMaxSeqNo.compute(index.id(), (id, existing) -> { + if (existing == null) { + return index.seqNo(); + } else { + long maxSeqNo = Math.max(index.seqNo(), existing); + advanceMaxSeqNoOfUpdatesOrDeletes(maxSeqNo); + return maxSeqNo; + } + }); + } + return super.index(index); + } + + @Override + public DeleteResult delete(Delete delete) throws IOException { + if (delete.seqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO) { + final long maxSeqNo = idToMaxSeqNo.compute(delete.id(), (id, existing) -> { + if (existing == null) { + return delete.seqNo(); + } else { + return Math.max(delete.seqNo(), existing); + } + }); + advanceMaxSeqNoOfUpdatesOrDeletes(maxSeqNo); + } + return super.delete(delete); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index 42eab104d6a..8914bad5c41 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ -21,9 +21,6 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.search.Query; import org.apache.lucene.search.similarities.BM25Similarity; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.query.QueryShardContext; @@ -37,8 +34,6 @@ import java.util.List; /** Base test case for subclasses of MappedFieldType */ public abstract class FieldTypeTestCase extends ESTestCase { - private static final Settings INDEX_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - /** Abstraction for mutating a property of a MappedFieldType */ public abstract static class Modifier { /** The name of the property that is being modified. Used in test failure messages. 
*/ diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index 5f0909db0d3..9021fd1efbb 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -55,6 +55,8 @@ import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.settings.Settings; @@ -95,6 +97,7 @@ import java.util.stream.StreamSupport; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase { @@ -137,6 +140,17 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase return metaData.build(); } + protected IndexRequest copyIndexRequest(IndexRequest inRequest) throws IOException { + final IndexRequest outRequest = new IndexRequest(); + try (BytesStreamOutput out = new BytesStreamOutput()) { + inRequest.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + outRequest.readFrom(in); + } + } + return outRequest; + } + protected DiscoveryNode getDiscoveryNode(String id) { return new DiscoveryNode(id, id, buildNewFakeTransportAddress(), Collections.emptyMap(), Collections.singleton(DiscoveryNode.Role.DATA), Version.CURRENT); @@ -428,6 +442,13 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase public synchronized void close() throws Exception { if (closed == false) { closed = true; + for (IndexShard replica : replicas) { + try { + assertThat(replica.getMaxSeenAutoIdTimestamp(), equalTo(primary.getMaxSeenAutoIdTimestamp())); + assertThat(replica.getMaxSeqNoOfUpdatesOrDeletes(), greaterThanOrEqualTo(primary.getMaxSeqNoOfUpdatesOrDeletes())); + } catch (AlreadyClosedException ignored) { + } + } closeShards(this); } else { throw new AlreadyClosedException("too bad"); @@ -544,6 +565,11 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase return replicationGroup.getPrimary().getGlobalCheckpoint(); } + @Override + public long maxSeqNoOfUpdatesOrDeletes() { + return replicationGroup.getPrimary().getMaxSeqNoOfUpdatesOrDeletes(); + } + @Override public org.elasticsearch.index.shard.ReplicationGroup getReplicationGroup() { return replicationGroup.primary.getReplicationGroup(); @@ -558,12 +584,14 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase final ShardRouting replicaRouting, final ReplicaRequest request, final long globalCheckpoint, + final long maxSeqNoOfUpdatesOrDeletes, final ActionListener listener) { IndexShard replica = replicationGroup.replicas.stream() .filter(s -> replicaRouting.isSameAllocation(s.routingEntry())).findFirst().get(); replica.acquireReplicaOperationPermit( replicationGroup.primary.getPendingPrimaryTerm(), globalCheckpoint, + maxSeqNoOfUpdatesOrDeletes, new ActionListener() { 
@Override public void onResponse(Releasable releasable) { @@ -640,7 +668,8 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase @Override protected void performOnReplica(BulkShardRequest request, IndexShard replica) throws Exception { - executeShardBulkOnReplica(request, replica, getPrimaryShard().getPendingPrimaryTerm(), getPrimaryShard().getGlobalCheckpoint()); + executeShardBulkOnReplica(request, replica, getPrimaryShard().getPendingPrimaryTerm(), + getPrimaryShard().getGlobalCheckpoint(), getPrimaryShard().getMaxSeqNoOfUpdatesOrDeletes()); } } @@ -671,10 +700,10 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase } private void executeShardBulkOnReplica(BulkShardRequest request, IndexShard replica, long operationPrimaryTerm, - long globalCheckpointOnPrimary) throws Exception { + long globalCheckpointOnPrimary, long maxSeqNoOfUpdatesOrDeletes) throws Exception { final PlainActionFuture permitAcquiredFuture = new PlainActionFuture<>(); - replica.acquireReplicaOperationPermit( - operationPrimaryTerm, globalCheckpointOnPrimary, permitAcquiredFuture, ThreadPool.Names.SAME, request); + replica.acquireReplicaOperationPermit(operationPrimaryTerm, globalCheckpointOnPrimary, + maxSeqNoOfUpdatesOrDeletes, permitAcquiredFuture, ThreadPool.Names.SAME, request); final Translog.Location location; try (Releasable ignored = permitAcquiredFuture.actionGet()) { location = TransportShardBulkAction.performOnReplica(request, replica); @@ -704,14 +733,16 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase } void indexOnReplica(BulkShardRequest request, ReplicationGroup group, IndexShard replica, long term) throws Exception { - executeShardBulkOnReplica(request, replica, term, group.primary.getGlobalCheckpoint()); + executeShardBulkOnReplica(request, replica, term, + group.primary.getGlobalCheckpoint(), group.primary.getMaxSeqNoOfUpdatesOrDeletes()); } /** * Executes the delete request on the given replica shard. 
*/ void deleteOnReplica(BulkShardRequest request, ReplicationGroup group, IndexShard replica) throws Exception { - executeShardBulkOnReplica(request, replica, group.primary.getPendingPrimaryTerm(), group.primary.getGlobalCheckpoint()); + executeShardBulkOnReplica(request, replica, group.primary.getPendingPrimaryTerm(), + group.primary.getGlobalCheckpoint(), group.primary.getMaxSeqNoOfUpdatesOrDeletes()); } class GlobalCheckpointSync extends ReplicationAction< @@ -755,7 +786,8 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase @Override protected void performOnReplica(ResyncReplicationRequest request, IndexShard replica) throws Exception { - executeResyncOnReplica(replica, request, getPrimaryShard().getPendingPrimaryTerm(), getPrimaryShard().getGlobalCheckpoint()); + executeResyncOnReplica(replica, request, getPrimaryShard().getPendingPrimaryTerm(), + getPrimaryShard().getGlobalCheckpoint(), getPrimaryShard().getMaxSeqNoOfUpdatesOrDeletes()); } } @@ -768,12 +800,12 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase return result; } - private void executeResyncOnReplica(IndexShard replica, ResyncReplicationRequest request, - long operationPrimaryTerm, long globalCheckpointOnPrimary) throws Exception { + private void executeResyncOnReplica(IndexShard replica, ResyncReplicationRequest request, long operationPrimaryTerm, + long globalCheckpointOnPrimary, long maxSeqNoOfUpdatesOrDeletes) throws Exception { final Translog.Location location; final PlainActionFuture acquirePermitFuture = new PlainActionFuture<>(); - replica.acquireReplicaOperationPermit( - operationPrimaryTerm, globalCheckpointOnPrimary, acquirePermitFuture, ThreadPool.Names.SAME, request); + replica.acquireReplicaOperationPermit(operationPrimaryTerm, globalCheckpointOnPrimary, + maxSeqNoOfUpdatesOrDeletes, acquirePermitFuture, ThreadPool.Names.SAME, request); try (Releasable ignored = acquirePermitFuture.actionGet()) { location = TransportResyncReplicationAction.performOnReplica(request, replica); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 540b68ee409..c9ef79720a2 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -94,6 +94,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; /** @@ -447,6 +448,7 @@ public abstract class IndexShardTestCase extends ESTestCase { IndexShard shard = shardFunction.apply(primary); if (primary) { recoverShardFromStore(shard); + assertThat(shard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(shard.seqNoStats().getMaxSeqNo())); } else { recoveryEmptyReplica(shard, true); } @@ -697,8 +699,9 @@ public abstract class IndexShardTestCase extends ESTestCase { shard.updateLocalCheckpointForShard(shard.routingEntry().allocationId().getId(), shard.getLocalCheckpoint()); } else { - result = shard.applyIndexOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, 0, - IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); + final long seqNo = shard.seqNoStats().getMaxSeqNo() + 1; + shard.advanceMaxSeqNoOfUpdatesOrDeletes(seqNo); // manually replicate 
max_seq_no_of_updates + result = shard.applyIndexOperationOnReplica(seqNo, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); if (result.getResultType() == Engine.Result.Type.MAPPING_UPDATE_REQUIRED) { throw new TransportReplicationAction.RetryOnReplicaException(shard.shardId, "Mappings are not available on the replica yet, triggered update: " + result.getRequiredMappingUpdate()); @@ -718,7 +721,9 @@ public abstract class IndexShardTestCase extends ESTestCase { result = shard.applyDeleteOperationOnPrimary(Versions.MATCH_ANY, type, id, VersionType.INTERNAL); shard.updateLocalCheckpointForShard(shard.routingEntry().allocationId().getId(), shard.getEngine().getLocalCheckpoint()); } else { - result = shard.applyDeleteOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, 0L, type, id); + final long seqNo = shard.seqNoStats().getMaxSeqNo() + 1; + shard.advanceMaxSeqNoOfUpdatesOrDeletes(seqNo); // manually replicate max_seq_no_of_updates + result = shard.applyDeleteOperationOnReplica(seqNo, 0L, type, id); } return result; } diff --git a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java index 2164fe32a39..70a42032ea4 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java @@ -31,8 +31,6 @@ import org.elasticsearch.index.analysis.PreConfiguredTokenizer; import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; import org.elasticsearch.index.analysis.StandardTokenizerFactory; import org.elasticsearch.index.analysis.StopTokenFilterFactory; -import org.elasticsearch.index.analysis.SynonymGraphTokenFilterFactory; -import org.elasticsearch.index.analysis.SynonymTokenFilterFactory; import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.test.ESTestCase; @@ -169,8 +167,8 @@ public abstract class AnalysisFactoryTestCase extends ESTestCase { .put("stemmeroverride", MovedToAnalysisCommon.class) .put("stop", StopTokenFilterFactory.class) .put("swedishlightstem", MovedToAnalysisCommon.class) - .put("synonym", SynonymTokenFilterFactory.class) - .put("synonymgraph", SynonymGraphTokenFilterFactory.class) + .put("synonym", MovedToAnalysisCommon.class) + .put("synonymgraph", MovedToAnalysisCommon.class) .put("trim", MovedToAnalysisCommon.class) .put("truncate", MovedToAnalysisCommon.class) .put("turkishlowercase", MovedToAnalysisCommon.class) diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/TestTemplateService.java b/test/framework/src/main/java/org/elasticsearch/ingest/TestTemplateService.java index 4777d7c4ef2..5bbf39d8fdc 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/TestTemplateService.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/TestTemplateService.java @@ -43,8 +43,7 @@ public class TestTemplateService extends ScriptService { } private TestTemplateService(boolean compilationException) { - super(Settings.EMPTY, Collections.singletonMap(DEFAULT_TEMPLATE_LANG, - new MockScriptEngine(MockScriptEngine.NAME, Collections.emptyMap())), Collections.emptyMap()); + super(Settings.EMPTY, Collections.singletonMap(DEFAULT_TEMPLATE_LANG, new MockScriptEngine()), Collections.emptyMap()); this.compilationException = compilationException; } diff --git 
a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java index 9f12c369991..3e4e639dd01 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java @@ -103,7 +103,7 @@ public abstract class ESBlobStoreContainerTestCase extends ESTestCase { int length = randomIntBetween(10, 100); String name = "bar-0-"; generatedBlobs.put(name, (long) length); - byte[] data = writeRandomBlob(container, name, length); + writeRandomBlob(container, name, length); Map blobs = container.listBlobs(); assertThat(blobs.size(), equalTo(numberOfFooBlobs + numberOfBarBlobs)); diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index be38ae95a32..76031dca84f 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -53,18 +53,26 @@ import static java.util.Collections.emptyMap; */ public class MockScriptEngine implements ScriptEngine { + /** A non-typed compiler for a single custom context */ + public interface ContextCompiler { + Object compile(Function, Object> script, Map params); + } + public static final String NAME = "mockscript"; private final String type; private final Map, Object>> scripts; + private final Map, ContextCompiler> contexts; - public MockScriptEngine(String type, Map, Object>> scripts) { + public MockScriptEngine(String type, Map, Object>> scripts, + Map, ContextCompiler> contexts) { this.type = type; this.scripts = Collections.unmodifiableMap(scripts); + this.contexts = Collections.unmodifiableMap(contexts); } public MockScriptEngine() { - this(NAME, Collections.emptyMap()); + this(NAME, Collections.emptyMap(), Collections.emptyMap()); } @Override @@ -97,16 +105,14 @@ public class MockScriptEngine implements ScriptEngine { } }; return context.factoryClazz.cast(factory); - } else if (context.instanceClazz.equals(ExecutableScript.class)) { - ExecutableScript.Factory factory = mockCompiled::createExecutableScript; - return context.factoryClazz.cast(factory); } else if (context.instanceClazz.equals(IngestScript.class)) { - IngestScript.Factory factory = parameters -> new IngestScript(parameters) { - @Override - public void execute(Map ctx) { - script.apply(ctx); - } - }; + IngestScript.Factory factory = vars -> + new IngestScript(vars) { + @Override + public void execute(Map ctx) { + script.apply(ctx); + } + }; return context.factoryClazz.cast(factory); } else if (context.instanceClazz.equals(IngestConditionalScript.class)) { IngestConditionalScript.Factory factory = parameters -> new IngestConditionalScript(parameters) { @@ -117,9 +123,9 @@ public class MockScriptEngine implements ScriptEngine { }; return context.factoryClazz.cast(factory); } else if (context.instanceClazz.equals(UpdateScript.class)) { - UpdateScript.Factory factory = parameters -> new UpdateScript(parameters) { + UpdateScript.Factory factory = (parameters, ctx) -> new UpdateScript(parameters, ctx) { @Override - public void execute(Map ctx) { + public void execute() { final Map vars = new HashMap<>(); vars.put("ctx", ctx); vars.put("params", parameters); @@ -159,16 +165,17 @@ public class MockScriptEngine implements ScriptEngine { 
return context.factoryClazz.cast(factory); } else if (context.instanceClazz.equals(TemplateScript.class)) { TemplateScript.Factory factory = vars -> { - // TODO: need a better way to implement all these new contexts - // this is just a shim to act as an executable script just as before - ExecutableScript execScript = mockCompiled.createExecutableScript(vars); - return new TemplateScript(vars) { - @Override - public String execute() { - return (String) execScript.run(); - } - }; + Map varsWithParams = new HashMap<>(); + if (vars != null) { + varsWithParams.put("params", vars); + } + return new TemplateScript(vars) { + @Override + public String execute() { + return (String) script.apply(varsWithParams); + } }; + }; return context.factoryClazz.cast(factory); } else if (context.instanceClazz.equals(FilterScript.class)) { FilterScript.Factory factory = mockCompiled::createFilterScript; @@ -198,6 +205,10 @@ public class MockScriptEngine implements ScriptEngine { ScriptedMetricAggContexts.ReduceScript.Factory factory = mockCompiled::createMetricAggReduceScript; return context.factoryClazz.cast(factory); } + ContextCompiler compiler = contexts.get(context); + if (compiler != null) { + return context.factoryClazz.cast(compiler.compile(script, params)); + } throw new IllegalArgumentException("mock script engine does not know how to handle context [" + context.name + "]"); } @@ -219,19 +230,6 @@ public class MockScriptEngine implements ScriptEngine { return name; } - public ExecutableScript createExecutableScript(Map params) { - Map context = new HashMap<>(); - if (options != null) { - context.putAll(options); // TODO: remove this once scripts know to look for options under options key - context.put("options", options); - } - if (params != null) { - context.putAll(params); // TODO: remove this once scripts know to look for params under params key - context.put("params", params); - } - return new MockExecutableScript(context, script != null ? 
script : ctx -> source); - } - public SearchScript.LeafFactory createSearchScript(Map params, SearchLookup lookup) { Map context = new HashMap<>(); if (options != null) { @@ -282,27 +280,6 @@ public class MockScriptEngine implements ScriptEngine { } } - public class MockExecutableScript implements ExecutableScript { - - private final Function, Object> script; - private final Map vars; - - public MockExecutableScript(Map vars, Function, Object> script) { - this.vars = vars; - this.script = script; - } - - @Override - public void setNextVar(String name, Object value) { - vars.put(name, value); - } - - @Override - public Object run() { - return script.apply(vars); - } - } - public class MockSearchScript implements SearchScript.LeafFactory { private final Function, Object> script; diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptPlugin.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptPlugin.java index cd951a3b53f..34aca79ec47 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptPlugin.java @@ -24,6 +24,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ScriptPlugin; import java.util.Collection; +import java.util.Collections; import java.util.Map; import java.util.function.Function; @@ -36,11 +37,15 @@ public abstract class MockScriptPlugin extends Plugin implements ScriptPlugin { @Override public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { - return new MockScriptEngine(pluginScriptLang(), pluginScripts()); + return new MockScriptEngine(pluginScriptLang(), pluginScripts(), pluginContextCompilers()); } protected abstract Map, Object>> pluginScripts(); + protected Map, MockScriptEngine.ContextCompiler> pluginContextCompilers() { + return Collections.emptyMap(); + } + public String pluginScriptLang() { return NAME; } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 17202839a65..6f9c46b4dc4 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -240,7 +240,7 @@ public abstract class AggregatorTestCase extends ESTestCase { } protected SearchContext createSearchContext(IndexSearcher indexSearcher, IndexSettings indexSettings) { - Engine.Searcher searcher = new Engine.Searcher("aggregator_test", indexSearcher); + Engine.Searcher searcher = new Engine.Searcher("aggregator_test", indexSearcher, logger); QueryCache queryCache = new DisabledQueryCache(indexSettings); QueryCachingPolicy queryCachingPolicy = new QueryCachingPolicy() { @Override diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java index 4a6e1700549..ffd19d8e94d 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java @@ -32,6 +32,10 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuil import org.elasticsearch.test.AbstractBuilderTestCase; import java.io.IOException; +import java.util.ArrayList; +import 
java.util.HashSet; +import java.util.List; +import java.util.Set; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.hamcrest.Matchers.hasSize; @@ -63,6 +67,58 @@ public abstract class BaseAggregationTestCase testAggs = createTestAggregatorBuilders(); + + for (AB testAgg : testAggs) { + factoriesBuilder.addAggregator(testAgg); + } + + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + if (randomBoolean()) { + builder.prettyPrint(); + } + factoriesBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); + XContentBuilder shuffled = shuffleXContent(builder); + XContentParser parser = createParser(shuffled); + + assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); + AggregatorFactories.Builder parsed = AggregatorFactories.parseAggregators(parser); + + assertThat(parsed.getAggregatorFactories(), hasSize(testAggs.size())); + assertThat(parsed.getPipelineAggregatorFactories(), hasSize(0)); + assertEquals(factoriesBuilder, parsed); + assertEquals(factoriesBuilder.hashCode(), parsed.hashCode()); + } + + /** + * Create at least 2 aggregations and test equality and hash + */ + public void testSerializationMulti() throws IOException { + AggregatorFactories.Builder builder = AggregatorFactories.builder(); + List testAggs = createTestAggregatorBuilders(); + + for (AB testAgg : testAggs) { + builder.addAggregator(testAgg); + } + + try (BytesStreamOutput output = new BytesStreamOutput()) { + builder.writeTo(output); + + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry())) { + AggregatorFactories.Builder newBuilder = new AggregatorFactories.Builder(in); + + assertEquals(builder, newBuilder); + assertEquals(builder.hashCode(), newBuilder.hashCode()); + assertNotSame(builder, newBuilder); + } + } + } + /** * Generic test that checks that the toString method renders the XContent * correctly. 
@@ -82,7 +138,7 @@ public abstract class BaseAggregationTestCase createTestAggregatorBuilders() { + int numberOfAggregatorBuilders = randomIntBetween(2, 10); + + // ensure that we do not create 2 aggregations with the same name + Set names = new HashSet<>(); + List aggBuilders = new ArrayList<>(); + + while (names.size() < numberOfAggregatorBuilders) { + AB aggBuilder = createTestAggregatorBuilder(); + + if (names.add(aggBuilder.getName())) { + aggBuilders.add(aggBuilder); + } + } + return aggBuilders; + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java index c740a65d28a..767cd6d260c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java @@ -20,6 +20,7 @@ package org.elasticsearch.test; import org.elasticsearch.common.CheckedBiFunction; +import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -38,34 +39,147 @@ import java.util.function.Supplier; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; -public abstract class AbstractXContentTestCase extends ESTestCase { +public abstract class AbstractXContentTestCase extends ESTestCase { protected static final int NUMBER_OF_TEST_RUNS = 20; - public static void testFromXContent(int numberOfTestRuns, Supplier instanceSupplier, - boolean supportsUnknownFields, String[] shuffleFieldsExceptions, - Predicate randomFieldsExcludeFilter, - CheckedBiFunction - createParserFunction, - CheckedFunction parseFunction, - BiConsumer assertEqualsConsumer, - boolean assertToXContentEquivalence, - ToXContent.Params toXContentParams) throws IOException { - for (int runs = 0; runs < numberOfTestRuns; runs++) { - T testInstance = instanceSupplier.get(); - XContentType xContentType = randomFrom(XContentType.values()); - BytesReference shuffledContent = insertRandomFieldsAndShuffle(testInstance, xContentType, supportsUnknownFields, - shuffleFieldsExceptions, randomFieldsExcludeFilter, createParserFunction, toXContentParams); - XContentParser parser = createParserFunction.apply(XContentFactory.xContent(xContentType), shuffledContent); - T parsed = parseFunction.apply(parser); - assertEqualsConsumer.accept(testInstance, parsed); - if (assertToXContentEquivalence) { - assertToXContentEquivalent( + public static XContentTester xContentTester( + CheckedBiFunction createParser, + Supplier instanceSupplier, + CheckedBiConsumer toXContent, + CheckedFunction fromXContent) { + return new XContentTester( + createParser, + instanceSupplier, + (testInstance, xContentType) -> { + try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { + toXContent.accept(testInstance, builder); + return BytesReference.bytes(builder); + } + }, + fromXContent); + } + + public static XContentTester xContentTester( + CheckedBiFunction createParser, + Supplier instanceSupplier, + CheckedFunction fromXContent) { + return xContentTester(createParser, instanceSupplier, ToXContent.EMPTY_PARAMS, fromXContent); + } + + public static XContentTester xContentTester( + CheckedBiFunction createParser, + Supplier instanceSupplier, + ToXContent.Params toXContentParams, + CheckedFunction fromXContent) { + return new XContentTester( + createParser, + 
instanceSupplier, + (testInstance, xContentType) -> XContentHelper.toXContent(testInstance, xContentType, toXContentParams, false), - XContentHelper.toXContent(parsed, xContentType, toXContentParams, false), - xContentType); + fromXContent); + } + + /** + * Tests converting to and from xcontent. + */ + public static class XContentTester { + private final CheckedBiFunction createParser; + private final Supplier instanceSupplier; + private final CheckedBiFunction toXContent; + private final CheckedFunction fromXContent; + + private int numberOfTestRuns = NUMBER_OF_TEST_RUNS; + private boolean supportsUnknownFields = false; + private String[] shuffleFieldsExceptions = Strings.EMPTY_ARRAY; + private Predicate randomFieldsExcludeFilter = field -> false; + private BiConsumer assertEqualsConsumer = (expectedInstance, newInstance) -> { + assertNotSame(newInstance, expectedInstance); + assertEquals(expectedInstance, newInstance); + assertEquals(expectedInstance.hashCode(), newInstance.hashCode()); + }; + private boolean assertToXContentEquivalence = true; + + private XContentTester( + CheckedBiFunction createParser, + Supplier instanceSupplier, + CheckedBiFunction toXContent, + CheckedFunction fromXContent) { + this.createParser = createParser; + this.instanceSupplier = instanceSupplier; + this.toXContent = toXContent; + this.fromXContent = fromXContent; + } + + public void test() throws IOException { + for (int runs = 0; runs < numberOfTestRuns; runs++) { + T testInstance = instanceSupplier.get(); + XContentType xContentType = randomFrom(XContentType.values()); + BytesReference originalXContent = toXContent.apply(testInstance, xContentType); + BytesReference shuffledContent = insertRandomFieldsAndShuffle(originalXContent, xContentType, supportsUnknownFields, + shuffleFieldsExceptions, randomFieldsExcludeFilter, createParser); + XContentParser parser = createParser.apply(XContentFactory.xContent(xContentType), shuffledContent); + T parsed = fromXContent.apply(parser); + assertEqualsConsumer.accept(testInstance, parsed); + if (assertToXContentEquivalence) { + assertToXContentEquivalent( + toXContent.apply(testInstance, xContentType), + toXContent.apply(parsed, xContentType), + xContentType); + } } } + + public XContentTester numberOfTestRuns(int numberOfTestRuns) { + this.numberOfTestRuns = numberOfTestRuns; + return this; + } + + public XContentTester supportsUnknownFields(boolean supportsUnknownFields) { + this.supportsUnknownFields = supportsUnknownFields; + return this; + } + + public XContentTester shuffleFieldsExceptions(String[] shuffleFieldsExceptions) { + this.shuffleFieldsExceptions = shuffleFieldsExceptions; + return this; + } + + public XContentTester randomFieldsExcludeFilter(Predicate randomFieldsExcludeFilter) { + this.randomFieldsExcludeFilter = randomFieldsExcludeFilter; + return this; + } + + public XContentTester assertEqualsConsumer(BiConsumer assertEqualsConsumer) { + this.assertEqualsConsumer = assertEqualsConsumer; + return this; + } + + public XContentTester assertToXContentEquivalence(boolean assertToXContentEquivalence) { + this.assertToXContentEquivalence = assertToXContentEquivalence; + return this; + } + } + + public static void testFromXContent( + int numberOfTestRuns, + Supplier instanceSupplier, + boolean supportsUnknownFields, + String[] shuffleFieldsExceptions, + Predicate randomFieldsExcludeFilter, + CheckedBiFunction createParserFunction, + CheckedFunction fromXContent, + BiConsumer assertEqualsConsumer, + boolean assertToXContentEquivalence, + ToXContent.Params 
toXContentParams) throws IOException { + xContentTester(createParserFunction, instanceSupplier, toXContentParams, fromXContent) + .numberOfTestRuns(numberOfTestRuns) + .supportsUnknownFields(supportsUnknownFields) + .shuffleFieldsExceptions(shuffleFieldsExceptions) + .randomFieldsExcludeFilter(randomFieldsExcludeFilter) + .assertEqualsConsumer(assertEqualsConsumer) + .assertToXContentEquivalence(assertToXContentEquivalence) + .test(); } /** @@ -133,11 +247,9 @@ public abstract class AbstractXContentTestCase extends EST return ToXContent.EMPTY_PARAMS; } - static BytesReference insertRandomFieldsAndShuffle(ToXContent testInstance, XContentType xContentType, + static BytesReference insertRandomFieldsAndShuffle(BytesReference xContent, XContentType xContentType, boolean supportsUnknownFields, String[] shuffleFieldsExceptions, Predicate randomFieldsExcludeFilter, - CheckedBiFunction createParserFunction, - ToXContent.Params toXContentParams) throws IOException { - BytesReference xContent = XContentHelper.toXContent(testInstance, xContentType, toXContentParams, false); + CheckedBiFunction createParserFunction) throws IOException { BytesReference withRandomFields; if (supportsUnknownFields) { // add a few random fields to check that the parser is lenient on new fields diff --git a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java index e33babb5eb6..b2c562d43a6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java @@ -20,6 +20,7 @@ package org.elasticsearch.test; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.store.ChecksumIndexInput; @@ -27,7 +28,6 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.FSDirectory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; -import org.elasticsearch.common.logging.ESLoggerFactory; import java.io.IOException; import java.nio.ByteBuffer; @@ -45,7 +45,7 @@ import static org.junit.Assert.assertTrue; public final class CorruptionUtils { - private static Logger logger = ESLoggerFactory.getLogger("test"); + private static final Logger logger = LogManager.getLogger(CorruptionUtils.class); private CorruptionUtils() {} public static void corruptIndex(Random random, Path indexPath, boolean corruptSegments) throws IOException { @@ -78,24 +78,19 @@ public final class CorruptionUtils { checksumBeforeCorruption = CodecUtil.retrieveChecksum(input); } try (FileChannel raf = FileChannel.open(fileToCorrupt, StandardOpenOption.READ, StandardOpenOption.WRITE)) { - // read - raf.position(random.nextInt((int) Math.min(Integer.MAX_VALUE, raf.size()))); - long filePointer = raf.position(); - ByteBuffer bb = ByteBuffer.wrap(new byte[1]); - raf.read(bb); - bb.flip(); + long maxPosition = raf.size(); - // corrupt - byte oldValue = bb.get(0); - byte newValue = (byte) (oldValue + 1); - bb.put(0, newValue); - - // rewrite - raf.position(filePointer); - raf.write(bb); - logger.info("Corrupting file -- flipping at position {} from {} to {} file: {}", filePointer, - Integer.toHexString(oldValue), Integer.toHexString(newValue), fileToCorrupt.getFileName()); + if (fileToCorrupt.getFileName().toString().endsWith(".cfs") 
&& maxPosition > 4) { + // TODO: it is known that Lucene does not check the checksum of a CFS file (CompoundFileS, like an archive) + // see note at https://github.com/elastic/elasticsearch/pull/33911 + // for now, don't corrupt the crc32 part of the checksum (the last 4 bytes) of a cfs file + // the checksum is 8 bytes: the first 4 bytes have to be zeros, while the crc32 value is not verified + maxPosition -= 4; + } + final int position = random.nextInt((int) Math.min(Integer.MAX_VALUE, maxPosition)); + corruptAt(fileToCorrupt, raf, position); } + long checksumAfterCorruption; long actualChecksumAfterCorruption; try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString(), IOContext.DEFAULT)) { @@ -120,5 +115,25 @@ public final class CorruptionUtils { } } + static void corruptAt(Path path, FileChannel channel, int position) throws IOException { + // read + channel.position(position); + long filePointer = channel.position(); + ByteBuffer bb = ByteBuffer.wrap(new byte[1]); + channel.read(bb); + bb.flip(); + + // corrupt + byte oldValue = bb.get(0); + byte newValue = (byte) (oldValue + 1); + bb.put(0, newValue); + + // rewrite + channel.position(filePointer); + channel.write(bb); + logger.info("Corrupting file -- flipping at position {} from {} to {} file: {}", filePointer, + Integer.toHexString(oldValue), Integer.toHexString(newValue), path.getFileName()); + } + } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index fa9d4635a7d..d5d2168c7dd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -197,6 +197,7 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; +import java.util.function.BiFunction; import java.util.function.BooleanSupplier; import java.util.function.Function; import java.util.stream.Collectors; @@ -2359,17 +2360,28 @@ public abstract class ESIntegTestCase extends ESTestCase { } protected void assertSeqNos() throws Exception { + final BiFunction getInstanceShardInstance = (clusterState, shardRouting) -> { + if (shardRouting.assignedToNode() == false) { + return null; + } + final DiscoveryNode assignedNode = clusterState.nodes().get(shardRouting.currentNodeId()); + if (assignedNode == null) { + return null; + } + return internalCluster().getInstance(IndicesService.class, assignedNode.getName()).getShardOrNull(shardRouting.shardId()); + }; assertBusy(() -> { final ClusterState state = clusterService().state(); for (ObjectObjectCursor indexRoutingTable : state.routingTable().indicesRouting()) { for (IntObjectCursor indexShardRoutingTable : indexRoutingTable.value.shards()) { ShardRouting primaryShardRouting = indexShardRoutingTable.value.primaryShard(); - if (primaryShardRouting == null || primaryShardRouting.assignedToNode() == false) { + if (primaryShardRouting == null) { continue; } - DiscoveryNode primaryNode = state.nodes().get(primaryShardRouting.currentNodeId()); - IndexShard primaryShard = internalCluster().getInstance(IndicesService.class, primaryNode.getName()) - .indexServiceSafe(primaryShardRouting.index()).getShard(primaryShardRouting.id()); + final IndexShard primaryShard = getInstanceShardInstance.apply(state, primaryShardRouting); + if (primaryShard == null) { + continue; //just ignore - shard movement + }
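+ // note: getInstanceShardInstance above returns null when the shard is unassigned, when its node has left the cluster state, or when no shard instance exists on that node (e.g. while the shard is relocating), which is why a missing shard is simply skipped here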
final SeqNoStats primarySeqNoStats; final ObjectLongMap syncGlobalCheckpoints; try { @@ -2381,12 +2393,10 @@ public abstract class ESIntegTestCase extends ESTestCase { assertThat(primaryShardRouting + " should have set the global checkpoint", primarySeqNoStats.getGlobalCheckpoint(), not(equalTo(SequenceNumbers.UNASSIGNED_SEQ_NO))); for (ShardRouting replicaShardRouting : indexShardRoutingTable.value.replicaShards()) { - if (replicaShardRouting.assignedToNode() == false) { - continue; + final IndexShard replicaShard = getInstanceShardInstance.apply(state, replicaShardRouting); + if (replicaShard == null) { + continue; //just ignore - shard movement } - DiscoveryNode replicaNode = state.nodes().get(replicaShardRouting.currentNodeId()); - IndexShard replicaShard = internalCluster().getInstance(IndicesService.class, replicaNode.getName()) - .indexServiceSafe(replicaShardRouting.index()).getShard(replicaShardRouting.id()); final SeqNoStats seqNoStats; try { seqNoStats = replicaShard.seqNoStats(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index d8cd22d92db..52ec9d15739 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -408,7 +408,7 @@ public abstract class ESTestCase extends LuceneTestCase { } try { final List actualWarnings = threadContext.getResponseHeaders().get("Warning"); - assertNotNull(actualWarnings); + assertNotNull("no warnings, expected: " + Arrays.asList(expectedWarnings), actualWarnings); final Set actualWarningValues = actualWarnings.stream().map(DeprecationLogger::extractWarningValueFromWarningHeader).collect(Collectors.toSet()); for (String msg : expectedWarnings) { @@ -1384,7 +1384,7 @@ public abstract class ESTestCase extends LuceneTestCase { return new ScriptModule(Settings.EMPTY, singletonList(new ScriptPlugin() { @Override public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { - return new MockScriptEngine(MockScriptEngine.NAME, Collections.singletonMap("1", script -> "1")); + return new MockScriptEngine(MockScriptEngine.NAME, Collections.singletonMap("1", script -> "1"), Collections.emptyMap()); } })); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java index 1149c7b0941..facbc6ec84b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java @@ -86,36 +86,36 @@ import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongTerms; import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedAvg; import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedCardinality; +import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.GeoBoundsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedGeoBounds; import 
org.elasticsearch.search.aggregations.metrics.GeoCentroidAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedGeoCentroid; -import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedMax; -import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedMin; import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentileRanks; import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentiles; -import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentiles; import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentileRanks; import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedAvg; +import org.elasticsearch.search.aggregations.metrics.ParsedCardinality; +import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats; +import org.elasticsearch.search.aggregations.metrics.ParsedGeoBounds; +import org.elasticsearch.search.aggregations.metrics.ParsedGeoCentroid; +import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentiles; +import org.elasticsearch.search.aggregations.metrics.ParsedMax; +import org.elasticsearch.search.aggregations.metrics.ParsedMin; +import org.elasticsearch.search.aggregations.metrics.ParsedScriptedMetric; +import org.elasticsearch.search.aggregations.metrics.ParsedStats; +import org.elasticsearch.search.aggregations.metrics.ParsedSum; import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentileRanks; import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentiles; -import org.elasticsearch.search.aggregations.metrics.ParsedScriptedMetric; -import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedStats; -import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats; -import org.elasticsearch.search.aggregations.metrics.ParsedSum; -import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ParsedTopHits; -import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ParsedValueCount; +import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue; @@ -134,6 +134,7 @@ import 
org.elasticsearch.search.aggregations.pipeline.derivative.ParsedDerivativ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -153,6 +154,16 @@ public abstract class InternalAggregationTestCase public static final int DEFAULT_MAX_BUCKETS = 100000; protected static final double TOLERANCE = 1e-10; + private static final Comparator INTERNAL_AGG_COMPARATOR = (agg1, agg2) -> { + if (agg1.isMapped() == agg2.isMapped()) { + return 0; + } else if (agg1.isMapped() && agg2.isMapped() == false) { + return -1; + } else { + return 1; + } + }; + private final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry( new SearchModule(Settings.EMPTY, false, emptyList()).getNamedWriteables()); @@ -239,6 +250,8 @@ public abstract class InternalAggregationTestCase inputs.add(t); toReduce.add(t); } + // Sort aggs so that unmapped come last. This mimics the behavior of InternalAggregations.reduce() + inputs.sort(INTERNAL_AGG_COMPARATOR); ScriptService mockScriptService = mockScriptService(); MockBigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); if (randomBoolean() && toReduce.size() > 1) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 69142dba638..794c7fef783 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -24,6 +24,7 @@ import com.carrotsearch.randomizedtesting.SysGlobals; import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.apache.logging.log4j.Logger; import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.ElasticsearchException; @@ -165,10 +166,6 @@ public final class InternalTestCluster extends TestCluster { private final Logger logger = Loggers.getLogger(getClass()); - - private static final AtomicInteger clusterOrdinal = new AtomicInteger(); - - public static final int DEFAULT_LOW_NUM_MASTER_NODES = 1; public static final int DEFAULT_HIGH_NUM_MASTER_NODES = 3; @@ -317,7 +314,6 @@ public final class InternalTestCluster extends TestCluster { this.mockPlugins = mockPlugins; - sharedNodesSeeds = new long[numSharedDedicatedMasterNodes + numSharedDataNodes + numSharedCoordOnlyNodes]; for (int i = 0; i < sharedNodesSeeds.length; i++) { sharedNodesSeeds[i] = random.nextLong(); @@ -2062,6 +2058,7 @@ public final class InternalTestCluster extends TestCluster { return null; } + @Override public synchronized Iterable getClients() { ensureOpen(); return () -> { diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java b/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java index cf2d69e36d5..0dbdaa55e33 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java @@ -38,10 +38,8 @@ class AssertingSearcher extends Engine.Searcher { private final Logger logger; private final AtomicBoolean closed = new AtomicBoolean(false); - AssertingSearcher(IndexSearcher indexSearcher, final
Engine.Searcher wrappedSearcher, - ShardId shardId, - Logger logger) { - super(wrappedSearcher.source(), indexSearcher); + AssertingSearcher(IndexSearcher indexSearcher, final Engine.Searcher wrappedSearcher, ShardId shardId, Logger logger) { + super(wrappedSearcher.source(), indexSearcher, s -> {throw new AssertionError();}, logger); // we only use the given index searcher here instead of the IS of the wrapped searcher. the IS might be a wrapped searcher // with a wrapped reader. this.wrappedSearcher = wrappedSearcher; @@ -52,11 +50,6 @@ class AssertingSearcher extends Engine.Searcher { "IndexReader#getRefCount() was [" + initialRefCount + "] expected a value > [0] - reader is already closed"; } - @Override - public String source() { - return wrappedSearcher.source(); - } - @Override public void close() { synchronized (lock) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java index e021df52c60..eb300ba302b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java @@ -20,7 +20,7 @@ package org.elasticsearch.test.junit.listeners; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.ESLoggerFactory; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.junit.annotations.TestLogging; import org.junit.runner.Description; @@ -78,7 +78,7 @@ public class LoggingListener extends RunListener { */ private static Logger resolveLogger(String loggerName) { if (loggerName.equalsIgnoreCase("_root")) { - return ESLoggerFactory.getRootLogger(); + return LogManager.getRootLogger(); } return Loggers.getLogger(loggerName); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 1b29a9112c2..a5f23104dea 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -635,7 +635,7 @@ public abstract class ESRestTestCase extends ESTestCase { if (name.startsWith(".monitoring-")) { return true; } - if (name.startsWith(".watch-history-")) { + if (name.startsWith(".watch") || name.startsWith(".triggered_watches")) { return true; } if (name.startsWith(".ml-")) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParser.java index b0c2a713d61..9719532b942 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParser.java @@ -64,8 +64,7 @@ public class ClientYamlSuiteRestApiParser { if ("url".equals(parser.currentName())) { String currentFieldName = "url"; - int innerLevel = -1; - while(parser.nextToken() != XContentParser.Token.END_OBJECT || innerLevel >= 0) { + while(parser.nextToken() != XContentParser.Token.END_OBJECT) { if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } @@ -108,13 +107,6 @@ public class ClientYamlSuiteRestApiParser { 
restApi.addParam(param, PARAMETER_PARSER.parse(parser, null).isRequired()); } } - - if (parser.currentToken() == XContentParser.Token.START_OBJECT) { - innerLevel++; - } - if (parser.currentToken() == XContentParser.Token.END_OBJECT) { - innerLevel--; - } } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java index a8cc9646129..85796494ba9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java @@ -21,6 +21,7 @@ package org.elasticsearch.test.rest.yaml.section; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.yaml.YamlXContent; @@ -73,9 +74,10 @@ public class ClientYamlTestSuite { } public static ClientYamlTestSuite parse(String api, String suiteName, XContentParser parser) throws IOException { - parser.nextToken(); - assert parser.currentToken() == XContentParser.Token.START_OBJECT : "expected token to be START_OBJECT but was " - + parser.currentToken(); + if (parser.nextToken() != XContentParser.Token.START_OBJECT) { + throw new XContentParseException(parser.getTokenLocation(), + "expected token to be START_OBJECT but was " + parser.currentToken()); + } ClientYamlTestSuite restTestSuite = new ClientYamlTestSuite(api, suiteName); diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java index 98eb0b10502..cdc33b38b86 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java @@ -21,6 +21,7 @@ package org.elasticsearch.test.store; import com.carrotsearch.randomizedtesting.SeedUtils; import com.carrotsearch.randomizedtesting.generators.RandomPicks; + import org.apache.logging.log4j.Logger; import org.apache.lucene.index.CheckIndex; import org.apache.lucene.store.BaseDirectoryWrapper; @@ -62,10 +63,6 @@ public class MockFSDirectoryService extends FsDirectoryService { Setting.doubleSetting("index.store.mock.random.io_exception_rate_on_open", 0.0d, 0.0d, Property.IndexScope, Property.NodeScope); public static final Setting RANDOM_IO_EXCEPTION_RATE_SETTING = Setting.doubleSetting("index.store.mock.random.io_exception_rate", 0.0d, 0.0d, Property.IndexScope, Property.NodeScope); - public static final Setting RANDOM_PREVENT_DOUBLE_WRITE_SETTING = - Setting.boolSetting("index.store.mock.random.prevent_double_write", true, Property.IndexScope, Property.NodeScope); - public static final Setting RANDOM_NO_DELETE_OPEN_FILE_SETTING = - Setting.boolSetting("index.store.mock.random.no_delete_open_file", true, Property.IndexScope, Property.NodeScope); public static final Setting CRASH_INDEX_SETTING = Setting.boolSetting("index.store.mock.random.crash_index", true, Property.IndexScope, Property.NodeScope); @@ -74,8 +71,6 @@ public class MockFSDirectoryService extends FsDirectoryService { private final double randomIOExceptionRate; private final double 
randomIOExceptionRateOnOpen; private final MockDirectoryWrapper.Throttling throttle; - private final boolean preventDoubleWrite; - private final boolean noDeleteOpenFile; private final boolean crashIndex; @Inject @@ -87,9 +82,6 @@ public class MockFSDirectoryService extends FsDirectoryService { randomIOExceptionRate = RANDOM_IO_EXCEPTION_RATE_SETTING.get(indexSettings); randomIOExceptionRateOnOpen = RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.get(indexSettings); - preventDoubleWrite = RANDOM_PREVENT_DOUBLE_WRITE_SETTING.get(indexSettings); - noDeleteOpenFile = RANDOM_NO_DELETE_OPEN_FILE_SETTING.exists(indexSettings) ? - RANDOM_NO_DELETE_OPEN_FILE_SETTING.get(indexSettings) : random.nextBoolean(); random.nextInt(shardId.getId() + 1); // some randomness per shard throttle = MockDirectoryWrapper.Throttling.NEVER; crashIndex = CRASH_INDEX_SETTING.get(indexSettings); diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java index 3b876f3c383..82ab9fc4121 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java @@ -61,8 +61,6 @@ public class MockFSIndexStore extends IndexStore { return Arrays.asList(INDEX_CHECK_INDEX_ON_CLOSE_SETTING, MockFSDirectoryService.CRASH_INDEX_SETTING, MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_SETTING, - MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING, - MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE_SETTING, MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING); } @@ -86,6 +84,7 @@ public class MockFSIndexStore extends IndexStore { super(indexSettings); } + @Override public DirectoryService newDirectoryService(ShardPath path) { return new MockFSDirectoryService(indexSettings, this, path); } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 989afd04dab..b10ad2d0eb3 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -173,7 +173,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { return service; } - private MockTransportService buildService(final String name, final Version version, ClusterSettings clusterSettings) { + protected MockTransportService buildService(final String name, final Version version, ClusterSettings clusterSettings) { return buildService(name, version, clusterSettings, Settings.EMPTY, true, true); } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java index 89df9166431..b36685d5645 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java @@ -277,8 +277,9 @@ public class MockNioTransport extends TcpTransport { @Override public void setSoLinger(int value) throws IOException { - if (isOpen()) { - getRawChannel().setOption(StandardSocketOptions.SO_LINGER, value); + SocketChannel rawChannel = getRawChannel(); + if (rawChannel.isConnected()) { + rawChannel.setOption(StandardSocketOptions.SO_LINGER, value); 
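+ // SO_LINGER is only meaningful for a connected socket, so the guard checks rawChannel.isConnected() rather than isOpen(); presumably this keeps the mock transport from setting the option on channels that are open but not (or no longer) connected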
} } diff --git a/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java b/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java index 165df584c03..2acb89befab 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java @@ -22,13 +22,11 @@ package org.elasticsearch.test; import com.carrotsearch.randomizedtesting.RandomizedContext; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import java.io.IOException; import java.util.Map; import static org.hamcrest.Matchers.equalTo; @@ -37,29 +35,24 @@ import static org.hamcrest.Matchers.not; public class AbstractXContentTestCaseTests extends ESTestCase { public void testInsertRandomFieldsAndShuffle() throws Exception { - TestInstance t = new TestInstance(); + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + { + builder.field("field", 1); + } + builder.endObject(); BytesReference insertRandomFieldsAndShuffle = RandomizedContext.current().runWithPrivateRandomness(1, - () -> AbstractXContentTestCase.insertRandomFieldsAndShuffle(t, XContentType.JSON, true, new String[] {}, null, - this::createParser, ToXContent.EMPTY_PARAMS)); + () -> AbstractXContentTestCase.insertRandomFieldsAndShuffle( + BytesReference.bytes(builder), + XContentType.JSON, + true, + new String[] {}, + null, + this::createParser)); try (XContentParser parser = createParser(XContentType.JSON.xContent(), insertRandomFieldsAndShuffle)) { Map mapOrdered = parser.mapOrdered(); assertThat(mapOrdered.size(), equalTo(2)); assertThat(mapOrdered.keySet().iterator().next(), not(equalTo("field"))); } } - - private class TestInstance implements ToXContentObject { - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - { - builder.field("field", 1); - } - builder.endObject(); - return builder; - } - - } - } \ No newline at end of file diff --git a/test/framework/src/test/java/org/elasticsearch/test/CorruptionUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/CorruptionUtilsTests.java new file mode 100644 index 00000000000..2624858c3aa --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/test/CorruptionUtilsTests.java @@ -0,0 +1,81 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.test; + +import org.apache.lucene.index.CheckIndex; +import org.apache.lucene.store.SimpleFSDirectory; +import org.elasticsearch.action.admin.indices.flush.FlushRequest; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardTestCase; +import org.elasticsearch.index.shard.ShardPath; + +import java.nio.channels.FileChannel; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.stream.Stream; + +import static org.elasticsearch.test.CorruptionUtils.corruptAt; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.lessThan; + +public class CorruptionUtilsTests extends IndexShardTestCase { + + /** + * There is a dependency on a Lucene bug fix, see + * https://github.com/elastic/elasticsearch/pull/33911 + */ + public void testLuceneCheckIndexIgnoresLast4Bytes() throws Exception { + final IndexShard indexShard = newStartedShard(true); + + final long numDocs = between(10, 100); + for (long i = 0; i < numDocs; i++) { + indexDoc(indexShard, "_doc", Long.toString(i), "{}"); + } + indexShard.flush(new FlushRequest()); + closeShards(indexShard); + + final ShardPath shardPath = indexShard.shardPath(); + + final Path indexPath = shardPath.getDataPath().resolve(ShardPath.INDEX_FOLDER_NAME); + + final Path cfsFile; + try (Stream paths = Files.walk(indexPath)) { + cfsFile = paths.filter(p -> p.getFileName().toString().endsWith(".cfs")).findFirst() + .orElseThrow(() -> new IllegalStateException("CFS file has to be there")); + } + + try (FileChannel raf = FileChannel.open(cfsFile, StandardOpenOption.READ, StandardOpenOption.WRITE)) { + assertThat(raf.size(), lessThan(Integer.MAX_VALUE * 1L)); + final int maxPosition = (int) raf.size(); + // corrupt only the last 4 bytes! + final int position = randomIntBetween(maxPosition - 4, maxPosition - 1); + corruptAt(cfsFile, raf, position); + } + + final CheckIndex.Status status; + try (CheckIndex checkIndex = new CheckIndex(new SimpleFSDirectory(indexPath))) { + status = checkIndex.checkIndex(); + } + + assertThat("That's good news! 
" + + "Lucene now validates CRC32 of CFS file: time to drop workaround at CorruptionUtils (and this test)", + status.clean, equalTo(true)); + } +} diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParserTests.java index cbc1f632617..9ba393966a3 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParserTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParserTests.java @@ -91,6 +91,31 @@ public class ClientYamlSuiteRestApiParserTests extends AbstractClientYamlTestFra assertThat(restApi.isBodyRequired(), equalTo(false)); } + public void testRequiredBodyWithoutUrlParts() throws Exception { + String spec = "{\n" + + " \"count\": {\n" + + " \"documentation\": \"whatever\",\n" + + " \"methods\": [ \"GET\", \"POST\" ],\n" + + " \"url\": {\n" + + " \"path\": \"/whatever\",\n" + + " \"paths\": [ \"/whatever\" ]\n" + + " },\n" + + " \"body\": {\n" + + " \"description\" : \"whatever\",\n" + + " \"required\" : true\n" + + " }\n" + + " }\n" + + "}"; + + parser = createParser(YamlXContent.yamlXContent, spec); + ClientYamlSuiteRestApi restApi = new ClientYamlSuiteRestApiParser().parse("count.json", parser); + + assertThat(restApi, notNullValue()); + assertThat(restApi.getPathParts().isEmpty(), equalTo(true)); + assertThat(restApi.getParams().isEmpty(), equalTo(true)); + assertThat(restApi.isBodyRequired(), equalTo(true)); + } + private static final String REST_SPEC_COUNT_API = "{\n" + " \"count\": {\n" + " \"documentation\": \"http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-count.html\",\n" + diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java index 0845fc2546f..fe5dedb2d5a 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java @@ -21,7 +21,7 @@ package org.elasticsearch.test.test; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.ESLoggerFactory; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -51,7 +51,7 @@ public class LoggingListenerTests extends ESTestCase { Logger xyzLogger = Loggers.getLogger("xyz"); Logger abcLogger = Loggers.getLogger("abc"); - final Level level = ESLoggerFactory.getRootLogger().getLevel(); + final Level level = LogManager.getRootLogger().getLevel(); assertThat(xyzLogger.getLevel(), equalTo(level)); assertThat(abcLogger.getLevel(), equalTo(level)); @@ -88,7 +88,7 @@ public class LoggingListenerTests extends ESTestCase { Logger fooLogger = Loggers.getLogger("foo"); Logger fooBarLogger = Loggers.getLogger("foo.bar"); - final Level level = ESLoggerFactory.getRootLogger().getLevel(); + final Level level = LogManager.getRootLogger().getLevel(); assertThat(xyzLogger.getLevel(), equalTo(level)); assertThat(abcLogger.getLevel(), equalTo(level)); @@ -128,7 +128,7 @@ public class LoggingListenerTests extends ESTestCase { Logger abcLogger = Loggers.getLogger("abc"); Logger xyzLogger = Loggers.getLogger("xyz"); - 
final Level level = ESLoggerFactory.getRootLogger().getLevel(); + final Level level = LogManager.getRootLogger().getLevel(); assertThat(xyzLogger.getLevel(), equalTo(level)); assertThat(abcLogger.getLevel(), equalTo(level)); diff --git a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java index 42658b1d9a6..4084d08b2e8 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java @@ -40,7 +40,7 @@ public class MockTcpTransportTests extends AbstractSimpleTransportTestCase { Transport transport = new MockTcpTransport(settings, threadPool, BigArrays.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService(), namedWriteableRegistry, new NetworkService(Collections.emptyList()), version) { @Override - protected Version executeHandshake(DiscoveryNode node, TcpChannel mockChannel, TimeValue timeout) throws IOException, + public Version executeHandshake(DiscoveryNode node, TcpChannel mockChannel, TimeValue timeout) throws IOException, InterruptedException { if (doHandshake) { return super.executeHandshake(node, mockChannel, timeout); diff --git a/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java index 754b34353b0..bebe50752f4 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.transport.nio; -import org.apache.lucene.util.Constants; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -63,7 +62,7 @@ public class SimpleMockNioTransportTests extends AbstractSimpleTransportTestCase new NoneCircuitBreakerService()) { @Override - protected Version executeHandshake(DiscoveryNode node, TcpChannel channel, TimeValue timeout) throws IOException, + public Version executeHandshake(DiscoveryNode node, TcpChannel channel, TimeValue timeout) throws IOException, InterruptedException { if (doHandshake) { return super.executeHandshake(node, channel, timeout); @@ -100,7 +99,6 @@ public class SimpleMockNioTransportTests extends AbstractSimpleTransportTestCase } public void testConnectException() throws UnknownHostException { - assumeFalse("Broken on Darwin - https://github.com/elastic/elasticsearch/issues/33879", Constants.MAC_OS_X); try { serviceA.connectToNode(new DiscoveryNode("C", new TransportAddress(InetAddress.getByName("localhost"), 9876), emptyMap(), emptySet(),Version.CURRENT)); diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index f027493b0ab..59d89024db5 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -92,7 +92,6 @@ buildRestTests.docs = fileTree(projectDir) { exclude 'build' // These file simply doesn't pass yet. We should figure out how to fix them. 
exclude 'en/watcher/reference/actions.asciidoc' - exclude 'en/rest-api/graph/explore.asciidoc' } Map setups = buildRestTests.setups diff --git a/x-pack/docs/en/security/configuring-es.asciidoc b/x-pack/docs/en/security/configuring-es.asciidoc index 7bdfbef08de..c79824287b4 100644 --- a/x-pack/docs/en/security/configuring-es.asciidoc +++ b/x-pack/docs/en/security/configuring-es.asciidoc @@ -85,7 +85,7 @@ user API. . Set up roles and users to control access to {es}. For example, to grant _John Doe_ full access to all indices that match the pattern `events*` and enable him to create visualizations and dashboards -for those indices in {kib}, you could create an `events_admin` role and +for those indices in {kib}, you could create an `events_admin` role and assign the role to a new `johndoe` user. + -- diff --git a/x-pack/docs/en/watcher/actions/email.asciidoc b/x-pack/docs/en/watcher/actions/email.asciidoc index 0da028fcc7b..8c56f2e5da3 100644 --- a/x-pack/docs/en/watcher/actions/email.asciidoc +++ b/x-pack/docs/en/watcher/actions/email.asciidoc @@ -72,7 +72,7 @@ killed by firewalls or load balancers inbetween. "attachments" : { "my_image.png" : { <1> "http" : { <2> - "content_type" : "image.png", + "content_type" : "image/png", "request" : { "url": "http://example.org/foo/my-image.png" <3> } diff --git a/x-pack/docs/en/watcher/index.asciidoc b/x-pack/docs/en/watcher/index.asciidoc index 2be36389719..5f51c948ebf 100644 --- a/x-pack/docs/en/watcher/index.asciidoc +++ b/x-pack/docs/en/watcher/index.asciidoc @@ -1,5 +1,5 @@ [[xpack-alerting]] -= Alerting on Cluster and Index Events += Alerting on cluster and index events [partintro] -- diff --git a/x-pack/license-tools/src/test/java/org/elasticsearch/license/licensor/TestUtils.java b/x-pack/license-tools/src/test/java/org/elasticsearch/license/licensor/TestUtils.java index 31b458489d4..8743bc708f4 100644 --- a/x-pack/license-tools/src/test/java/org/elasticsearch/license/licensor/TestUtils.java +++ b/x-pack/license-tools/src/test/java/org/elasticsearch/license/licensor/TestUtils.java @@ -6,9 +6,9 @@ package org.elasticsearch.license.licensor; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -38,8 +38,7 @@ public class TestUtils { private static final FormatDateTimeFormatter formatDateTimeFormatter = Joda.forPattern("yyyy-MM-dd"); - private static final DateMathParser dateMathParser = - new DateMathParser(formatDateTimeFormatter); + private static final DateMathParser dateMathParser = formatDateTimeFormatter.toDateMathParser(); private static final DateTimeFormatter dateTimeFormatter = formatDateTimeFormatter.printer(); public static String dumpLicense(License license) throws Exception { diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java index 505683b892c..07de603f28a 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java +++ 
b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java @@ -31,17 +31,17 @@ public class CcrMultiClusterLicenseIT extends ESRestTestCase { return true; } - public void testFollowIndex() { + public void testResumeFollow() { if (runningAgainstLeaderCluster == false) { - final Request request = new Request("POST", "/follower/_ccr/follow"); + final Request request = new Request("POST", "/follower/_ccr/resume_follow"); request.setJsonEntity("{\"leader_index\": \"leader_cluster:leader\"}"); assertNonCompliantLicense(request); } } - public void testCreateAndFollowIndex() { + public void testFollow() { if (runningAgainstLeaderCluster == false) { - final Request request = new Request("POST", "/follower/_ccr/create_and_follow"); + final Request request = new Request("PUT", "/follower/_ccr/follow"); request.setJsonEntity("{\"leader_index\": \"leader_cluster:leader\"}"); assertNonCompliantLicense(request); } diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-security/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster-with-security/build.gradle index e2c772d7088..418c4e6d249 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-security/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-security/build.gradle @@ -17,7 +17,7 @@ leaderClusterTestCluster { setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' setting 'xpack.monitoring.enabled', 'false' - extraConfigFile 'roles.yml', 'roles.yml' + extraConfigFile 'roles.yml', 'leader-roles.yml' setupCommand 'setupTestAdmin', 'bin/elasticsearch-users', 'useradd', "test_admin", '-p', 'x-pack-test-password', '-r', "superuser" setupCommand 'setupCcrUser', @@ -48,7 +48,7 @@ followClusterTestCluster { setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' setting 'xpack.monitoring.collection.enabled', 'true' - extraConfigFile 'roles.yml', 'roles.yml' + extraConfigFile 'roles.yml', 'follower-roles.yml' setupCommand 'setupTestAdmin', 'bin/elasticsearch-users', 'useradd', "test_admin", '-p', 'x-pack-test-password', '-r', "superuser" setupCommand 'setupCcrUser', diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-security/roles.yml b/x-pack/plugin/ccr/qa/multi-cluster-with-security/follower-roles.yml similarity index 100% rename from x-pack/plugin/ccr/qa/multi-cluster-with-security/roles.yml rename to x-pack/plugin/ccr/qa/multi-cluster-with-security/follower-roles.yml diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-security/leader-roles.yml b/x-pack/plugin/ccr/qa/multi-cluster-with-security/leader-roles.yml new file mode 100644 index 00000000000..99fa62cbe83 --- /dev/null +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-security/leader-roles.yml @@ -0,0 +1,8 @@ +ccruser: + cluster: + - read_ccr + indices: + - names: [ 'allowed-index', 'logs-eu-*' ] + privileges: + - monitor + - read diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java index 85913c26114..699837fa643 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java @@ -80,11 +80,11 @@ public class FollowIndexSecurityIT extends ESRestTestCase { 
refresh(allowedIndex); verifyDocuments(adminClient(), allowedIndex, numDocs); } else { - createAndFollowIndex("leader_cluster:" + allowedIndex, allowedIndex); + follow("leader_cluster:" + allowedIndex, allowedIndex); assertBusy(() -> verifyDocuments(client(), allowedIndex, numDocs)); assertThat(countCcrNodeTasks(), equalTo(1)); assertBusy(() -> verifyCcrMonitoring(allowedIndex, allowedIndex)); - assertOK(client().performRequest(new Request("POST", "/" + allowedIndex + "/_ccr/unfollow"))); + assertOK(client().performRequest(new Request("POST", "/" + allowedIndex + "/_ccr/pause_follow"))); // Make sure that there are no other ccr-related operations running: assertBusy(() -> { Map clusterState = toMap(adminClient().performRequest(new Request("GET", "/_cluster/state"))); @@ -93,9 +93,9 @@ public class FollowIndexSecurityIT extends ESRestTestCase { assertThat(countCcrNodeTasks(), equalTo(0)); }); - followIndex("leader_cluster:" + allowedIndex, allowedIndex); + resumeFollow("leader_cluster:" + allowedIndex, allowedIndex); assertThat(countCcrNodeTasks(), equalTo(1)); - assertOK(client().performRequest(new Request("POST", "/" + allowedIndex + "/_ccr/unfollow"))); + assertOK(client().performRequest(new Request("POST", "/" + allowedIndex + "/_ccr/pause_follow"))); // Make sure that there are no other ccr-related operations running: assertBusy(() -> { Map clusterState = toMap(adminClient().performRequest(new Request("GET", "/_cluster/state"))); @@ -104,17 +104,31 @@ public class FollowIndexSecurityIT extends ESRestTestCase { assertThat(countCcrNodeTasks(), equalTo(0)); }); + // User does not have create_follow_index index privilege for 'unallowedIndex': Exception e = expectThrows(ResponseException.class, - () -> createAndFollowIndex("leader_cluster:" + unallowedIndex, unallowedIndex)); + () -> follow("leader_cluster:" + unallowedIndex, unallowedIndex)); assertThat(e.getMessage(), - containsString("action [indices:admin/xpack/ccr/create_and_follow_index] is unauthorized for user [test_ccr]")); + containsString("action [indices:admin/xpack/ccr/put_follow] is unauthorized for user [test_ccr]")); + // Verify that the follow index has not been created and no node tasks are running + assertThat(indexExists(adminClient(), unallowedIndex), is(false)); + assertBusy(() -> assertThat(countCcrNodeTasks(), equalTo(0))); + + // User does have create_follow_index index privilege on 'allowed' index, + // but not read / monitor roles on 'disallowed' index: + e = expectThrows(ResponseException.class, + () -> follow("leader_cluster:" + unallowedIndex, allowedIndex)); + assertThat(e.getMessage(), containsString("insufficient privileges to follow index [unallowed-index], " + + "privilege for action [indices:monitor/stats] is missing, " + + "privilege for action [indices:data/read/xpack/ccr/shard_changes] is missing")); // Verify that the follow index has not been created and no node tasks are running assertThat(indexExists(adminClient(), unallowedIndex), is(false)); assertBusy(() -> assertThat(countCcrNodeTasks(), equalTo(0))); e = expectThrows(ResponseException.class, - () -> followIndex("leader_cluster:" + unallowedIndex, unallowedIndex)); - assertThat(e.getMessage(), containsString("action [indices:monitor/stats] is unauthorized for user [test_ccr]")); + () -> resumeFollow("leader_cluster:" + unallowedIndex, unallowedIndex)); + assertThat(e.getMessage(), containsString("insufficient privileges to follow index [unallowed-index], " + + "privilege for action [indices:monitor/stats] is missing, " + + "privilege for action
[indices:data/read/xpack/ccr/shard_changes] is missing")); assertThat(indexExists(adminClient(), unallowedIndex), is(false)); assertBusy(() -> assertThat(countCcrNodeTasks(), equalTo(0))); } @@ -125,8 +139,15 @@ public class FollowIndexSecurityIT extends ESRestTestCase { String allowedIndex = "logs-eu-20190101"; String disallowedIndex = "logs-us-20190101"; + { + Request request = new Request("PUT", "/_ccr/auto_follow/leader_cluster"); + request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"]}"); + Exception e = expectThrows(ResponseException.class, () -> assertOK(client().performRequest(request))); + assertThat(e.getMessage(), containsString("insufficient privileges to follow index [logs-*]")); + } + Request request = new Request("PUT", "/_ccr/auto_follow/leader_cluster"); - request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"]}"); + request.setJsonEntity("{\"leader_index_patterns\": [\"logs-eu-*\"]}"); assertOK(client().performRequest(request)); try (RestClient leaderClient = buildLeaderClient()) { @@ -152,11 +173,15 @@ public class FollowIndexSecurityIT extends ESRestTestCase { verifyDocuments(adminClient(), allowedIndex, 5); }); assertThat(indexExists(adminClient(), disallowedIndex), is(false)); + assertBusy(() -> { + verifyCcrMonitoring(allowedIndex, allowedIndex); + verifyAutoFollowMonitoring(); + }); - // Cleanup by deleting auto follow pattern and unfollowing: + // Cleanup by deleting auto follow pattern and pause following: request = new Request("DELETE", "/_ccr/auto_follow/leader_cluster"); assertOK(client().performRequest(request)); - unfollowIndex(allowedIndex); + pauseFollow(allowedIndex); } private int countCcrNodeTasks() throws IOException { @@ -197,14 +222,14 @@ public class FollowIndexSecurityIT extends ESRestTestCase { assertOK(adminClient().performRequest(new Request("POST", "/" + index + "/_refresh"))); } - private static void followIndex(String leaderIndex, String followIndex) throws IOException { - final Request request = new Request("POST", "/" + followIndex + "/_ccr/follow"); + private static void resumeFollow(String leaderIndex, String followIndex) throws IOException { + final Request request = new Request("POST", "/" + followIndex + "/_ccr/resume_follow"); request.setJsonEntity("{\"leader_index\": \"" + leaderIndex + "\", \"poll_timeout\": \"10ms\"}"); assertOK(client().performRequest(request)); } - private static void createAndFollowIndex(String leaderIndex, String followIndex) throws IOException { - final Request request = new Request("POST", "/" + followIndex + "/_ccr/create_and_follow"); + private static void follow(String leaderIndex, String followIndex) throws IOException { + final Request request = new Request("PUT", "/" + followIndex + "/_ccr/follow"); request.setJsonEntity("{\"leader_index\": \"" + leaderIndex + "\", \"poll_timeout\": \"10ms\"}"); assertOK(client().performRequest(request)); } @@ -269,8 +294,8 @@ public class FollowIndexSecurityIT extends ESRestTestCase { return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode(); } - private static void unfollowIndex(String followIndex) throws IOException { - assertOK(client().performRequest(new Request("POST", "/" + followIndex + "/_ccr/unfollow"))); + private static void pauseFollow(String followIndex) throws IOException { + assertOK(client().performRequest(new Request("POST", "/" + followIndex + "/_ccr/pause_follow"))); } private static void verifyCcrMonitoring(String expectedLeaderIndex, String expectedFollowerIndex) throws IOException { @@ -309,4 +334,30 @@ public 
class FollowIndexSecurityIT extends ESRestTestCase { assertThat(numberOfOperationsIndexed, greaterThanOrEqualTo(1)); } + private static void verifyAutoFollowMonitoring() throws IOException { + Request request = new Request("GET", "/.monitoring-*/_search"); + request.setJsonEntity("{\"query\": {\"term\": {\"type\": \"ccr_auto_follow_stats\"}}}"); + Map response; + try { + response = toMap(adminClient().performRequest(request)); + } catch (ResponseException e) { + throw new AssertionError("error while searching", e); + } + + int numberOfSuccessfulFollowIndices = 0; + + List hits = (List) XContentMapValues.extractValue("hits.hits", response); + assertThat(hits.size(), greaterThanOrEqualTo(1)); + + for (int i = 0; i < hits.size(); i++) { + Map hit = (Map) hits.get(i); + + int foundNumberOfOperationsReceived = + (int) XContentMapValues.extractValue("_source.ccr_auto_follow_stats.number_of_successful_follow_indices", hit); + numberOfSuccessfulFollowIndices = Math.max(numberOfSuccessfulFollowIndices, foundNumberOfOperationsReceived); + } + + assertThat(numberOfSuccessfulFollowIndices, greaterThanOrEqualTo(1)); + } + } diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index 514e9f261f7..bfb6408c160 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ -67,11 +67,11 @@ public class FollowIndexIT extends ESRestTestCase { } else { logger.info("Running against follow cluster"); final String followIndexName = "test_index2"; - createAndFollowIndex("leader_cluster:" + leaderIndexName, followIndexName); + followIndex("leader_cluster:" + leaderIndexName, followIndexName); assertBusy(() -> verifyDocuments(followIndexName, numDocs)); // unfollow and then follow and then index a few docs in leader index: - unfollowIndex(followIndexName); - followIndex("leader_cluster:" + leaderIndexName, followIndexName); + pauseFollow(followIndexName); + resumeFollow("leader_cluster:" + leaderIndexName, followIndexName); try (RestClient leaderClient = buildLeaderClient()) { int id = numDocs; index(leaderClient, leaderIndexName, Integer.toString(id), "field", id, "filtered_field", "true"); @@ -86,11 +86,11 @@ public class FollowIndexIT extends ESRestTestCase { public void testFollowNonExistingLeaderIndex() throws Exception { assumeFalse("Test should only run when both clusters are running", runningAgainstLeaderCluster); ResponseException e = expectThrows(ResponseException.class, - () -> followIndex("leader_cluster:non-existing-index", "non-existing-index")); + () -> resumeFollow("leader_cluster:non-existing-index", "non-existing-index")); assertThat(e.getMessage(), containsString("no such index")); assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(404)); - e = expectThrows(ResponseException.class, () -> createAndFollowIndex("leader_cluster:non-existing-index", "non-existing-index")); + e = expectThrows(ResponseException.class, () -> followIndex("leader_cluster:non-existing-index", "non-existing-index")); assertThat(e.getMessage(), containsString("no such index")); assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(404)); } @@ -125,7 +125,10 @@ public class FollowIndexIT extends ESRestTestCase { ensureYellow("logs-20190101"); verifyDocuments("logs-20190101", 5); }); - assertBusy(() 
-> verifyCcrMonitoring("logs-20190101", "logs-20190101")); + assertBusy(() -> { + verifyCcrMonitoring("logs-20190101", "logs-20190101"); + verifyAutoFollowMonitoring(); + }); } private static void index(RestClient client, String index, String id, Object... fields) throws IOException { @@ -143,20 +146,20 @@ assertOK(client().performRequest(new Request("POST", "/" + index + "/_refresh"))); } + private static void resumeFollow(String leaderIndex, String followIndex) throws IOException { + final Request request = new Request("POST", "/" + followIndex + "/_ccr/resume_follow"); + request.setJsonEntity("{\"leader_index\": \"" + leaderIndex + "\", \"poll_timeout\": \"10ms\"}"); + assertOK(client().performRequest(request)); + } + private static void followIndex(String leaderIndex, String followIndex) throws IOException { - final Request request = new Request("POST", "/" + followIndex + "/_ccr/follow"); + final Request request = new Request("PUT", "/" + followIndex + "/_ccr/follow"); request.setJsonEntity("{\"leader_index\": \"" + leaderIndex + "\", \"poll_timeout\": \"10ms\"}"); assertOK(client().performRequest(request)); } - private static void createAndFollowIndex(String leaderIndex, String followIndex) throws IOException { - final Request request = new Request("POST", "/" + followIndex + "/_ccr/create_and_follow"); - request.setJsonEntity("{\"leader_index\": \"" + leaderIndex + "\", \"poll_timeout\": \"10ms\"}"); - assertOK(client().performRequest(request)); - } - - private static void unfollowIndex(String followIndex) throws IOException { - assertOK(client().performRequest(new Request("POST", "/" + followIndex + "/_ccr/unfollow"))); + private static void pauseFollow(String followIndex) throws IOException { + assertOK(client().performRequest(new Request("POST", "/" + followIndex + "/_ccr/pause_follow"))); } private static void verifyDocuments(String index, int expectedNumDocs) throws IOException { @@ -213,6 +216,32 @@ assertThat(numberOfOperationsIndexed, greaterThanOrEqualTo(1)); } + private static void verifyAutoFollowMonitoring() throws IOException { + Request request = new Request("GET", "/.monitoring-*/_search"); + request.setJsonEntity("{\"query\": {\"term\": {\"type\": \"ccr_auto_follow_stats\"}}}"); + Map<String, ?> response; + try { + response = toMap(client().performRequest(request)); + } catch (ResponseException e) { + throw new AssertionError("error while searching", e); + } + + int numberOfSuccessfulFollowIndices = 0; + + List<?> hits = (List<?>) XContentMapValues.extractValue("hits.hits", response); + assertThat(hits.size(), greaterThanOrEqualTo(1)); + + for (int i = 0; i < hits.size(); i++) { + Map<?, ?> hit = (Map<?, ?>) hits.get(i); + + int foundNumberOfSuccessfulFollowIndices = + (int) XContentMapValues.extractValue("_source.ccr_auto_follow_stats.number_of_successful_follow_indices", hit); + numberOfSuccessfulFollowIndices = Math.max(numberOfSuccessfulFollowIndices, foundNumberOfSuccessfulFollowIndices); + } + + assertThat(numberOfSuccessfulFollowIndices, greaterThanOrEqualTo(1)); + } + private static Map<String, Object> toMap(Response response) throws IOException { return toMap(EntityUtils.toString(response.getEntity())); } diff --git a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml index f4cf79fb558..2bf7820c10f 100644 --- a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml 
+++ b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml @@ -5,9 +5,26 @@ leader_cluster_alias: _local_ body: leader_index_patterns: ['logs-*'] + max_concurrent_read_batches: 2 - is_true: acknowledged + - do: + ccr.get_auto_follow_pattern: + leader_cluster_alias: _local_ + - match: { _local_.leader_index_patterns: ['logs-*'] } + - match: { _local_.max_concurrent_read_batches: 2 } + + - do: + ccr.get_auto_follow_pattern: {} + - match: { _local_.leader_index_patterns: ['logs-*'] } + - match: { _local_.max_concurrent_read_batches: 2 } + - do: ccr.delete_auto_follow_pattern: leader_cluster_alias: _local_ - is_true: acknowledged + + - do: + catch: missing + ccr.get_auto_follow_pattern: + leader_cluster_alias: _local_ diff --git a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml index 6c95f307c25..ab60b2e4948 100644 --- a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml +++ b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml @@ -16,7 +16,7 @@ - is_true: acknowledged - do: - ccr.create_and_follow_index: + ccr.follow: index: bar body: leader_index: foo @@ -25,18 +25,28 @@ - is_true: index_following_started - do: - ccr.unfollow_index: + ccr.pause_follow: index: bar - is_true: acknowledged - do: - ccr.follow_index: + ccr.resume_follow: index: bar body: leader_index: foo - is_true: acknowledged - do: - ccr.unfollow_index: + ccr.pause_follow: + index: bar + - is_true: acknowledged + + - do: + indices.close: + index: bar + - is_true: acknowledged + + - do: + ccr.unfollow: index: bar - is_true: acknowledged diff --git a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/stats.yml b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/stats.yml index 431629b1d23..1c1170acf23 100644 --- a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/stats.yml +++ b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/stats.yml @@ -15,7 +15,7 @@ type: keyword - do: - ccr.create_and_follow_index: + ccr.follow: index: bar body: leader_index: foo @@ -51,7 +51,7 @@ - gte: { bar.0.time_since_last_fetch_millis: -1 } - do: - ccr.unfollow_index: + ccr.pause_follow: index: bar - is_true: acknowledged diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index 2efd86fca64..7caf144d533 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -40,35 +40,41 @@ import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator; +import org.elasticsearch.xpack.ccr.action.TransportGetAutoFollowPatternAction; +import org.elasticsearch.xpack.ccr.action.TransportUnfollowAction; +import org.elasticsearch.xpack.ccr.rest.RestGetAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.action.TransportAutoFollowStatsAction; import org.elasticsearch.xpack.ccr.rest.RestAutoFollowStatsAction; +import org.elasticsearch.xpack.ccr.rest.RestUnfollowAction; import org.elasticsearch.xpack.core.ccr.action.AutoFollowStatsAction; import 
org.elasticsearch.xpack.core.ccr.action.DeleteAutoFollowPatternAction; +import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction; import org.elasticsearch.xpack.core.ccr.action.PutAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.action.ShardChangesAction; import org.elasticsearch.xpack.ccr.action.ShardFollowTask; import org.elasticsearch.xpack.ccr.action.ShardFollowTasksExecutor; import org.elasticsearch.xpack.ccr.action.TransportCcrStatsAction; -import org.elasticsearch.xpack.ccr.action.TransportCreateAndFollowIndexAction; +import org.elasticsearch.xpack.ccr.action.TransportPutFollowAction; import org.elasticsearch.xpack.ccr.action.TransportDeleteAutoFollowPatternAction; -import org.elasticsearch.xpack.ccr.action.TransportFollowIndexAction; +import org.elasticsearch.xpack.ccr.action.TransportResumeFollowAction; import org.elasticsearch.xpack.ccr.action.TransportPutAutoFollowPatternAction; -import org.elasticsearch.xpack.ccr.action.TransportUnfollowIndexAction; +import org.elasticsearch.xpack.ccr.action.TransportPauseFollowAction; import org.elasticsearch.xpack.ccr.action.bulk.BulkShardOperationsAction; import org.elasticsearch.xpack.ccr.action.bulk.TransportBulkShardOperationsAction; import org.elasticsearch.xpack.ccr.index.engine.FollowingEngineFactory; import org.elasticsearch.xpack.ccr.rest.RestCcrStatsAction; -import org.elasticsearch.xpack.ccr.rest.RestCreateAndFollowIndexAction; +import org.elasticsearch.xpack.ccr.rest.RestPutFollowAction; import org.elasticsearch.xpack.ccr.rest.RestDeleteAutoFollowPatternAction; -import org.elasticsearch.xpack.ccr.rest.RestFollowIndexAction; +import org.elasticsearch.xpack.ccr.rest.RestResumeFollowAction; import org.elasticsearch.xpack.ccr.rest.RestPutAutoFollowPatternAction; -import org.elasticsearch.xpack.ccr.rest.RestUnfollowIndexAction; +import org.elasticsearch.xpack.ccr.rest.RestPauseFollowAction; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus; import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; -import org.elasticsearch.xpack.core.ccr.action.CreateAndFollowIndexAction; -import org.elasticsearch.xpack.core.ccr.action.FollowIndexAction; -import org.elasticsearch.xpack.core.ccr.action.UnfollowIndexAction; +import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; +import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction; +import org.elasticsearch.xpack.core.ccr.action.PauseFollowAction; +import org.elasticsearch.xpack.core.ccr.action.UnfollowAction; import java.util.Arrays; import java.util.Collection; @@ -142,7 +148,7 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E @Override public List<PersistentTasksExecutor<?>> getPersistentTasksExecutor(ClusterService clusterService, ThreadPool threadPool, Client client) { - return Collections.singletonList(new ShardFollowTasksExecutor(settings, client, threadPool)); + return Collections.singletonList(new ShardFollowTasksExecutor(settings, client, threadPool, clusterService)); } public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() { @@ -158,12 +164,14 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E new ActionHandler<>(CcrStatsAction.INSTANCE, TransportCcrStatsAction.class), new ActionHandler<>(AutoFollowStatsAction.INSTANCE, TransportAutoFollowStatsAction.class), // follow actions - new ActionHandler<>(CreateAndFollowIndexAction.INSTANCE, TransportCreateAndFollowIndexAction.class), - new ActionHandler<>(FollowIndexAction.INSTANCE, TransportFollowIndexAction.class), - 
new ActionHandler<>(UnfollowIndexAction.INSTANCE, TransportUnfollowIndexAction.class), + new ActionHandler<>(PutFollowAction.INSTANCE, TransportPutFollowAction.class), + new ActionHandler<>(ResumeFollowAction.INSTANCE, TransportResumeFollowAction.class), + new ActionHandler<>(PauseFollowAction.INSTANCE, TransportPauseFollowAction.class), + new ActionHandler<>(UnfollowAction.INSTANCE, TransportUnfollowAction.class), // auto-follow actions new ActionHandler<>(DeleteAutoFollowPatternAction.INSTANCE, TransportDeleteAutoFollowPatternAction.class), - new ActionHandler<>(PutAutoFollowPatternAction.INSTANCE, TransportPutAutoFollowPatternAction.class)); + new ActionHandler<>(PutAutoFollowPatternAction.INSTANCE, TransportPutAutoFollowPatternAction.class), + new ActionHandler<>(GetAutoFollowPatternAction.INSTANCE, TransportGetAutoFollowPatternAction.class)); } public List<RestHandler> getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings, @@ -179,12 +187,14 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E new RestCcrStatsAction(settings, restController), new RestAutoFollowStatsAction(settings, restController), // follow APIs - new RestCreateAndFollowIndexAction(settings, restController), - new RestFollowIndexAction(settings, restController), - new RestUnfollowIndexAction(settings, restController), + new RestPutFollowAction(settings, restController), + new RestResumeFollowAction(settings, restController), + new RestPauseFollowAction(settings, restController), + new RestUnfollowAction(settings, restController), // auto-follow APIs new RestDeleteAutoFollowPatternAction(settings, restController), - new RestPutAutoFollowPatternAction(settings, restController)); + new RestPutAutoFollowPatternAction(settings, restController), + new RestGetAutoFollowPatternAction(settings, restController)); } public List<NamedWriteableRegistry.Entry> getNamedWriteables() { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java index 065b3ffd4f5..d2c86e69fbd 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.admin.indices.stats.IndexShardStats; import org.elasticsearch.action.admin.indices.stats.IndexStats; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; @@ -25,6 +26,8 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.CheckedConsumer; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.Engine; @@ -33,8 +36,16 @@ import org.elasticsearch.license.RemoteClusterLicenseChecker; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; import 
org.elasticsearch.xpack.ccr.action.ShardFollowTask; +import org.elasticsearch.xpack.ccr.action.ShardChangesAction; import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.core.security.SecurityContext; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.support.Exceptions; +import java.util.Arrays; import java.util.Collections; import java.util.Locale; import java.util.Map; @@ -52,21 +63,24 @@ import java.util.stream.Collectors; public final class CcrLicenseChecker { private final BooleanSupplier isCcrAllowed; + private final BooleanSupplier isAuthAllowed; /** * Constructs a CCR license checker with the default rule based on the license state for checking if CCR is allowed. */ CcrLicenseChecker() { - this(XPackPlugin.getSharedLicenseState()::isCcrAllowed); + this(XPackPlugin.getSharedLicenseState()::isCcrAllowed, XPackPlugin.getSharedLicenseState()::isAuthAllowed); } /** - * Constructs a CCR license checker with the specified boolean supplier. + * Constructs a CCR license checker with the specified boolean suppliers. * - * @param isCcrAllowed a boolean supplier that should return true if CCR is allowed and false otherwise + * @param isCcrAllowed a boolean supplier that should return true if CCR is allowed and false otherwise + * @param isAuthAllowed a boolean supplier that should return true if security, authentication, and authorization are allowed */ - public CcrLicenseChecker(final BooleanSupplier isCcrAllowed) { - this.isCcrAllowed = Objects.requireNonNull(isCcrAllowed); + public CcrLicenseChecker(final BooleanSupplier isCcrAllowed, final BooleanSupplier isAuthAllowed) { + this.isCcrAllowed = Objects.requireNonNull(isCcrAllowed, "isCcrAllowed"); + this.isAuthAllowed = Objects.requireNonNull(isAuthAllowed, "isAuthAllowed"); } /** @@ -116,8 +130,13 @@ public final class CcrLicenseChecker { } final Client leaderClient = client.getRemoteClusterClient(clusterAlias); - fetchLeaderHistoryUUIDs(leaderClient, leaderIndexMetaData, onFailure, historyUUIDs -> { - consumer.accept(historyUUIDs, leaderIndexMetaData); + hasPrivilegesToFollowIndices(leaderClient, new String[] {leaderIndex}, e -> { + if (e == null) { + fetchLeaderHistoryUUIDs(leaderClient, leaderIndexMetaData, onFailure, historyUUIDs -> + consumer.accept(historyUUIDs, leaderIndexMetaData)); + } else { + onFailure.accept(e); + } }); }, licenseCheck -> indexMetadataNonCompliantRemoteLicense(leaderIndex, licenseCheck), @@ -136,9 +155,8 @@ * @param request the cluster state request * @param onFailure the failure consumer * @param leaderClusterStateConsumer the leader cluster state consumer - * @param <T> the type of response the listener is waiting for */ - public <T> void checkRemoteClusterLicenseAndFetchClusterState( + public void checkRemoteClusterLicenseAndFetchClusterState( final Client client, final Map<String, String> headers, final String clusterAlias, @@ -259,6 +277,64 @@ leaderClient.admin().indices().stats(request, ActionListener.wrap(indicesStatsHandler, onFailure)); } + /** + * Check if the user executing the current action has privileges to follow the specified indices on the cluster specified by the leader + * client. 
The specified callback will be invoked with null if the user has the necessary privileges to follow the specified indices, + * otherwise the callback will be invoked with an exception outlining the authorization error. + * + * @param leaderClient the leader client + * @param indices the indices + * @param handler the callback + */ + public void hasPrivilegesToFollowIndices(final Client leaderClient, final String[] indices, final Consumer<Exception> handler) { + Objects.requireNonNull(leaderClient, "leaderClient"); + Objects.requireNonNull(indices, "indices"); + if (indices.length == 0) { + throw new IllegalArgumentException("indices must not be empty"); + } + Objects.requireNonNull(handler, "handler"); + if (isAuthAllowed.getAsBoolean() == false) { + handler.accept(null); + return; + } + + ThreadContext threadContext = leaderClient.threadPool().getThreadContext(); + SecurityContext securityContext = new SecurityContext(Settings.EMPTY, threadContext); + String username = securityContext.getUser().principal(); + + RoleDescriptor.IndicesPrivileges privileges = RoleDescriptor.IndicesPrivileges.builder() + .indices(indices) + .privileges(IndicesStatsAction.NAME, ShardChangesAction.NAME) + .build(); + + HasPrivilegesRequest request = new HasPrivilegesRequest(); + request.username(username); + request.clusterPrivileges(Strings.EMPTY_ARRAY); + request.indexPrivileges(privileges); + request.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]); + CheckedConsumer<HasPrivilegesResponse, Exception> responseHandler = response -> { + if (response.isCompleteMatch()) { + handler.accept(null); + } else { + StringBuilder message = new StringBuilder("insufficient privileges to follow"); + message.append(indices.length == 1 ? " index " : " indices "); + message.append(Arrays.toString(indices)); + + HasPrivilegesResponse.ResourcePrivileges resourcePrivileges = response.getIndexPrivileges().get(0); + for (Map.Entry<String, Boolean> entry : resourcePrivileges.getPrivileges().entrySet()) { + if (entry.getValue() == false) { + message.append(", privilege for action ["); + message.append(entry.getKey()); + message.append("] is missing"); + } + } + + handler.accept(Exceptions.authorizationError(message.toString())); + } + }; + leaderClient.execute(HasPrivilegesAction.INSTANCE, request, ActionListener.wrap(responseHandler, handler)); + } + public static Client wrapClient(Client client, Map<String, String> headers) { if (headers.isEmpty()) { return client; diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index d2cd5cdd4d9..b3ed608bd2a 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -28,13 +28,14 @@ import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.index.Index; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.ccr.Ccr; import org.elasticsearch.xpack.ccr.CcrLicenseChecker; import org.elasticsearch.xpack.ccr.CcrSettings; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; import org.elasticsearch.xpack.core.ccr.AutoFollowStats; -import org.elasticsearch.xpack.core.ccr.action.CreateAndFollowIndexAction; -import org.elasticsearch.xpack.core.ccr.action.FollowIndexAction; 
+import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; +import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction; import java.util.ArrayList; import java.util.Collections; @@ -183,13 +184,13 @@ public class AutoFollowCoordinator implements ClusterStateApplier { @Override void createAndFollow(Map<String, String> headers, - FollowIndexAction.Request followRequest, + ResumeFollowAction.Request followRequest, Runnable successHandler, Consumer<Exception> failureHandler) { Client followerClient = CcrLicenseChecker.wrapClient(client, headers); - CreateAndFollowIndexAction.Request request = new CreateAndFollowIndexAction.Request(followRequest); + PutFollowAction.Request request = new PutFollowAction.Request(followRequest); followerClient.execute( - CreateAndFollowIndexAction.INSTANCE, + PutFollowAction.INSTANCE, request, ActionListener.wrap(r -> successHandler.run(), failureHandler) ); @@ -254,17 +255,18 @@ public class AutoFollowCoordinator implements ClusterStateApplier { final String clusterAlias = entry.getKey(); final AutoFollowPattern autoFollowPattern = entry.getValue(); - getLeaderClusterState(autoFollowPattern.getHeaders(), clusterAlias, (leaderClusterState, e) -> { + Map<String, String> headers = autoFollowMetadata.getHeaders().get(clusterAlias); + getLeaderClusterState(headers, clusterAlias, (leaderClusterState, e) -> { if (leaderClusterState != null) { assert e == null; final List<String> followedIndices = autoFollowMetadata.getFollowedLeaderIndexUUIDs().get(clusterAlias); - final List<Index> leaderIndicesToFollow = - getLeaderIndicesToFollow(autoFollowPattern, leaderClusterState, followerClusterState, followedIndices); + final List<Index> leaderIndicesToFollow = getLeaderIndicesToFollow(clusterAlias, autoFollowPattern, + leaderClusterState, followerClusterState, followedIndices); if (leaderIndicesToFollow.isEmpty()) { finalise(slot, new AutoFollowResult(clusterAlias)); } else { Consumer<AutoFollowResult> resultHandler = result -> finalise(slot, result); - checkAutoFollowPattern(clusterAlias, autoFollowPattern, leaderIndicesToFollow, resultHandler); + checkAutoFollowPattern(clusterAlias, autoFollowPattern, leaderIndicesToFollow, headers, resultHandler); } } else { finalise(slot, new AutoFollowResult(clusterAlias, e)); @@ -274,15 +276,18 @@ } } - private void checkAutoFollowPattern(String clusterAlias, AutoFollowPattern autoFollowPattern, - List<Index> leaderIndicesToFollow, Consumer<AutoFollowResult> resultHandler) { + private void checkAutoFollowPattern(String clusterAlias, + AutoFollowPattern autoFollowPattern, + List<Index> leaderIndicesToFollow, + Map<String, String> headers, + Consumer<AutoFollowResult> resultHandler) { final CountDown leaderIndicesCountDown = new CountDown(leaderIndicesToFollow.size()); final AtomicArray<Tuple<Index, Exception>> results = new AtomicArray<>(leaderIndicesToFollow.size()); for (int i = 0; i < leaderIndicesToFollow.size(); i++) { final Index indexToFollow = leaderIndicesToFollow.get(i); final int slot = i; - followLeaderIndex(clusterAlias, indexToFollow, autoFollowPattern, error -> { + followLeaderIndex(clusterAlias, indexToFollow, autoFollowPattern, headers, error -> { results.set(slot, new Tuple<>(indexToFollow, error)); if (leaderIndicesCountDown.countDown()) { resultHandler.accept(new AutoFollowResult(clusterAlias, results.asList())); @@ -291,19 +296,22 @@ } } - private void followLeaderIndex(String clusterAlias, Index indexToFollow, - AutoFollowPattern pattern, Consumer<Exception> onResult) { + private void followLeaderIndex(String clusterAlias, + Index indexToFollow, + 
AutoFollowPattern pattern, + Map<String, String> headers, + Consumer<Exception> onResult) { final String leaderIndexName = indexToFollow.getName(); final String followIndexName = getFollowerIndexName(pattern, leaderIndexName); String leaderIndexNameWithClusterAliasPrefix = clusterAlias.equals("_local_") ? leaderIndexName : clusterAlias + ":" + leaderIndexName; - FollowIndexAction.Request request = new FollowIndexAction.Request(); + ResumeFollowAction.Request request = new ResumeFollowAction.Request(); request.setLeaderIndex(leaderIndexNameWithClusterAliasPrefix); request.setFollowerIndex(followIndexName); request.setMaxBatchOperationCount(pattern.getMaxBatchOperationCount()); request.setMaxConcurrentReadBatches(pattern.getMaxConcurrentReadBatches()); - request.setMaxOperationSizeInBytes(pattern.getMaxOperationSizeInBytes()); + request.setMaxBatchSize(pattern.getMaxBatchSize()); request.setMaxConcurrentWriteBatches(pattern.getMaxConcurrentWriteBatches()); request.setMaxWriteBufferSize(pattern.getMaxWriteBufferSize()); request.setMaxRetryDelay(pattern.getMaxRetryDelay()); @@ -319,7 +327,7 @@ // The coordinator always runs on the elected master node, so we can update cluster state here: updateAutoFollowMetadata(function, onResult); }; - createAndFollow(pattern.getHeaders(), request, successHandler, onResult); + createAndFollow(headers, request, successHandler, onResult); } private void finalise(int slot, AutoFollowResult result) { @@ -330,12 +338,21 @@ } } - static List<Index> getLeaderIndicesToFollow(AutoFollowPattern autoFollowPattern, + static List<Index> getLeaderIndicesToFollow(String clusterAlias, + AutoFollowPattern autoFollowPattern, ClusterState leaderClusterState, ClusterState followerClusterState, List<String> followedIndexUUIDs) { List<Index> leaderIndicesToFollow = new ArrayList<>(); for (IndexMetaData leaderIndexMetaData : leaderClusterState.getMetaData()) { + // If an auto follow pattern has been set up for the local cluster then + // we should not automatically follow a leader index that is also a follow index because + // this can result in an index creation explosion. 
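+ // (Illustrative example: with a local-cluster pattern on "logs-*", following "logs-1" creates a follower index that can itself match "logs-*" and be followed again on the next poll, and so on.)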
+ if (leaderIndexMetaData.getCustomData(Ccr.CCR_CUSTOM_METADATA_KEY) != null && + clusterAlias.equals("_local_")) { + continue; + } + if (autoFollowPattern.match(leaderIndexMetaData.getIndex().getName())) { if (followedIndexUUIDs.contains(leaderIndexMetaData.getIndex().getUUID()) == false) { // TODO: iterate over the indices in the followerClusterState and check whether an IndexMetaData @@ -357,21 +374,23 @@ } } - static Function<ClusterState, ClusterState> recordLeaderIndexAsFollowFunction(String clusterAlias, Index indexToFollow) { + static Function<ClusterState, ClusterState> recordLeaderIndexAsFollowFunction(String clusterAlias, + Index indexToFollow) { return currentState -> { AutoFollowMetadata currentAutoFollowMetadata = currentState.metaData().custom(AutoFollowMetadata.TYPE); - - Map<String, List<String>> newFollowedIndexUUIDS = - new HashMap<>(currentAutoFollowMetadata.getFollowedLeaderIndexUUIDs()); - newFollowedIndexUUIDS.get(clusterAlias).add(indexToFollow.getUUID()); - - ClusterState.Builder newState = ClusterState.builder(currentState); - AutoFollowMetadata newAutoFollowMetadata = - new AutoFollowMetadata(currentAutoFollowMetadata.getPatterns(), newFollowedIndexUUIDS); - newState.metaData(MetaData.builder(currentState.getMetaData()) - .putCustom(AutoFollowMetadata.TYPE, newAutoFollowMetadata) - .build()); - return newState.build(); + Map<String, List<String>> newFollowedIndexUUIDS = new HashMap<>(currentAutoFollowMetadata.getFollowedLeaderIndexUUIDs()); + newFollowedIndexUUIDS.compute(clusterAlias, (key, existingUUIDs) -> { + assert existingUUIDs != null; + List<String> newUUIDs = new ArrayList<>(existingUUIDs); + newUUIDs.add(indexToFollow.getUUID()); + return Collections.unmodifiableList(newUUIDs); + }); + final AutoFollowMetadata newAutoFollowMetadata = new AutoFollowMetadata(currentAutoFollowMetadata.getPatterns(), + newFollowedIndexUUIDS, currentAutoFollowMetadata.getHeaders()); + return ClusterState.builder(currentState) + .metaData(MetaData.builder(currentState.getMetaData()) + .putCustom(AutoFollowMetadata.TYPE, newAutoFollowMetadata).build()) + .build(); }; } @@ -390,7 +409,7 @@ abstract void createAndFollow( Map<String, String> headers, - FollowIndexAction.Request followRequest, + ResumeFollowAction.Request followRequest, Runnable successHandler, Consumer<Exception> failureHandler ); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java index 937ca0a0096..eb31ce25746 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.seqno.SeqNoStats; @@ -63,8 +64,8 @@ public class ShardChangesAction extends Action<ShardChangesAction.Response> { private int maxOperationCount; private ShardId shardId; private String expectedHistoryUUID; - private TimeValue pollTimeout = TransportFollowIndexAction.DEFAULT_POLL_TIMEOUT; - private long maxOperationSizeInBytes = TransportFollowIndexAction.DEFAULT_MAX_BATCH_SIZE_IN_BYTES; + private 
TimeValue pollTimeout = TransportResumeFollowAction.DEFAULT_POLL_TIMEOUT; + private ByteSizeValue maxBatchSize = TransportResumeFollowAction.DEFAULT_MAX_BATCH_SIZE; public Request(ShardId shardId, String expectedHistoryUUID) { super(shardId.getIndexName()); @@ -95,12 +96,12 @@ public class ShardChangesAction extends Action { this.maxOperationCount = maxOperationCount; } - public long getMaxOperationSizeInBytes() { - return maxOperationSizeInBytes; + public ByteSizeValue getMaxBatchSize() { + return maxBatchSize; } - public void setMaxOperationSizeInBytes(long maxOperationSizeInBytes) { - this.maxOperationSizeInBytes = maxOperationSizeInBytes; + public void setMaxBatchSize(ByteSizeValue maxBatchSize) { + this.maxBatchSize = maxBatchSize; } public String getExpectedHistoryUUID() { @@ -125,9 +126,9 @@ public class ShardChangesAction extends Action { validationException = addValidationError("maxOperationCount [" + maxOperationCount + "] cannot be lower than 0", validationException); } - if (maxOperationSizeInBytes <= 0) { - validationException = addValidationError("maxOperationSizeInBytes [" + maxOperationSizeInBytes + "] must be larger than 0", - validationException); + if (maxBatchSize.compareTo(ByteSizeValue.ZERO) <= 0) { + validationException = + addValidationError("maxBatchSize [" + maxBatchSize.getStringRep() + "] must be larger than 0", validationException); } return validationException; } @@ -140,7 +141,7 @@ public class ShardChangesAction extends Action { shardId = ShardId.readShardId(in); expectedHistoryUUID = in.readString(); pollTimeout = in.readTimeValue(); - maxOperationSizeInBytes = in.readVLong(); + maxBatchSize = new ByteSizeValue(in); } @Override @@ -151,7 +152,7 @@ public class ShardChangesAction extends Action { shardId.writeTo(out); out.writeString(expectedHistoryUUID); out.writeTimeValue(pollTimeout); - out.writeVLong(maxOperationSizeInBytes); + maxBatchSize.writeTo(out); } @@ -165,12 +166,12 @@ public class ShardChangesAction extends Action { Objects.equals(shardId, request.shardId) && Objects.equals(expectedHistoryUUID, request.expectedHistoryUUID) && Objects.equals(pollTimeout, request.pollTimeout) && - maxOperationSizeInBytes == request.maxOperationSizeInBytes; + maxBatchSize.equals(request.maxBatchSize); } @Override public int hashCode() { - return Objects.hash(fromSeqNo, maxOperationCount, shardId, expectedHistoryUUID, pollTimeout, maxOperationSizeInBytes); + return Objects.hash(fromSeqNo, maxOperationCount, shardId, expectedHistoryUUID, pollTimeout, maxBatchSize); } @Override @@ -181,7 +182,7 @@ public class ShardChangesAction extends Action { ", shardId=" + shardId + ", expectedHistoryUUID=" + expectedHistoryUUID + ", pollTimeout=" + pollTimeout + - ", maxOperationSizeInBytes=" + maxOperationSizeInBytes + + ", maxBatchSize=" + maxBatchSize.getStringRep() + '}'; } @@ -207,6 +208,12 @@ public class ShardChangesAction extends Action { return maxSeqNo; } + private long maxSeqNoOfUpdatesOrDeletes; + + public long getMaxSeqNoOfUpdatesOrDeletes() { + return maxSeqNoOfUpdatesOrDeletes; + } + private Translog.Operation[] operations; public Translog.Operation[] getOperations() { @@ -220,11 +227,13 @@ public class ShardChangesAction extends Action { final long mappingVersion, final long globalCheckpoint, final long maxSeqNo, + final long maxSeqNoOfUpdatesOrDeletes, final Translog.Operation[] operations) { this.mappingVersion = mappingVersion; this.globalCheckpoint = globalCheckpoint; this.maxSeqNo = maxSeqNo; + this.maxSeqNoOfUpdatesOrDeletes = maxSeqNoOfUpdatesOrDeletes; 
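+ // the leader's max seq_no of updates or deletes now travels with every response; the follower task keeps a running maximum of the values it sees (see ShardFollowNodeTask below)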
this.operations = operations; } @@ -234,6 +243,7 @@ mappingVersion = in.readVLong(); globalCheckpoint = in.readZLong(); maxSeqNo = in.readZLong(); + maxSeqNoOfUpdatesOrDeletes = in.readZLong(); operations = in.readArray(Translog.Operation::readOperation, Translog.Operation[]::new); } @@ -243,6 +253,7 @@ out.writeVLong(mappingVersion); out.writeZLong(globalCheckpoint); out.writeZLong(maxSeqNo); + out.writeZLong(maxSeqNoOfUpdatesOrDeletes); out.writeArray(Translog.Operation::writeOperation, operations); } @@ -254,12 +265,13 @@ return mappingVersion == that.mappingVersion && globalCheckpoint == that.globalCheckpoint && maxSeqNo == that.maxSeqNo && + maxSeqNoOfUpdatesOrDeletes == that.maxSeqNoOfUpdatesOrDeletes && Arrays.equals(operations, that.operations); } @Override public int hashCode() { - return Objects.hash(mappingVersion, globalCheckpoint, maxSeqNo, Arrays.hashCode(operations)); + return Objects.hash(mappingVersion, globalCheckpoint, maxSeqNo, maxSeqNoOfUpdatesOrDeletes, Arrays.hashCode(operations)); } } @@ -293,8 +305,10 @@ request.getFromSeqNo(), request.getMaxOperationCount(), request.getExpectedHistoryUUID(), - request.getMaxOperationSizeInBytes()); - return getResponse(mappingVersion, seqNoStats, operations); + request.getMaxBatchSize()); + // must capture after snapshotting operations to ensure this MUS (max seq_no of updates or deletes) is at least the highest MUS of any of these operations. + final long maxSeqNoOfUpdatesOrDeletes = indexShard.getMaxSeqNoOfUpdatesOrDeletes(); + return getResponse(mappingVersion, seqNoStats, maxSeqNoOfUpdatesOrDeletes, operations); } @Override @@ -358,7 +372,8 @@ final long mappingVersion = clusterService.state().metaData().index(shardId.getIndex()).getMappingVersion(); final SeqNoStats latestSeqNoStats = indexShard.seqNoStats(); - listener.onResponse(getResponse(mappingVersion, latestSeqNoStats, EMPTY_OPERATIONS_ARRAY)); + final long maxSeqNoOfUpdatesOrDeletes = indexShard.getMaxSeqNoOfUpdatesOrDeletes(); + listener.onResponse(getResponse(mappingVersion, latestSeqNoStats, maxSeqNoOfUpdatesOrDeletes, EMPTY_OPERATIONS_ARRAY)); } catch (final Exception caught) { caught.addSuppressed(e); listener.onFailure(caught); @@ -391,18 +406,28 @@ static final Translog.Operation[] EMPTY_OPERATIONS_ARRAY = new Translog.Operation[0]; /** - * Returns at most maxOperationCount operations from the specified from sequence number. - * This method will never return operations above the specified globalCheckpoint. + * Returns at most the specified maximum number of operations from the specified from sequence number. This method will never return + * operations above the specified global checkpoint. * - * Also if the sum of collected operations' size is above the specified maxOperationSizeInBytes then this method - * stops collecting more operations and returns what has been collected so far. + * Also if the sum of the collected operations' sizes is above the specified maximum batch size then this method stops collecting more + * operations and returns what has been collected so far. 
+ * + * @param indexShard the shard + * @param globalCheckpoint the global checkpoint + * @param fromSeqNo the starting sequence number + * @param maxOperationCount the maximum number of operations + * @param expectedHistoryUUID the expected history UUID for the shard + * @param maxBatchSize the maximum batch size + * @return the operations + * @throws IOException if an I/O exception occurs reading the operations */ - static Translog.Operation[] getOperations(IndexShard indexShard, - long globalCheckpoint, - long fromSeqNo, - int maxOperationCount, - String expectedHistoryUUID, - long maxOperationSizeInBytes) throws IOException { + static Translog.Operation[] getOperations( + final IndexShard indexShard, + final long globalCheckpoint, + final long fromSeqNo, + final int maxOperationCount, + final String expectedHistoryUUID, + final ByteSizeValue maxBatchSize) throws IOException { if (indexShard.state() != IndexShardState.STARTED) { throw new IndexShardNotStartedException(indexShard.shardId(), indexShard.state()); } @@ -425,7 +450,7 @@ public class ShardChangesAction extends Action { while ((op = snapshot.next()) != null) { operations.add(op); seenBytes += op.estimateSize(); - if (seenBytes > maxOperationSizeInBytes) { + if (seenBytes > maxBatchSize.getBytes()) { break; } } @@ -433,8 +458,9 @@ public class ShardChangesAction extends Action { return operations.toArray(EMPTY_OPERATIONS_ARRAY); } - static Response getResponse(final long mappingVersion, final SeqNoStats seqNoStats, final Translog.Operation[] operations) { - return new Response(mappingVersion, seqNoStats.getGlobalCheckpoint(), seqNoStats.getMaxSeqNo(), operations); + static Response getResponse(final long mappingVersion, final SeqNoStats seqNoStats, + final long maxSeqNoOfUpdates, final Translog.Operation[] operations) { + return new Response(mappingVersion, seqNoStats.getGlobalCheckpoint(), seqNoStats.getMaxSeqNo(), maxSeqNoOfUpdates, operations); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java index 777efdd654b..781fb359a45 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java @@ -6,18 +6,26 @@ package org.elasticsearch.xpack.ccr.action; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.action.support.TransportActions; +import org.elasticsearch.action.NoShardAvailableActionException; +import org.elasticsearch.action.UnavailableShardsException; +import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.transport.NetworkExceptionHelper; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.shard.IllegalIndexShardStateException; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; import 
org.elasticsearch.index.translog.Translog; +import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xpack.ccr.action.bulk.BulkShardOperationsResponse; @@ -47,15 +55,17 @@ import java.util.stream.Collectors; public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { private static final int DELAY_MILLIS = 50; - private static final Logger LOGGER = Loggers.getLogger(ShardFollowNodeTask.class); + private static final Logger LOGGER = LogManager.getLogger(ShardFollowNodeTask.class); private final String leaderIndex; private final ShardFollowTask params; private final BiConsumer<TimeValue, Runnable> scheduler; private final LongSupplier relativeTimeProvider; + private String followerHistoryUUID; private long leaderGlobalCheckpoint; private long leaderMaxSeqNo; + private long leaderMaxSeqNoOfUpdatesOrDeletes = SequenceNumbers.UNASSIGNED_SEQ_NO; private long lastRequestedSeqNo; private long followerGlobalCheckpoint = 0; private long followerMaxSeqNo = 0; @@ -101,15 +111,17 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { } void start( - final long leaderGlobalCheckpoint, - final long leaderMaxSeqNo, - final long followerGlobalCheckpoint, - final long followerMaxSeqNo) { + final String followerHistoryUUID, + final long leaderGlobalCheckpoint, + final long leaderMaxSeqNo, + final long followerGlobalCheckpoint, + final long followerMaxSeqNo) { /* * While this should only ever be called once and before any other threads can touch these fields, we use synchronization here to * avoid the need to declare these fields as volatile. That is, we are ensuring these fields are always accessed under the same lock. */ synchronized (this) { + this.followerHistoryUUID = followerHistoryUUID; this.leaderGlobalCheckpoint = leaderGlobalCheckpoint; this.leaderMaxSeqNo = leaderMaxSeqNo; this.followerGlobalCheckpoint = followerGlobalCheckpoint; @@ -194,14 +206,14 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { Translog.Operation op = buffer.remove(); ops.add(op); sumEstimatedSize += op.estimateSize(); - if (sumEstimatedSize > params.getMaxBatchSizeInBytes()) { + if (sumEstimatedSize > params.getMaxBatchSize().getBytes()) { break; } } numConcurrentWrites++; LOGGER.trace("{}[{}] write [{}/{}] [{}]", params.getFollowShardId(), numConcurrentWrites, ops.get(0).seqNo(), ops.get(ops.size() - 1).seqNo(), ops.size()); - sendBulkShardOperationsRequest(ops); + sendBulkShardOperationsRequest(ops, leaderMaxSeqNoOfUpdatesOrDeletes, new AtomicInteger(0)); } } @@ -262,6 +274,7 @@ onOperationsFetched(response.getOperations()); leaderGlobalCheckpoint = Math.max(leaderGlobalCheckpoint, response.getGlobalCheckpoint()); leaderMaxSeqNo = Math.max(leaderMaxSeqNo, response.getMaxSeqNo()); + leaderMaxSeqNoOfUpdatesOrDeletes = SequenceNumbers.max(leaderMaxSeqNoOfUpdatesOrDeletes, response.getMaxSeqNoOfUpdatesOrDeletes()); final long newFromSeqNo; if (response.getOperations().length == 0) { newFromSeqNo = from; @@ -291,13 +304,11 @@ } } - private void sendBulkShardOperationsRequest(List<Translog.Operation> operations) { - sendBulkShardOperationsRequest(operations, new AtomicInteger(0)); - } - - private void sendBulkShardOperationsRequest(List<Translog.Operation> operations, AtomicInteger retryCounter) { + private void sendBulkShardOperationsRequest(List<Translog.Operation> 
operations, long leaderMaxSeqNoOfUpdatesOrDeletes, + AtomicInteger retryCounter) { + assert leaderMaxSeqNoOfUpdatesOrDeletes != SequenceNumbers.UNASSIGNED_SEQ_NO : "mus is not replicated"; final long startTime = relativeTimeProvider.getAsLong(); - innerSendBulkShardOperationsRequest(operations, + innerSendBulkShardOperationsRequest(followerHistoryUUID, operations, leaderMaxSeqNoOfUpdatesOrDeletes, response -> { synchronized (ShardFollowNodeTask.this) { totalIndexTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime); @@ -311,7 +322,8 @@ totalIndexTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime); numberOfFailedBulkOperations++; } - handleFailure(e, retryCounter, () -> sendBulkShardOperationsRequest(operations, retryCounter)); + handleFailure(e, retryCounter, + () -> sendBulkShardOperationsRequest(operations, leaderMaxSeqNoOfUpdatesOrDeletes, retryCounter)); } ); } @@ -374,17 +386,32 @@ return Math.min(backOffDelay, maxRetryDelayInMillis); } - private static boolean shouldRetry(Exception e) { - return NetworkExceptionHelper.isConnectException(e) || - NetworkExceptionHelper.isCloseConnectionException(e) || - TransportActions.isShardNotAvailableException(e); + static boolean shouldRetry(Exception e) { + if (NetworkExceptionHelper.isConnectException(e)) { + return true; + } else if (NetworkExceptionHelper.isCloseConnectionException(e)) { + return true; + } + + final Throwable actual = ExceptionsHelper.unwrapCause(e); + return actual instanceof ShardNotFoundException || + actual instanceof IllegalIndexShardStateException || + actual instanceof NoShardAvailableActionException || + actual instanceof UnavailableShardsException || + actual instanceof AlreadyClosedException || + actual instanceof ElasticsearchSecurityException || // If user does not have sufficient privileges + actual instanceof ClusterBlockException || // If leader index is closed or no elected master + actual instanceof IndexClosedException; // If follow index is closed } // These methods are protected for testing purposes: protected abstract void innerUpdateMapping(LongConsumer handler, Consumer<Exception> errorHandler); - protected abstract void innerSendBulkShardOperationsRequest( - List<Translog.Operation> operations, Consumer<BulkShardOperationsResponse> handler, Consumer<Exception> errorHandler); + protected abstract void innerSendBulkShardOperationsRequest(String followerHistoryUUID, + List<Translog.Operation> operations, + long leaderMaxSeqNoOfUpdatesOrDeletes, + Consumer<BulkShardOperationsResponse> handler, + Consumer<Exception> errorHandler); protected abstract void innerSendShardChangesRequest(long from, int maxOperationCount, Consumer<ShardChangesAction.Response> handler, Consumer<Exception> errorHandler); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java index 2a01f72ca77..85afd8be28a 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import 
org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; @@ -45,18 +46,17 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { static final ParseField HEADERS = new ParseField("headers"); public static final ParseField MAX_BATCH_OPERATION_COUNT = new ParseField("max_batch_operation_count"); public static final ParseField MAX_CONCURRENT_READ_BATCHES = new ParseField("max_concurrent_read_batches"); - public static final ParseField MAX_BATCH_SIZE_IN_BYTES = new ParseField("max_batch_size_in_bytes"); + public static final ParseField MAX_BATCH_SIZE = new ParseField("max_batch_size"); public static final ParseField MAX_CONCURRENT_WRITE_BATCHES = new ParseField("max_concurrent_write_batches"); public static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size"); public static final ParseField MAX_RETRY_DELAY = new ParseField("max_retry_delay"); public static final ParseField POLL_TIMEOUT = new ParseField("poll_timeout"); - public static final ParseField RECORDED_HISTORY_UUID = new ParseField("recorded_history_uuid"); @SuppressWarnings("unchecked") private static ConstructingObjectParser<ShardFollowTask, Void> PARSER = new ConstructingObjectParser<>(NAME, (a) -> new ShardFollowTask((String) a[0], new ShardId((String) a[1], (String) a[2], (int) a[3]), - new ShardId((String) a[4], (String) a[5], (int) a[6]), (int) a[7], (int) a[8], (long) a[9], - (int) a[10], (int) a[11], (TimeValue) a[12], (TimeValue) a[13], (String) a[14], (Map<String, String>) a[15])); + new ShardId((String) a[4], (String) a[5], (int) a[6]), (int) a[7], (int) a[8], (ByteSizeValue) a[9], - (int) a[10], (int) a[11], (TimeValue) a[12], (TimeValue) a[13], (Map<String, String>) a[14])); static { PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), LEADER_CLUSTER_ALIAS_FIELD); @@ -68,7 +68,11 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { PARSER.declareInt(ConstructingObjectParser.constructorArg(), LEADER_SHARD_SHARDID_FIELD); PARSER.declareInt(ConstructingObjectParser.constructorArg(), MAX_BATCH_OPERATION_COUNT); PARSER.declareInt(ConstructingObjectParser.constructorArg(), MAX_CONCURRENT_READ_BATCHES); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), MAX_BATCH_SIZE_IN_BYTES); + PARSER.declareField( + ConstructingObjectParser.constructorArg(), + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_BATCH_SIZE.getPreferredName()), + MAX_BATCH_SIZE, + ObjectParser.ValueType.STRING); PARSER.declareInt(ConstructingObjectParser.constructorArg(), MAX_CONCURRENT_WRITE_BATCHES); PARSER.declareInt(ConstructingObjectParser.constructorArg(), MAX_WRITE_BUFFER_SIZE); PARSER.declareField(ConstructingObjectParser.constructorArg(), @@ -77,7 +81,6 @@ PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> TimeValue.parseTimeValue(p.text(), POLL_TIMEOUT.getPreferredName()), POLL_TIMEOUT, ObjectParser.ValueType.STRING); - PARSER.declareString(ConstructingObjectParser.constructorArg(), RECORDED_HISTORY_UUID); PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), HEADERS); } @@ -86,12 +89,11 @@ private final String leaderClusterAlias; private final ShardId followShardId; private final ShardId leaderShardId; private final int maxBatchOperationCount; private final int maxConcurrentReadBatches; - private final long maxBatchSizeInBytes; + private final ByteSizeValue maxBatchSize; private final int 
maxConcurrentWriteBatches; private final int maxWriteBufferSize; private final TimeValue maxRetryDelay; private final TimeValue pollTimeout; - private final String recordedLeaderIndexHistoryUUID; private final Map<String, String> headers; ShardFollowTask( @@ -100,24 +102,22 @@ final ShardId leaderShardId, final int maxBatchOperationCount, final int maxConcurrentReadBatches, - final long maxBatchSizeInBytes, + final ByteSizeValue maxBatchSize, final int maxConcurrentWriteBatches, final int maxWriteBufferSize, final TimeValue maxRetryDelay, final TimeValue pollTimeout, - final String recordedLeaderIndexHistoryUUID, final Map<String, String> headers) { this.leaderClusterAlias = leaderClusterAlias; this.followShardId = followShardId; this.leaderShardId = leaderShardId; this.maxBatchOperationCount = maxBatchOperationCount; this.maxConcurrentReadBatches = maxConcurrentReadBatches; - this.maxBatchSizeInBytes = maxBatchSizeInBytes; + this.maxBatchSize = maxBatchSize; this.maxConcurrentWriteBatches = maxConcurrentWriteBatches; this.maxWriteBufferSize = maxWriteBufferSize; this.maxRetryDelay = maxRetryDelay; this.pollTimeout = pollTimeout; - this.recordedLeaderIndexHistoryUUID = recordedLeaderIndexHistoryUUID; this.headers = headers != null ? Collections.unmodifiableMap(headers) : Collections.emptyMap(); } @@ -127,12 +127,11 @@ this.leaderShardId = ShardId.readShardId(in); this.maxBatchOperationCount = in.readVInt(); this.maxConcurrentReadBatches = in.readVInt(); - this.maxBatchSizeInBytes = in.readVLong(); + this.maxBatchSize = new ByteSizeValue(in); this.maxConcurrentWriteBatches = in.readVInt(); this.maxWriteBufferSize = in.readVInt(); this.maxRetryDelay = in.readTimeValue(); this.pollTimeout = in.readTimeValue(); - this.recordedLeaderIndexHistoryUUID = in.readString(); this.headers = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readString)); } @@ -164,8 +163,8 @@ return maxWriteBufferSize; } - public long getMaxBatchSizeInBytes() { - return maxBatchSizeInBytes; + public ByteSizeValue getMaxBatchSize() { + return maxBatchSize; } public TimeValue getMaxRetryDelay() { @@ -180,10 +179,6 @@ return followShardId.getIndex().getUUID() + "-" + followShardId.getId(); } - public String getRecordedLeaderIndexHistoryUUID() { - return recordedLeaderIndexHistoryUUID; - } - public Map<String, String> getHeaders() { return headers; } @@ -200,12 +195,11 @@ leaderShardId.writeTo(out); out.writeVLong(maxBatchOperationCount); out.writeVInt(maxConcurrentReadBatches); - out.writeVLong(maxBatchSizeInBytes); + maxBatchSize.writeTo(out); out.writeVInt(maxConcurrentWriteBatches); out.writeVInt(maxWriteBufferSize); out.writeTimeValue(maxRetryDelay); out.writeTimeValue(pollTimeout); - out.writeString(recordedLeaderIndexHistoryUUID); out.writeMap(headers, StreamOutput::writeString, StreamOutput::writeString); } @@ -227,12 +221,11 @@ builder.field(LEADER_SHARD_SHARDID_FIELD.getPreferredName(), leaderShardId.id()); builder.field(MAX_BATCH_OPERATION_COUNT.getPreferredName(), maxBatchOperationCount); builder.field(MAX_CONCURRENT_READ_BATCHES.getPreferredName(), 
maxConcurrentReadBatches); - builder.field(MAX_BATCH_SIZE_IN_BYTES.getPreferredName(), maxBatchSizeInBytes); + builder.field(MAX_BATCH_SIZE.getPreferredName(), maxBatchSize.getStringRep()); builder.field(MAX_CONCURRENT_WRITE_BATCHES.getPreferredName(), maxConcurrentWriteBatches); builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize); builder.field(MAX_RETRY_DELAY.getPreferredName(), maxRetryDelay.getStringRep()); builder.field(POLL_TIMEOUT.getPreferredName(), pollTimeout.getStringRep()); - builder.field(RECORDED_HISTORY_UUID.getPreferredName(), recordedLeaderIndexHistoryUUID); builder.field(HEADERS.getPreferredName(), headers); return builder.endObject(); } @@ -248,29 +241,27 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { maxBatchOperationCount == that.maxBatchOperationCount && maxConcurrentReadBatches == that.maxConcurrentReadBatches && maxConcurrentWriteBatches == that.maxConcurrentWriteBatches && - maxBatchSizeInBytes == that.maxBatchSizeInBytes && + maxBatchSize.equals(that.maxBatchSize) && maxWriteBufferSize == that.maxWriteBufferSize && Objects.equals(maxRetryDelay, that.maxRetryDelay) && Objects.equals(pollTimeout, that.pollTimeout) && - Objects.equals(recordedLeaderIndexHistoryUUID, that.recordedLeaderIndexHistoryUUID) && Objects.equals(headers, that.headers); } @Override public int hashCode() { return Objects.hash( - leaderClusterAlias, - followShardId, - leaderShardId, - maxBatchOperationCount, - maxConcurrentReadBatches, - maxConcurrentWriteBatches, - maxBatchSizeInBytes, - maxWriteBufferSize, - maxRetryDelay, - pollTimeout, - recordedLeaderIndexHistoryUUID, - headers + leaderClusterAlias, + followShardId, + leaderShardId, + maxBatchOperationCount, + maxConcurrentReadBatches, + maxConcurrentWriteBatches, + maxBatchSize, + maxWriteBufferSize, + maxRetryDelay, + pollTimeout, + headers ); } @@ -280,6 +271,6 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { @Override public Version getMinimalSupportedVersion() { - return Version.V_6_4_0; + return Version.V_6_5_0; } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java index d473091f80c..fddb779fdc2 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.ccr.action; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; @@ -16,13 +17,18 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexNotFoundException; +import 
org.elasticsearch.index.engine.CommitStats; +import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.persistent.PersistentTaskState; @@ -44,16 +50,19 @@ import java.util.function.Consumer; import java.util.function.LongConsumer; import static org.elasticsearch.xpack.ccr.CcrLicenseChecker.wrapClient; +import static org.elasticsearch.xpack.ccr.action.TransportResumeFollowAction.extractLeaderShardHistoryUUIDs; public class ShardFollowTasksExecutor extends PersistentTasksExecutor { private final Client client; private final ThreadPool threadPool; + private final ClusterService clusterService; - public ShardFollowTasksExecutor(Settings settings, Client client, ThreadPool threadPool) { + public ShardFollowTasksExecutor(Settings settings, Client client, ThreadPool threadPool, ClusterService clusterService) { super(settings, ShardFollowTask.NAME, Ccr.CCR_THREAD_POOL_NAME); this.client = client; this.threadPool = threadPool; + this.clusterService = clusterService; } @Override @@ -96,8 +105,10 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor errorHandler) { @@ -132,10 +143,14 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor operations, - final Consumer handler, - final Consumer errorHandler) { - final BulkShardOperationsRequest request = new BulkShardOperationsRequest(params.getFollowShardId(), operations); + final String followerHistoryUUID, + final List operations, + final long maxSeqNoOfUpdatesOrDeletes, + final Consumer handler, + final Consumer errorHandler) { + + final BulkShardOperationsRequest request = new BulkShardOperationsRequest(params.getFollowShardId(), + followerHistoryUUID, operations, maxSeqNoOfUpdatesOrDeletes); followerClient.execute(BulkShardOperationsAction.INSTANCE, request, ActionListener.wrap(response -> handler.accept(response), errorHandler)); } @@ -144,47 +159,80 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor handler, Consumer errorHandler) { ShardChangesAction.Request request = - new ShardChangesAction.Request(params.getLeaderShardId(), params.getRecordedLeaderIndexHistoryUUID()); + new ShardChangesAction.Request(params.getLeaderShardId(), recordedLeaderShardHistoryUUID); request.setFromSeqNo(from); request.setMaxOperationCount(maxOperationCount); - request.setMaxOperationSizeInBytes(params.getMaxBatchSizeInBytes()); + request.setMaxBatchSize(params.getMaxBatchSize()); request.setPollTimeout(params.getPollTimeout()); leaderClient.execute(ShardChangesAction.INSTANCE, request, ActionListener.wrap(handler::accept, errorHandler)); } }; } - interface BiLongConsumer { - void accept(long x, long y); + private String getLeaderShardHistoryUUID(ShardFollowTask params) { + IndexMetaData followIndexMetaData = clusterService.state().metaData().index(params.getFollowShardId().getIndex()); + Map ccrIndexMetadata = followIndexMetaData.getCustomData(Ccr.CCR_CUSTOM_METADATA_KEY); + String[] recordedLeaderShardHistoryUUIDs = extractLeaderShardHistoryUUIDs(ccrIndexMetadata); + return recordedLeaderShardHistoryUUIDs[params.getLeaderShardId().id()]; + } + + interface FollowerStatsInfoHandler { + void accept(String followerHistoryUUID, long globalCheckpoint, long maxSeqNo); } @Override protected void nodeOperation(final AllocatedPersistentTask 
task, final ShardFollowTask params, final PersistentTaskState state) { Client followerClient = wrapClient(client, params.getHeaders()); ShardFollowNodeTask shardFollowNodeTask = (ShardFollowNodeTask) task; - logger.info("{} Started to track leader shard {}", params.getFollowShardId(), params.getLeaderShardId()); - fetchGlobalCheckpoint(followerClient, params.getFollowShardId(), - (followerGCP, maxSeqNo) -> shardFollowNodeTask.start(followerGCP, maxSeqNo, followerGCP, maxSeqNo), task::markAsFailed); + logger.info("{} Starting to track leader shard {}", params.getFollowShardId(), params.getLeaderShardId()); + + FollowerStatsInfoHandler handler = (followerHistoryUUID, followerGCP, maxSeqNo) -> { + shardFollowNodeTask.start(followerHistoryUUID, followerGCP, maxSeqNo, followerGCP, maxSeqNo); + }; + Consumer<Exception> errorHandler = e -> { + if (shardFollowNodeTask.isStopped()) { + return; + } + + if (ShardFollowNodeTask.shouldRetry(e)) { + logger.debug(new ParameterizedMessage("failed to fetch follow shard {} global checkpoint and max sequence number", + shardFollowNodeTask), e); + threadPool.schedule(params.getMaxRetryDelay(), Ccr.CCR_THREAD_POOL_NAME, () -> nodeOperation(task, params, state)); + } else { + shardFollowNodeTask.markAsFailed(e); + } + }; + + fetchFollowerShardInfo(followerClient, params.getFollowShardId(), handler, errorHandler); } - private void fetchGlobalCheckpoint( + private void fetchFollowerShardInfo( final Client client, final ShardId shardId, - final BiLongConsumer handler, + final FollowerStatsInfoHandler handler, final Consumer<Exception> errorHandler) { client.admin().indices().stats(new IndicesStatsRequest().indices(shardId.getIndexName()), ActionListener.wrap(r -> { IndexStats indexStats = r.getIndex(shardId.getIndexName()); + if (indexStats == null) { + errorHandler.accept(new IndexNotFoundException(shardId.getIndex())); + return; + } + Optional<ShardStats> filteredShardStats = Arrays.stream(indexStats.getShards()) .filter(shardStats -> shardStats.getShardRouting().shardId().equals(shardId)) .filter(shardStats -> shardStats.getShardRouting().primary()) .findAny(); if (filteredShardStats.isPresent()) { - final SeqNoStats seqNoStats = filteredShardStats.get().getSeqNoStats(); + final ShardStats shardStats = filteredShardStats.get(); + final CommitStats commitStats = shardStats.getCommitStats(); + final String historyUUID = commitStats.getUserData().get(Engine.HISTORY_UUID_KEY); + + final SeqNoStats seqNoStats = shardStats.getSeqNoStats(); final long globalCheckpoint = seqNoStats.getGlobalCheckpoint(); final long maxSeqNo = seqNoStats.getMaxSeqNo(); - handler.accept(globalCheckpoint, maxSeqNo); + handler.accept(historyUUID, globalCheckpoint, maxSeqNo); } else { - errorHandler.accept(new IllegalArgumentException("Cannot find shard stats for shard " + shardId)); + errorHandler.accept(new ShardNotFoundException(shardId)); } }, errorHandler)); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternAction.java index 8d2e59defd8..974bca98859 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternAction.java @@ -85,10 +85,12 @@ public class TransportDeleteAutoFollowPatternAction extends final Map<String, AutoFollowPattern> patternsCopy = new HashMap<>(patterns); final Map<String, List<String>>
followedLeaderIndexUUIDSCopy = new HashMap<>(currentAutoFollowMetadata.getFollowedLeaderIndexUUIDs()); + final Map> headers = new HashMap<>(currentAutoFollowMetadata.getHeaders()); patternsCopy.remove(request.getLeaderClusterAlias()); followedLeaderIndexUUIDSCopy.remove(request.getLeaderClusterAlias()); + headers.remove(request.getLeaderClusterAlias()); - AutoFollowMetadata newAutoFollowMetadata = new AutoFollowMetadata(patternsCopy, followedLeaderIndexUUIDSCopy); + AutoFollowMetadata newAutoFollowMetadata = new AutoFollowMetadata(patternsCopy, followedLeaderIndexUUIDSCopy, headers); ClusterState.Builder newState = ClusterState.builder(currentState); newState.metaData(MetaData.builder(currentState.getMetaData()) .putCustom(AutoFollowMetadata.TYPE, newAutoFollowMetadata) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternAction.java new file mode 100644 index 00000000000..b5b13873157 --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternAction.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; +import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction; + +import java.util.Collections; +import java.util.Map; + +public class TransportGetAutoFollowPatternAction + extends TransportMasterNodeReadAction { + + @Inject + public TransportGetAutoFollowPatternAction(Settings settings, + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver) { + super(settings, GetAutoFollowPatternAction.NAME, transportService, clusterService, threadPool, actionFilters, + GetAutoFollowPatternAction.Request::new, indexNameExpressionResolver); + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected GetAutoFollowPatternAction.Response newResponse() { + return new GetAutoFollowPatternAction.Response(); + } + + @Override + protected void masterOperation(GetAutoFollowPatternAction.Request request, + ClusterState state, + ActionListener listener) throws Exception { + Map autoFollowPatterns = 
getAutoFollowPattern(state.metaData(), request.getLeaderClusterAlias()); + listener.onResponse(new GetAutoFollowPatternAction.Response(autoFollowPatterns)); + } + + @Override + protected ClusterBlockException checkBlock(GetAutoFollowPatternAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); + } + + static Map getAutoFollowPattern(MetaData metaData, String leaderClusterAlias) { + AutoFollowMetadata autoFollowMetadata = metaData.custom(AutoFollowMetadata.TYPE); + if (autoFollowMetadata == null) { + throw new ResourceNotFoundException("no auto-follow patterns for cluster alias [{}] found", leaderClusterAlias); + } + + if (leaderClusterAlias == null) { + return autoFollowMetadata.getPatterns(); + } + + AutoFollowPattern autoFollowPattern = autoFollowMetadata.getPatterns().get(leaderClusterAlias); + if (autoFollowPattern == null) { + throw new ResourceNotFoundException("no auto-follow patterns for cluster alias [{}] found", leaderClusterAlias); + } + return Collections.singletonMap(leaderClusterAlias, autoFollowPattern); + } +} diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowIndexAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPauseFollowAction.java similarity index 91% rename from x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowIndexAction.java rename to x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPauseFollowAction.java index 05cde0eab85..041d5e3f429 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowIndexAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPauseFollowAction.java @@ -19,24 +19,24 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ccr.action.UnfollowIndexAction; +import org.elasticsearch.xpack.core.ccr.action.PauseFollowAction; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReferenceArray; -public class TransportUnfollowIndexAction extends HandledTransportAction { +public class TransportPauseFollowAction extends HandledTransportAction { private final Client client; private final PersistentTasksService persistentTasksService; @Inject - public TransportUnfollowIndexAction( + public TransportPauseFollowAction( final Settings settings, final TransportService transportService, final ActionFilters actionFilters, final Client client, final PersistentTasksService persistentTasksService) { - super(settings, UnfollowIndexAction.NAME, transportService, actionFilters, UnfollowIndexAction.Request::new); + super(settings, PauseFollowAction.NAME, transportService, actionFilters, PauseFollowAction.Request::new); this.client = client; this.persistentTasksService = persistentTasksService; } @@ -44,7 +44,7 @@ public class TransportUnfollowIndexAction extends HandledTransportAction listener) { client.admin().cluster().state(new ClusterStateRequest(), ActionListener.wrap(r -> { @@ -102,4 +102,4 @@ public class TransportUnfollowIndexAction extends HandledTransportAction ShardFollowTask.HEADER_FILTERS.contains(e.getKey())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - leaderClient.admin().cluster().state( - 
clusterStateRequest, - ActionListener.wrap( + String[] indices = request.getLeaderIndexPatterns().toArray(new String[0]); + ccrLicenseChecker.hasPrivilegesToFollowIndices(leaderClient, indices, e -> { + if (e == null) { + leaderClient.admin().cluster().state( + clusterStateRequest, + ActionListener.wrap( clusterStateResponse -> { final ClusterState leaderClusterState = clusterStateResponse.getState(); clusterService.submitStateUpdateTask("put-auto-follow-pattern-" + request.getLeaderClusterAlias(), - new AckedClusterStateUpdateTask(request, listener) { + new AckedClusterStateUpdateTask(request, listener) { - @Override - protected AcknowledgedResponse newResponse(boolean acknowledged) { - return new AcknowledgedResponse(acknowledged); - } + @Override + protected AcknowledgedResponse newResponse(boolean acknowledged) { + return new AcknowledgedResponse(acknowledged); + } - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - return innerPut(request, filteredHeaders, currentState, leaderClusterState); - } - }); + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + return innerPut(request, filteredHeaders, currentState, leaderClusterState); + } + }); }, listener::onFailure)); + } else { + listener.onFailure(e); + } + }); } static ClusterState innerPut(PutAutoFollowPatternAction.Request request, @@ -123,21 +130,25 @@ public class TransportPutAutoFollowPatternAction extends AutoFollowMetadata currentAutoFollowMetadata = localState.metaData().custom(AutoFollowMetadata.TYPE); Map> followedLeaderIndices; Map patterns; + Map> headers; if (currentAutoFollowMetadata != null) { patterns = new HashMap<>(currentAutoFollowMetadata.getPatterns()); followedLeaderIndices = new HashMap<>(currentAutoFollowMetadata.getFollowedLeaderIndexUUIDs()); + headers = new HashMap<>(currentAutoFollowMetadata.getHeaders()); } else { patterns = new HashMap<>(); followedLeaderIndices = new HashMap<>(); + headers = new HashMap<>(); } AutoFollowPattern previousPattern = patterns.get(request.getLeaderClusterAlias()); - List followedIndexUUIDs = followedLeaderIndices.get(request.getLeaderClusterAlias()); - if (followedIndexUUIDs == null) { + final List followedIndexUUIDs; + if (followedLeaderIndices.containsKey(request.getLeaderClusterAlias())) { + followedIndexUUIDs = new ArrayList<>(followedLeaderIndices.get(request.getLeaderClusterAlias())); + } else { followedIndexUUIDs = new ArrayList<>(); - followedLeaderIndices.put(request.getLeaderClusterAlias(), followedIndexUUIDs); } - + followedLeaderIndices.put(request.getLeaderClusterAlias(), followedIndexUUIDs); // Mark existing leader indices as already auto followed: if (previousPattern != null) { markExistingIndicesAsAutoFollowedForNewPatterns(request.getLeaderIndexPatterns(), leaderClusterState.metaData(), @@ -147,21 +158,24 @@ public class TransportPutAutoFollowPatternAction extends followedIndexUUIDs); } + if (filteredHeaders != null) { + headers.put(request.getLeaderClusterAlias(), filteredHeaders); + } + AutoFollowPattern autoFollowPattern = new AutoFollowPattern( request.getLeaderIndexPatterns(), request.getFollowIndexNamePattern(), request.getMaxBatchOperationCount(), request.getMaxConcurrentReadBatches(), - request.getMaxOperationSizeInBytes(), + request.getMaxBatchSize(), request.getMaxConcurrentWriteBatches(), request.getMaxWriteBufferSize(), request.getMaxRetryDelay(), - request.getPollTimeout(), - filteredHeaders); + request.getPollTimeout()); patterns.put(request.getLeaderClusterAlias(), 
autoFollowPattern); ClusterState.Builder newState = ClusterState.builder(localState); newState.metaData(MetaData.builder(localState.getMetaData()) - .putCustom(AutoFollowMetadata.TYPE, new AutoFollowMetadata(patterns, followedLeaderIndices)) + .putCustom(AutoFollowMetadata.TYPE, new AutoFollowMetadata(patterns, followedLeaderIndices, headers)) .build()); return newState.build(); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCreateAndFollowIndexAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java similarity index 82% rename from x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCreateAndFollowIndexAction.java rename to x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index fd421a9380b..122aef0b3e8 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCreateAndFollowIndexAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -37,8 +37,8 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ccr.Ccr; import org.elasticsearch.xpack.ccr.CcrLicenseChecker; import org.elasticsearch.xpack.ccr.CcrSettings; -import org.elasticsearch.xpack.core.ccr.action.CreateAndFollowIndexAction; -import org.elasticsearch.xpack.core.ccr.action.FollowIndexAction; +import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; +import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction; import java.util.HashMap; import java.util.List; @@ -46,8 +46,8 @@ import java.util.Map; import java.util.Objects; import java.util.function.Consumer; -public final class TransportCreateAndFollowIndexAction - extends TransportMasterNodeAction { +public final class TransportPutFollowAction + extends TransportMasterNodeAction { private final Client client; private final AllocationService allocationService; @@ -56,7 +56,7 @@ public final class TransportCreateAndFollowIndexAction private final CcrLicenseChecker ccrLicenseChecker; @Inject - public TransportCreateAndFollowIndexAction( + public TransportPutFollowAction( final Settings settings, final ThreadPool threadPool, final TransportService transportService, @@ -68,13 +68,13 @@ public final class TransportCreateAndFollowIndexAction final CcrLicenseChecker ccrLicenseChecker) { super( settings, - CreateAndFollowIndexAction.NAME, + PutFollowAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, - CreateAndFollowIndexAction.Request::new); + PutFollowAction.Request::new); this.client = client; this.allocationService = allocationService; this.remoteClusterService = transportService.getRemoteClusterService(); @@ -88,15 +88,15 @@ public final class TransportCreateAndFollowIndexAction } @Override - protected CreateAndFollowIndexAction.Response newResponse() { - return new CreateAndFollowIndexAction.Response(); + protected PutFollowAction.Response newResponse() { + return new PutFollowAction.Response(); } @Override protected void masterOperation( - final CreateAndFollowIndexAction.Request request, + final PutFollowAction.Request request, final ClusterState state, - final ActionListener listener) throws Exception { + final ActionListener listener) throws Exception { if (ccrLicenseChecker.isCcrAllowed() == false) { listener.onFailure(LicenseUtils.newComplianceException("ccr")); return; @@ -116,9 +116,9 @@ public final class 
TransportCreateAndFollowIndexAction } private void createFollowerIndexAndFollowLocalIndex( - final CreateAndFollowIndexAction.Request request, + final PutFollowAction.Request request, final ClusterState state, - final ActionListener listener) { + final ActionListener listener) { // following an index in local cluster, so use local cluster state to fetch leader index metadata final String leaderIndex = request.getFollowRequest().getLeaderIndex(); final IndexMetaData leaderIndexMetadata = state.getMetaData().index(leaderIndex); @@ -127,17 +127,23 @@ public final class TransportCreateAndFollowIndexAction return; } - Consumer handler = historyUUIDs -> { + Consumer historyUUIDhandler = historyUUIDs -> { createFollowerIndex(leaderIndexMetadata, historyUUIDs, request, listener); }; - ccrLicenseChecker.fetchLeaderHistoryUUIDs(client, leaderIndexMetadata, listener::onFailure, handler); + ccrLicenseChecker.hasPrivilegesToFollowIndices(client, new String[] {leaderIndex}, e -> { + if (e == null) { + ccrLicenseChecker.fetchLeaderHistoryUUIDs(client, leaderIndexMetadata, listener::onFailure, historyUUIDhandler); + } else { + listener.onFailure(e); + } + }); } private void createFollowerIndexAndFollowRemoteIndex( - final CreateAndFollowIndexAction.Request request, + final PutFollowAction.Request request, final String clusterAlias, final String leaderIndex, - final ActionListener listener) { + final ActionListener listener) { ccrLicenseChecker.checkRemoteClusterLicenseAndFetchLeaderIndexMetadataAndHistoryUUIDs( client, clusterAlias, @@ -149,8 +155,8 @@ public final class TransportCreateAndFollowIndexAction private void createFollowerIndex( final IndexMetaData leaderIndexMetaData, final String[] historyUUIDs, - final CreateAndFollowIndexAction.Request request, - final ActionListener listener) { + final PutFollowAction.Request request, + final ActionListener listener) { if (leaderIndexMetaData == null) { listener.onFailure(new IllegalArgumentException("leader index [" + request.getFollowRequest().getLeaderIndex() + "] does not exist")); @@ -162,13 +168,13 @@ public final class TransportCreateAndFollowIndexAction if (result) { initiateFollowing(request, listener); } else { - listener.onResponse(new CreateAndFollowIndexAction.Response(true, false, false)); + listener.onResponse(new PutFollowAction.Response(true, false, false)); } }, listener::onFailure); // Can't use create index api here, because then index templates can alter the mappings / settings. // And index templates could introduce settings / mappings that are incompatible with the leader index. 
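The comment above captures the central design decision in this handler: the follower index is written straight into the cluster state instead of going through the create-index API, so index templates never get a chance to rewrite its settings or mappings. A minimal sketch of that pattern, assuming the 6.x cluster-state APIs; the class and parameter names (`DirectFollowerIndexSketch`, `followerIndexName`) are hypothetical, and the real change (next hunk) also copies mappings, shard counts, and the CCR custom metadata:

```java
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ack.AckedRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;

final class DirectFollowerIndexSketch {

    // Builds the follower IndexMetaData directly from the leader's metadata inside a
    // cluster-state update task, bypassing the create-index API and its template machinery.
    static void createFollowerIndex(ClusterService clusterService, AckedRequest request,
                                    IndexMetaData leaderIndexMetaData, String followerIndexName,
                                    ActionListener<Boolean> listener) {
        clusterService.submitStateUpdateTask("create_following_index",
            new AckedClusterStateUpdateTask<Boolean>(request, listener) {

                @Override
                protected Boolean newResponse(boolean acknowledged) {
                    return acknowledged;
                }

                @Override
                public ClusterState execute(ClusterState currentState) {
                    // Copy the leader's settings verbatim; no index template can interfere here.
                    IndexMetaData.Builder follower = IndexMetaData.builder(followerIndexName)
                        .settings(leaderIndexMetaData.getSettings());
                    return ClusterState.builder(currentState)
                        .metaData(MetaData.builder(currentState.metaData()).put(follower))
                        .build();
                }
            });
    }
}
```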
- clusterService.submitStateUpdateTask("follow_index_action", new AckedClusterStateUpdateTask(request, handler) { + clusterService.submitStateUpdateTask("create_following_index", new AckedClusterStateUpdateTask(request, handler) { @Override protected Boolean newResponse(final boolean acknowledged) { @@ -228,23 +234,23 @@ public final class TransportCreateAndFollowIndexAction } private void initiateFollowing( - final CreateAndFollowIndexAction.Request request, - final ActionListener listener) { + final PutFollowAction.Request request, + final ActionListener listener) { activeShardsObserver.waitForActiveShards(new String[]{request.getFollowRequest().getFollowerIndex()}, ActiveShardCount.DEFAULT, request.timeout(), result -> { if (result) { - client.execute(FollowIndexAction.INSTANCE, request.getFollowRequest(), ActionListener.wrap( - r -> listener.onResponse(new CreateAndFollowIndexAction.Response(true, true, r.isAcknowledged())), + client.execute(ResumeFollowAction.INSTANCE, request.getFollowRequest(), ActionListener.wrap( + r -> listener.onResponse(new PutFollowAction.Response(true, true, r.isAcknowledged())), listener::onFailure )); } else { - listener.onResponse(new CreateAndFollowIndexAction.Response(true, false, false)); + listener.onResponse(new PutFollowAction.Response(true, false, false)); } }, listener::onFailure); } @Override - protected ClusterBlockException checkBlock(final CreateAndFollowIndexAction.Request request, final ClusterState state) { + protected ClusterBlockException checkBlock(final PutFollowAction.Request request, final ClusterState state) { return state.blocks().indexBlockedException(ClusterBlockLevel.METADATA_WRITE, request.getFollowRequest().getFollowerIndex()); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowIndexAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java similarity index 90% rename from x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowIndexAction.java rename to x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java index e9ee38fd1f9..24a5891dd37 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowIndexAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java @@ -19,6 +19,8 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSettings; @@ -40,7 +42,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ccr.Ccr; import org.elasticsearch.xpack.ccr.CcrLicenseChecker; import org.elasticsearch.xpack.ccr.CcrSettings; -import org.elasticsearch.xpack.core.ccr.action.FollowIndexAction; +import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction; import java.io.IOException; import java.util.Collections; @@ -54,9 +56,9 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReferenceArray; import java.util.stream.Collectors; -public class TransportFollowIndexAction extends HandledTransportAction { +public 
class TransportResumeFollowAction extends HandledTransportAction { - static final long DEFAULT_MAX_BATCH_SIZE_IN_BYTES = Long.MAX_VALUE; + static final ByteSizeValue DEFAULT_MAX_BATCH_SIZE = new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES); private static final TimeValue DEFAULT_MAX_RETRY_DELAY = new TimeValue(500); private static final int DEFAULT_MAX_CONCURRENT_WRITE_BATCHES = 1; private static final int DEFAULT_MAX_WRITE_BUFFER_SIZE = 10240; @@ -73,7 +75,7 @@ public class TransportFollowIndexAction extends HandledTransportAction listener) { if (ccrLicenseChecker.isCcrAllowed() == false) { listener.onFailure(LicenseUtils.newComplianceException("ccr")); @@ -115,7 +117,7 @@ public class TransportFollowIndexAction extends HandledTransportAction listener) { final ClusterState state = clusterService.state(); final IndexMetaData followerIndexMetadata = state.getMetaData().index(request.getFollowerIndex()); @@ -124,17 +126,23 @@ public class TransportFollowIndexAction extends HandledTransportAction { - try { - start(request, null, leaderIndexMetadata, followerIndexMetadata, historyUUIDs, listener); - } catch (final IOException e) { + ccrLicenseChecker.hasPrivilegesToFollowIndices(client, new String[] {request.getLeaderIndex()}, e -> { + if (e == null) { + ccrLicenseChecker.fetchLeaderHistoryUUIDs(client, leaderIndexMetadata, listener::onFailure, historyUUIDs -> { + try { + start(request, null, leaderIndexMetadata, followerIndexMetadata, historyUUIDs, listener); + } catch (final IOException ioe) { + listener.onFailure(ioe); + } + }); + } else { listener.onFailure(e); } }); } private void followRemoteIndex( - final FollowIndexAction.Request request, + final ResumeFollowAction.Request request, final String clusterAlias, final String leaderIndex, final ActionListener listener) { @@ -165,7 +173,7 @@ public class TransportFollowIndexAction extends HandledTransportAction */ void start( - FollowIndexAction.Request request, + ResumeFollowAction.Request request, String clusterNameAlias, IndexMetaData leaderIndexMetadata, IndexMetaData followIndexMetadata, @@ -184,12 +192,9 @@ public class TransportFollowIndexAction extends HandledTransportAction ccrIndexMetadata = followIndexMetadata.getCustomData(Ccr.CCR_CUSTOM_METADATA_KEY); - String[] recordedLeaderShardHistoryUUIDs = extractIndexShardHistoryUUIDs(ccrIndexMetadata); - String recordedLeaderShardHistoryUUID = recordedLeaderShardHistoryUUIDs[shardId]; - final ShardFollowTask shardFollowTask = createShardFollowTask(shardId, clusterNameAlias, request, - leaderIndexMetadata, followIndexMetadata, recordedLeaderShardHistoryUUID, filteredHeaders); + final ShardFollowTask shardFollowTask = createShardFollowTask(shardId, clusterNameAlias, request, + leaderIndexMetadata, followIndexMetadata, filteredHeaders); persistentTasksService.sendStartRequest(taskId, ShardFollowTask.NAME, shardFollowTask, new ActionListener>() { @Override @@ -233,7 +238,7 @@ public class TransportFollowIndexAction extends HandledTransportAction filteredHeaders ) { int maxBatchOperationCount; @@ -320,11 +324,11 @@ public class TransportFollowIndexAction extends HandledTransportAction ccrIndexMetaData) { + static String[] extractLeaderShardHistoryUUIDs(Map ccrIndexMetaData) { String historyUUIDs = ccrIndexMetaData.get(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_SHARD_HISTORY_UUIDS); + if (historyUUIDs == null) { + throw new IllegalArgumentException("leader index shard UUIDs are missing"); + } + return historyUUIDs.split(","); } diff --git 
a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java new file mode 100644 index 00000000000..1ce01f7ab09 --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ccr.Ccr; +import org.elasticsearch.xpack.ccr.CcrSettings; +import org.elasticsearch.xpack.core.ccr.action.UnfollowAction; + +public class TransportUnfollowAction extends TransportMasterNodeAction { + + @Inject + public TransportUnfollowAction(Settings settings, TransportService transportService, ClusterService clusterService, + ThreadPool threadPool, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver) { + super(settings, UnfollowAction.NAME, transportService, clusterService, threadPool, actionFilters, + UnfollowAction.Request::new, indexNameExpressionResolver); + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected AcknowledgedResponse newResponse() { + return new AcknowledgedResponse(); + } + + @Override + protected void masterOperation(UnfollowAction.Request request, + ClusterState state, + ActionListener listener) throws Exception { + clusterService.submitStateUpdateTask("unfollow_action", new ClusterStateUpdateTask() { + + @Override + public ClusterState execute(ClusterState current) throws Exception { + String followerIndex = request.getFollowerIndex(); + return unfollow(followerIndex, current); + } + + @Override + public void onFailure(String source, Exception e) { + listener.onFailure(e); + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + listener.onResponse(new AcknowledgedResponse(true)); + } + }); + } + + @Override + protected ClusterBlockException checkBlock(UnfollowAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } + + static ClusterState unfollow(String followerIndex, ClusterState current) { + IndexMetaData followerIMD = current.metaData().index(followerIndex); + + 
PersistentTasksCustomMetaData persistentTasks = current.metaData().custom(PersistentTasksCustomMetaData.TYPE); + if (persistentTasks != null) { + for (PersistentTasksCustomMetaData.PersistentTask persistentTask : persistentTasks.tasks()) { + if (persistentTask.getTaskName().equals(ShardFollowTask.NAME)) { + ShardFollowTask shardFollowTask = (ShardFollowTask) persistentTask.getParams(); + if (shardFollowTask.getFollowShardId().getIndexName().equals(followerIndex)) { + throw new IllegalArgumentException("cannot convert the follower index [" + followerIndex + + "] to a non-follower, because it has not been paused"); + } + } + } + } + + if (followerIMD.getState() != IndexMetaData.State.CLOSE) { + throw new IllegalArgumentException("cannot convert the follower index [" + followerIndex + + "] to a non-follower, because it has not been closed"); + } + + IndexMetaData.Builder newIMD = IndexMetaData.builder(followerIMD); + // Remove index.xpack.ccr.following_index setting + Settings.Builder builder = Settings.builder(); + builder.put(followerIMD.getSettings()); + builder.remove(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey()); + + newIMD.settings(builder); + // Remove ccr custom metadata + newIMD.removeCustom(Ccr.CCR_CUSTOM_METADATA_KEY); + + MetaData newMetaData = MetaData.builder(current.metaData()) + .put(newIMD) + .build(); + return ClusterState.builder(current) + .metaData(newMetaData) + .build(); + } +} diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsRequest.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsRequest.java index c28789fb580..cf9239af740 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsRequest.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsRequest.java @@ -16,30 +16,49 @@ import java.util.List; public final class BulkShardOperationsRequest extends ReplicatedWriteRequest { + private String historyUUID; private List operations; + private long maxSeqNoOfUpdatesOrDeletes; public BulkShardOperationsRequest() { } - public BulkShardOperationsRequest(final ShardId shardId, final List operations) { + public BulkShardOperationsRequest(final ShardId shardId, + final String historyUUID, + final List operations, + long maxSeqNoOfUpdatesOrDeletes) { super(shardId); setRefreshPolicy(RefreshPolicy.NONE); + this.historyUUID = historyUUID; this.operations = operations; + this.maxSeqNoOfUpdatesOrDeletes = maxSeqNoOfUpdatesOrDeletes; + } + + public String getHistoryUUID() { + return historyUUID; } public List getOperations() { return operations; } + public long getMaxSeqNoOfUpdatesOrDeletes() { + return maxSeqNoOfUpdatesOrDeletes; + } + @Override public void readFrom(final StreamInput in) throws IOException { super.readFrom(in); + historyUUID = in.readString(); + maxSeqNoOfUpdatesOrDeletes = in.readZLong(); operations = in.readList(Translog.Operation::readOperation); } @Override public void writeTo(final StreamOutput out) throws IOException { super.writeTo(out); + out.writeString(historyUUID); + out.writeZLong(maxSeqNoOfUpdatesOrDeletes); out.writeVInt(operations.size()); for (Translog.Operation operation : operations) { Translog.Operation.writeOperation(out, operation); @@ -49,7 +68,9 @@ public final class BulkShardOperationsRequest extends ReplicatedWriteRequest shardOperationOnPrimary( final BulkShardOperationsRequest request, final IndexShard primary) throws Exception { - return 
shardOperationOnPrimary(request.shardId(), request.getOperations(), primary, logger); + return shardOperationOnPrimary(request.shardId(), request.getHistoryUUID(), request.getOperations(), + request.getMaxSeqNoOfUpdatesOrDeletes(), primary, logger); } // public for testing purposes only public static WritePrimaryResult<BulkShardOperationsRequest, BulkShardOperationsResponse> shardOperationOnPrimary( final ShardId shardId, + final String historyUUID, final List<Translog.Operation> sourceOperations, + final long maxSeqNoOfUpdatesOrDeletes, final IndexShard primary, final Logger logger) throws IOException { + if (historyUUID.equalsIgnoreCase(primary.getHistoryUUID()) == false) { + throw new IllegalStateException("unexpected history uuid, expected [" + historyUUID + + "], actual [" + primary.getHistoryUUID() + "], shard is likely restored from snapshot or force allocated"); + } + final List<Translog.Operation> targetOperations = sourceOperations.stream().map(operation -> { final Translog.Operation operationWithPrimaryTerm; switch (operation.opType()) { @@ -103,14 +112,19 @@ } return operationWithPrimaryTerm; }).collect(Collectors.toList()); + assert maxSeqNoOfUpdatesOrDeletes >= SequenceNumbers.NO_OPS_PERFORMED : "invalid msu [" + maxSeqNoOfUpdatesOrDeletes + "]"; + primary.advanceMaxSeqNoOfUpdatesOrDeletes(maxSeqNoOfUpdatesOrDeletes); final Translog.Location location = applyTranslogOperations(targetOperations, primary, Engine.Operation.Origin.PRIMARY); - final BulkShardOperationsRequest replicaRequest = new BulkShardOperationsRequest(shardId, targetOperations); + final BulkShardOperationsRequest replicaRequest = new BulkShardOperationsRequest( + shardId, historyUUID, targetOperations, maxSeqNoOfUpdatesOrDeletes); return new CcrWritePrimaryResult(replicaRequest, location, primary, logger); } @Override protected WriteReplicaResult<BulkShardOperationsRequest> shardOperationOnReplica( final BulkShardOperationsRequest request, final IndexShard replica) throws Exception { + assert replica.getMaxSeqNoOfUpdatesOrDeletes() >= request.getMaxSeqNoOfUpdatesOrDeletes() : + "msu on replica [" + replica + "] < msu of request [" + request.getMaxSeqNoOfUpdatesOrDeletes() + "]"; final Translog.Location location = applyTranslogOperations(request.getOperations(), replica, Engine.Operation.Origin.REPLICA); return new WriteReplicaResult<>(request, location, null, replica, logger); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngine.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngine.java index 24ada3755cb..458461f3c84 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngine.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngine.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.ccr.index.engine; +import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.index.engine.InternalEngine; @@ -18,6 +19,8 @@ import java.io.IOException; */ public final class FollowingEngine extends InternalEngine { + private final CounterMetric numOfOptimizedIndexing = new CounterMetric(); + /** * Construct a new following engine with the specified engine configuration.
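The FollowingEngine hunk just below is the core of the sequence-number optimization: once the engine's max_seq_no_of_updates_or_deletes is at or below the local checkpoint, an incoming operation cannot collide with any update or delete, so the follower can index it append-only and skip the version-map/Lucene lookup entirely. A self-contained sketch of that three-way decision (the enum and method names are illustrative; the real code returns InternalEngine.IndexingStrategy objects):

```java
// Plain-Java mirror of the indexing-strategy decision added below; names are illustrative.
public class FollowerIndexingPlanSketch {

    enum Plan { SKIP_LUCENE, OPTIMIZED_APPEND_ONLY, NON_PRIMARY }

    static Plan plan(boolean processedBefore, long maxSeqNoOfUpdatesOrDeletes,
                     long localCheckpoint, long opSeqNo) {
        if (processedBefore) {
            // The operation was already applied; only translog bookkeeping is needed.
            return Plan.SKIP_LUCENE;
        } else if (maxSeqNoOfUpdatesOrDeletes <= localCheckpoint) {
            // Every update/delete so far sits at or below the local checkpoint, so this
            // operation cannot be a second copy of an existing doc id: skip the lookup.
            assert maxSeqNoOfUpdatesOrDeletes < opSeqNo;
            return Plan.OPTIMIZED_APPEND_ONLY;
        } else {
            // An update/delete may still be in flight above the checkpoint; take the safe path.
            return Plan.NON_PRIMARY;
        }
    }

    public static void main(String[] args) {
        // msu=5 is below the local checkpoint 10, so op seq_no=12 can be indexed append-only.
        System.out.println(plan(false, 5, 10, 12));  // OPTIMIZED_APPEND_ONLY
        System.out.println(plan(false, 15, 10, 12)); // NON_PRIMARY
        System.out.println(plan(true, 5, 10, 3));    // SKIP_LUCENE
    }
}
```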
* @@ -51,7 +54,20 @@ public final class FollowingEngine extends InternalEngine { @Override protected InternalEngine.IndexingStrategy indexingStrategyForOperation(final Index index) throws IOException { preFlight(index); - return planIndexingAsNonPrimary(index); + // NOTE: see Engine#getMaxSeqNoOfUpdatesOrDeletes for an explanation of the optimization using sequence numbers. + final long maxSeqNoOfUpdatesOrDeletes = getMaxSeqNoOfUpdatesOrDeletes(); + assert maxSeqNoOfUpdatesOrDeletes != SequenceNumbers.UNASSIGNED_SEQ_NO : "max_seq_no_of_updates is not initialized"; + if (hasBeenProcessedBefore(index)) { + return IndexingStrategy.processButSkipLucene(false, index.seqNo(), index.version()); + + } else if (maxSeqNoOfUpdatesOrDeletes <= getLocalCheckpoint()) { + assert maxSeqNoOfUpdatesOrDeletes < index.seqNo() : "seq_no[" + index.seqNo() + "] <= msu[" + maxSeqNoOfUpdatesOrDeletes + "]"; + numOfOptimizedIndexing.inc(); + return InternalEngine.IndexingStrategy.optimizedAppendOnly(index.seqNo(), index.version()); + + } else { + return planIndexingAsNonPrimary(index); + } } @Override @@ -85,4 +101,11 @@ public final class FollowingEngine extends InternalEngine { return true; } + /** + * Returns the number of indexing operations that have been optimized (bypassing the version lookup) using sequence numbers + * in this engine. This metric is not persisted and starts from 0 when the engine is opened. + */ + public long getNumberOfOptimizedIndexing() { + return numOfOptimizedIndexing.count(); + } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCcrStatsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCcrStatsAction.java index de285dba19e..976d8ba2074 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCcrStatsAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCcrStatsAction.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ccr.rest; -import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; @@ -23,7 +22,7 @@ public class RestCcrStatsAction extends BaseRestHandler { public RestCcrStatsAction(final Settings settings, final RestController controller) { super(settings); controller.registerHandler(RestRequest.Method.GET, "/_ccr/stats", this); - controller.registerHandler(RestRequest.Method.GET, "/_ccr/stats/{index}", this); + controller.registerHandler(RestRequest.Method.GET, "/{index}/_ccr/stats", this); } @Override @@ -35,7 +34,6 @@ public class RestCcrStatsAction extends BaseRestHandler { protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException { final CcrStatsAction.StatsRequest request = new CcrStatsAction.StatsRequest(); request.setIndices(Strings.splitStringByCommaToArray(restRequest.param("index"))); - request.setIndicesOptions(IndicesOptions.fromRequest(restRequest, request.indicesOptions())); return channel -> client.execute(CcrStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java new file mode 100644 index 00000000000..e9c3073f6c7 --- /dev/null +++
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ccr.rest; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction.Request; + +import java.io.IOException; + +import static org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction.INSTANCE; + +public class RestGetAutoFollowPatternAction extends BaseRestHandler { + + public RestGetAutoFollowPatternAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.GET, "/_ccr/auto_follow/{leader_cluster_alias}", this); + controller.registerHandler(RestRequest.Method.GET, "/_ccr/auto_follow", this); + } + + @Override + public String getName() { + return "ccr_get_auto_follow_pattern_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + Request request = new Request(); + request.setLeaderClusterAlias(restRequest.param("leader_cluster_alias")); + return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); + } + +} diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestUnfollowIndexAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseFollowAction.java similarity index 73% rename from x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestUnfollowIndexAction.java rename to x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseFollowAction.java index 9a82717b621..9d4df8d8567 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestUnfollowIndexAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseFollowAction.java @@ -14,19 +14,19 @@ import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; -import static org.elasticsearch.xpack.core.ccr.action.UnfollowIndexAction.INSTANCE; -import static org.elasticsearch.xpack.core.ccr.action.UnfollowIndexAction.Request; +import static org.elasticsearch.xpack.core.ccr.action.PauseFollowAction.INSTANCE; +import static org.elasticsearch.xpack.core.ccr.action.PauseFollowAction.Request; -public class RestUnfollowIndexAction extends BaseRestHandler { +public class RestPauseFollowAction extends BaseRestHandler { - public RestUnfollowIndexAction(Settings settings, RestController controller) { + public RestPauseFollowAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.POST, "/{index}/_ccr/unfollow", this); + controller.registerHandler(RestRequest.Method.POST, "/{index}/_ccr/pause_follow", this); } @Override public String getName() { - return "ccr_unfollow_index_action"; + return "ccr_pause_follow_action"; } @Override diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutFollowAction.java 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutFollowAction.java new file mode 100644 index 00000000000..d6116ff274b --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutFollowAction.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ccr.rest; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; + +import java.io.IOException; + +import static org.elasticsearch.xpack.core.ccr.action.PutFollowAction.INSTANCE; +import static org.elasticsearch.xpack.core.ccr.action.PutFollowAction.Request; + +public class RestPutFollowAction extends BaseRestHandler { + + public RestPutFollowAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.PUT, "/{index}/_ccr/follow", this); + } + + @Override + public String getName() { + return "ccr_put_follow_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + Request request = new Request(RestResumeFollowAction.createRequest(restRequest)); + return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowIndexAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeFollowAction.java similarity index 77% rename from x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowIndexAction.java rename to x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeFollowAction.java index 8a1d7d778bd..62b3f6323ab 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowIndexAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeFollowAction.java @@ -15,19 +15,19 @@ import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; -import static org.elasticsearch.xpack.core.ccr.action.FollowIndexAction.INSTANCE; -import static org.elasticsearch.xpack.core.ccr.action.FollowIndexAction.Request; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.INSTANCE; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request; -public class RestFollowIndexAction extends BaseRestHandler { +public class RestResumeFollowAction extends BaseRestHandler { - public RestFollowIndexAction(Settings settings, RestController controller) { + public RestResumeFollowAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.POST, "/{index}/_ccr/follow", this); + controller.registerHandler(RestRequest.Method.POST, "/{index}/_ccr/resume_follow", this); } @Override public String getName() { - return "ccr_follow_index_action"; + return "ccr_resume_follow_action"; } @Override diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCreateAndFollowIndexAction.java 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestUnfollowAction.java similarity index 65% rename from x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCreateAndFollowIndexAction.java rename to x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestUnfollowAction.java index 8816760f526..127d06eb751 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCreateAndFollowIndexAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestUnfollowAction.java @@ -3,6 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ + package org.elasticsearch.xpack.ccr.rest; import org.elasticsearch.client.node.NodeClient; @@ -11,27 +12,28 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.ccr.action.UnfollowAction; import java.io.IOException; -import static org.elasticsearch.xpack.core.ccr.action.CreateAndFollowIndexAction.INSTANCE; -import static org.elasticsearch.xpack.core.ccr.action.CreateAndFollowIndexAction.Request; +import static org.elasticsearch.xpack.core.ccr.action.UnfollowAction.INSTANCE; -public class RestCreateAndFollowIndexAction extends BaseRestHandler { +public class RestUnfollowAction extends BaseRestHandler { - public RestCreateAndFollowIndexAction(Settings settings, RestController controller) { + public RestUnfollowAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.POST, "/{index}/_ccr/create_and_follow", this); + controller.registerHandler(RestRequest.Method.POST, "/{index}/_ccr/unfollow", this); } @Override public String getName() { - return "ccr_create_and_follow_index_action"; + return "ccr_unfollow_action"; } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - Request request = new Request(RestFollowIndexAction.createRequest(restRequest)); + UnfollowAction.Request request = new UnfollowAction.Request(restRequest.param("index")); return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); } + } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowMetadataTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowMetadataTests.java index 5ef7b4093ae..89bac141ee3 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowMetadataTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowMetadataTests.java @@ -6,6 +6,8 @@ package org.elasticsearch.xpack.ccr; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; @@ -13,7 +15,6 @@ import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; import java.io.IOException; import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -36,6 +37,7 @@ public class AutoFollowMetadataTests extends AbstractSerializingTestCase configs = new 
HashMap<>(numEntries); Map> followedLeaderIndices = new HashMap<>(numEntries); + Map> headers = new HashMap<>(numEntries); for (int i = 0; i < numEntries; i++) { List leaderPatterns = Arrays.asList(generateRandomStringArray(4, 4, false)); AutoFollowMetadata.AutoFollowPattern autoFollowPattern = new AutoFollowMetadata.AutoFollowPattern( @@ -43,16 +45,23 @@ public class AutoFollowMetadataTests extends AbstractSerializingTestCase header = new HashMap<>(); + for (int j = 0; j < numHeaderEntries; j++) { + header.put(randomAlphaOfLength(5), randomAlphaOfLength(5)); + } + headers.put(Integer.toString(i), header); + } } - return new AutoFollowMetadata(configs, followedLeaderIndices); + return new AutoFollowMetadata(configs, followedLeaderIndices, headers); } @Override diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java index a74b1e33cd2..7e5ae092408 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java @@ -23,8 +23,8 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator; import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; -import org.elasticsearch.xpack.core.ccr.action.CreateAndFollowIndexAction; -import org.elasticsearch.xpack.core.ccr.action.FollowIndexAction; +import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; +import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; import org.elasticsearch.xpack.core.ccr.action.PutAutoFollowPatternAction; @@ -44,10 +44,10 @@ public class CcrLicenseIT extends ESSingleNodeTestCase { } public void testThatFollowingIndexIsUnavailableWithNonCompliantLicense() throws InterruptedException { - final FollowIndexAction.Request followRequest = getFollowRequest(); + final ResumeFollowAction.Request followRequest = getFollowRequest(); final CountDownLatch latch = new CountDownLatch(1); client().execute( - FollowIndexAction.INSTANCE, + ResumeFollowAction.INSTANCE, followRequest, new ActionListener() { @Override @@ -66,15 +66,15 @@ public class CcrLicenseIT extends ESSingleNodeTestCase { } public void testThatCreateAndFollowingIndexIsUnavailableWithNonCompliantLicense() throws InterruptedException { - final FollowIndexAction.Request followRequest = getFollowRequest(); - final CreateAndFollowIndexAction.Request createAndFollowRequest = new CreateAndFollowIndexAction.Request(followRequest); + final ResumeFollowAction.Request followRequest = getFollowRequest(); + final PutFollowAction.Request createAndFollowRequest = new PutFollowAction.Request(followRequest); final CountDownLatch latch = new CountDownLatch(1); client().execute( - CreateAndFollowIndexAction.INSTANCE, + PutFollowAction.INSTANCE, createAndFollowRequest, - new ActionListener() { + new ActionListener() { @Override - public void onResponse(final CreateAndFollowIndexAction.Response response) { + public void onResponse(final PutFollowAction.Response response) { latch.countDown(); fail(); } @@ -140,11 +140,11 @@ public class CcrLicenseIT extends ESSingleNodeTestCase { @Override public ClusterState execute(ClusterState currentState) throws Exception { AutoFollowPattern autoFollowPattern = - new 
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java
index a74b1e33cd2..7e5ae092408 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java
@@ -23,8 +23,8 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.MockLogAppender;
 import org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator;
 import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction;
-import org.elasticsearch.xpack.core.ccr.action.CreateAndFollowIndexAction;
-import org.elasticsearch.xpack.core.ccr.action.FollowIndexAction;
+import org.elasticsearch.xpack.core.ccr.action.PutFollowAction;
+import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction;
 import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata;
 import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern;
 import org.elasticsearch.xpack.core.ccr.action.PutAutoFollowPatternAction;
@@ -44,10 +44,10 @@ public class CcrLicenseIT extends ESSingleNodeTestCase {
     }
 
     public void testThatFollowingIndexIsUnavailableWithNonCompliantLicense() throws InterruptedException {
-        final FollowIndexAction.Request followRequest = getFollowRequest();
+        final ResumeFollowAction.Request followRequest = getFollowRequest();
         final CountDownLatch latch = new CountDownLatch(1);
         client().execute(
-                FollowIndexAction.INSTANCE,
+                ResumeFollowAction.INSTANCE,
                 followRequest,
                 new ActionListener<AcknowledgedResponse>() {
                     @Override
@@ -66,15 +66,15 @@ public class CcrLicenseIT extends ESSingleNodeTestCase {
     }
 
     public void testThatCreateAndFollowingIndexIsUnavailableWithNonCompliantLicense() throws InterruptedException {
-        final FollowIndexAction.Request followRequest = getFollowRequest();
-        final CreateAndFollowIndexAction.Request createAndFollowRequest = new CreateAndFollowIndexAction.Request(followRequest);
+        final ResumeFollowAction.Request followRequest = getFollowRequest();
+        final PutFollowAction.Request createAndFollowRequest = new PutFollowAction.Request(followRequest);
         final CountDownLatch latch = new CountDownLatch(1);
         client().execute(
-                CreateAndFollowIndexAction.INSTANCE,
+                PutFollowAction.INSTANCE,
                 createAndFollowRequest,
-                new ActionListener<CreateAndFollowIndexAction.Response>() {
+                new ActionListener<PutFollowAction.Response>() {
                     @Override
-                    public void onResponse(final CreateAndFollowIndexAction.Response response) {
+                    public void onResponse(final PutFollowAction.Response response) {
                         latch.countDown();
                         fail();
                     }
@@ -140,11 +140,11 @@ public class CcrLicenseIT extends ESSingleNodeTestCase {
             @Override
             public ClusterState execute(ClusterState currentState) throws Exception {
                 AutoFollowPattern autoFollowPattern =
-                    new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null, null);
+                    new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null);
                 AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata(
                     Collections.singletonMap("test_alias", autoFollowPattern),
-                    Collections.emptyMap()
-                );
+                    Collections.emptyMap(),
+                    Collections.emptyMap());
 
                 ClusterState.Builder newState = ClusterState.builder(currentState);
                 newState.metaData(MetaData.builder(currentState.getMetaData())
@@ -191,8 +191,8 @@ public class CcrLicenseIT extends ESSingleNodeTestCase {
         assertThat(e.getMessage(), equalTo("current license is non-compliant for [ccr]"));
     }
 
-    private FollowIndexAction.Request getFollowRequest() {
-        FollowIndexAction.Request request = new FollowIndexAction.Request();
+    private ResumeFollowAction.Request getFollowRequest() {
+        ResumeFollowAction.Request request = new ResumeFollowAction.Request();
         request.setLeaderIndex("leader");
         request.setFollowerIndex("follower");
         request.setMaxRetryDelay(TimeValue.timeValueMillis(10));
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrTests.java
index 0a9ca00590b..65efc184ec1 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrTests.java
@@ -41,7 +41,7 @@ public class CcrTests extends ESTestCase {
                 .numberOfShards(1)
                 .numberOfReplicas(0)
                 .build();
-        final Ccr ccr = new Ccr(Settings.EMPTY, new CcrLicenseChecker(() -> true));
+        final Ccr ccr = new Ccr(Settings.EMPTY, new CcrLicenseChecker(() -> true, () -> false));
         final Optional<EngineFactory> engineFactory = ccr.getEngineFactory(new IndexSettings(indexMetaData, Settings.EMPTY));
         if (value != null && value) {
             assertTrue(engineFactory.isPresent());
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalStateCcr.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalStateCcr.java
index cfc30b8dfac..bb7371cc572 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalStateCcr.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalStateCcr.java
@@ -17,7 +17,7 @@ public class LocalStateCcr extends LocalStateCompositeXPackPlugin {
     public LocalStateCcr(final Settings settings, final Path configPath) throws Exception {
         super(settings, configPath);
 
-        plugins.add(new Ccr(settings, new CcrLicenseChecker(() -> true)) {
+        plugins.add(new Ccr(settings, new CcrLicenseChecker(() -> true, () -> false)) {
 
             @Override
             protected XPackLicenseState getLicenseState() {
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/NonCompliantLicenseLocalStateCcr.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/NonCompliantLicenseLocalStateCcr.java
index f960668a7df..99f23fe7e76 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/NonCompliantLicenseLocalStateCcr.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/NonCompliantLicenseLocalStateCcr.java
@@ -17,7 +17,7 @@ public class NonCompliantLicenseLocalStateCcr extends LocalStateCompositeXPackPlugin {
     public NonCompliantLicenseLocalStateCcr(final Settings settings, final Path configPath) throws Exception {
         super(settings, configPath);
 
-        plugins.add(new Ccr(settings, new CcrLicenseChecker(() -> false)) {
+        plugins.add(new Ccr(settings, new CcrLicenseChecker(() -> false, () -> false)) {
 
             @Override
             protected XPackLicenseState getLicenseState() {
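Every CcrLicenseChecker call site now passes a second boolean supplier alongside the license check. The production constructor is not part of this excerpt; judging only from the call sites (`new CcrLicenseChecker(() -> true, () -> false)`), a plausible shape is the sketch below, where both field names are my assumption:

--------------------------------------------------
import java.util.function.BooleanSupplier;

// Assumed two-supplier shape; the diff only shows call sites, not the class itself.
class TwoSupplierCheckerSketch {
    private final BooleanSupplier isCcrAllowed;   // first argument: license permits CCR
    private final BooleanSupplier isAuthAllowed;  // second argument: assumed security/auth state

    TwoSupplierCheckerSketch(BooleanSupplier isCcrAllowed, BooleanSupplier isAuthAllowed) {
        this.isCcrAllowed = isCcrAllowed;
        this.isAuthAllowed = isAuthAllowed;
    }

    boolean ccrAllowed() {
        return isCcrAllowed.getAsBoolean();
    }
}
--------------------------------------------------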
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java
index 78715654a05..f4d098f41ab 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java
@@ -6,10 +6,14 @@
 package org.elasticsearch.xpack.ccr;
 
+import org.apache.lucene.store.AlreadyClosedException;
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction;
 import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
 import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
 import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
+import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
+import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
 import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
 import org.elasticsearch.action.admin.indices.stats.ShardStats;
 import org.elasticsearch.action.bulk.BulkProcessor;
@@ -19,6 +23,7 @@ import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.analysis.common.CommonAnalysisPlugin;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -26,15 +31,22 @@ import org.elasticsearch.cluster.metadata.MappingMetaData;
 import org.elasticsearch.common.CheckedRunnable;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.engine.Engine;
+import org.elasticsearch.index.seqno.SequenceNumbers;
+import org.elasticsearch.index.shard.IndexShard;
+import org.elasticsearch.index.shard.IndexShardTestCase;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.translog.Translog;
+import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
@@ -44,11 +56,16 @@ import org.elasticsearch.test.MockHttpTransport;
 import org.elasticsearch.test.discovery.TestZenDiscovery;
 import org.elasticsearch.xpack.ccr.action.ShardChangesAction;
 import org.elasticsearch.xpack.ccr.action.ShardFollowTask;
+import org.elasticsearch.xpack.ccr.index.engine.FollowingEngine;
 import org.elasticsearch.xpack.core.XPackSettings;
 import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus;
-import org.elasticsearch.xpack.core.ccr.action.CreateAndFollowIndexAction;
-import org.elasticsearch.xpack.core.ccr.action.FollowIndexAction;
-import org.elasticsearch.xpack.core.ccr.action.UnfollowIndexAction;
+import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction;
+import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction.StatsRequest;
+import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction.StatsResponses;
+import org.elasticsearch.xpack.core.ccr.action.PutFollowAction;
+import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction;
+import org.elasticsearch.xpack.core.ccr.action.PauseFollowAction;
+import org.elasticsearch.xpack.core.ccr.action.UnfollowAction;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -59,6 +76,7 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 
@@ -66,7 +84,10 @@ import static java.util.Collections.singletonMap;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
@@ -171,9 +192,8 @@ public class ShardChangesIT extends ESIntegTestCase {
         assertAcked(client().admin().indices().prepareCreate("index1").setSource(leaderIndexSettings, XContentType.JSON));
         ensureYellow("index1");
 
-        final FollowIndexAction.Request followRequest = createFollowRequest("index1", "index2");
-        final CreateAndFollowIndexAction.Request createAndFollowRequest = new CreateAndFollowIndexAction.Request(followRequest);
-        client().execute(CreateAndFollowIndexAction.INSTANCE, createAndFollowRequest).get();
+        final PutFollowAction.Request followRequest = follow("index1", "index2");
+        client().execute(PutFollowAction.INSTANCE, followRequest).get();
 
         final int firstBatchNumDocs = randomIntBetween(2, 64);
         logger.info("Indexing [{}] docs as first batch", firstBatchNumDocs);
@@ -196,9 +216,9 @@ public class ShardChangesIT extends ESIntegTestCase {
         for (int i = 0; i < firstBatchNumDocs; i++) {
             assertBusy(assertExpectedDocumentRunnable(i));
         }
-
+        assertTotalNumberOfOptimizedIndexing(resolveIndex("index2"), numberOfPrimaryShards, firstBatchNumDocs);
         unfollowIndex("index2");
-        client().execute(FollowIndexAction.INSTANCE, followRequest).get();
+        client().execute(ResumeFollowAction.INSTANCE, followRequest.getFollowRequest()).get();
         final int secondBatchNumDocs = randomIntBetween(2, 64);
         logger.info("Indexing [{}] docs as second batch", secondBatchNumDocs);
         for (int i = firstBatchNumDocs; i < firstBatchNumDocs + secondBatchNumDocs; i++) {
@@ -220,7 +240,9 @@ public class ShardChangesIT extends ESIntegTestCase {
         for (int i = firstBatchNumDocs; i < firstBatchNumDocs + secondBatchNumDocs; i++) {
             assertBusy(assertExpectedDocumentRunnable(i));
         }
+        assertTotalNumberOfOptimizedIndexing(resolveIndex("index2"), numberOfPrimaryShards, firstBatchNumDocs + secondBatchNumDocs);
         unfollowIndex("index2");
+        assertMaxSeqNoOfUpdatesIsTransferred(resolveIndex("index1"), resolveIndex("index2"), numberOfPrimaryShards);
     }
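The hunk above establishes the pattern used through the rest of this file: PutFollowAction.Request wraps a ResumeFollowAction.Request, so creating a follower and resuming it later share one payload. Condensed from the calls in this hunk (index names illustrative; `client()` and the `follow`/`unfollowIndex` helpers come from this test class):

--------------------------------------------------
// Create the follower index and start replication in one step ...
PutFollowAction.Request followRequest = follow("index1", "index2");
client().execute(PutFollowAction.INSTANCE, followRequest).get();

// ... pause it, then resume using the inner request the Put wrapped.
unfollowIndex("index2");
client().execute(ResumeFollowAction.INSTANCE, followRequest.getFollowRequest()).get();
--------------------------------------------------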
 
     public void testSyncMappings() throws Exception {
@@ -229,9 +251,8 @@ public class ShardChangesIT extends ESIntegTestCase {
         assertAcked(client().admin().indices().prepareCreate("index1").setSource(leaderIndexSettings, XContentType.JSON));
         ensureYellow("index1");
 
-        final FollowIndexAction.Request followRequest = createFollowRequest("index1", "index2");
-        final CreateAndFollowIndexAction.Request createAndFollowRequest = new CreateAndFollowIndexAction.Request(followRequest);
-        client().execute(CreateAndFollowIndexAction.INSTANCE, createAndFollowRequest).get();
+        final PutFollowAction.Request followRequest = follow("index1", "index2");
+        client().execute(PutFollowAction.INSTANCE, followRequest).get();
 
         final long firstBatchNumDocs = randomIntBetween(2, 64);
         for (long i = 0; i < firstBatchNumDocs; i++) {
@@ -258,6 +279,7 @@ public class ShardChangesIT extends ESIntegTestCase {
         assertThat(XContentMapValues.extractValue("properties.f.type", mappingMetaData.sourceAsMap()), equalTo("integer"));
         assertThat(XContentMapValues.extractValue("properties.k.type", mappingMetaData.sourceAsMap()), equalTo("long"));
         unfollowIndex("index2");
+        assertMaxSeqNoOfUpdatesIsTransferred(resolveIndex("index1"), resolveIndex("index2"), 2);
     }
 
     public void testNoMappingDefined() throws Exception {
@@ -269,9 +291,8 @@ public class ShardChangesIT extends ESIntegTestCase {
             .build()));
         ensureGreen("index1");
 
-        final FollowIndexAction.Request followRequest = createFollowRequest("index1", "index2");
-        final CreateAndFollowIndexAction.Request createAndFollowRequest = new CreateAndFollowIndexAction.Request(followRequest);
-        client().execute(CreateAndFollowIndexAction.INSTANCE, createAndFollowRequest).get();
+        final PutFollowAction.Request followRequest = follow("index1", "index2");
+        client().execute(PutFollowAction.INSTANCE, followRequest).get();
 
         client().prepareIndex("index1", "doc", "1").setSource("{\"f\":1}", XContentType.JSON).get();
         assertBusy(() -> assertThat(client().prepareSearch("index2").get().getHits().totalHits, equalTo(1L)));
@@ -284,7 +305,8 @@ public class ShardChangesIT extends ESIntegTestCase {
     }
 
     public void testFollowIndex_backlog() throws Exception {
-        String leaderIndexSettings = getIndexSettings(between(1, 5), between(0, 1),
+        int numberOfShards = between(1, 5);
+        String leaderIndexSettings = getIndexSettings(numberOfShards, between(0, 1),
             singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true"));
         assertAcked(client().admin().indices().prepareCreate("index1").setSource(leaderIndexSettings, XContentType.JSON));
         BulkProcessor.Listener listener = new BulkProcessor.Listener() {
@@ -319,13 +341,12 @@ public class ShardChangesIT extends ESIntegTestCase {
         long numDocsIndexed = Math.min(3000 * 2, randomLongBetween(maxReadSize, maxReadSize * 10));
         atLeastDocsIndexed("index1", numDocsIndexed / 3);
 
-        FollowIndexAction.Request followRequest = createFollowRequest("index1", "index2");
-        followRequest.setMaxBatchOperationCount(maxReadSize);
-        followRequest.setMaxConcurrentReadBatches(randomIntBetween(2, 10));
-        followRequest.setMaxConcurrentWriteBatches(randomIntBetween(2, 10));
-        followRequest.setMaxWriteBufferSize(randomIntBetween(1024, 10240));
-        CreateAndFollowIndexAction.Request createAndFollowRequest = new CreateAndFollowIndexAction.Request(followRequest);
-        client().execute(CreateAndFollowIndexAction.INSTANCE, createAndFollowRequest).get();
+        PutFollowAction.Request followRequest = follow("index1", "index2");
+        followRequest.getFollowRequest().setMaxBatchOperationCount(maxReadSize);
+        followRequest.getFollowRequest().setMaxConcurrentReadBatches(randomIntBetween(2, 10));
+        followRequest.getFollowRequest().setMaxConcurrentWriteBatches(randomIntBetween(2, 10));
+        followRequest.getFollowRequest().setMaxWriteBufferSize(randomIntBetween(1024, 10240));
+        client().execute(PutFollowAction.INSTANCE, followRequest).get();
 
         atLeastDocsIndexed("index1", numDocsIndexed);
         run.set(false);
@@ -333,7 +354,10 @@ public class ShardChangesIT extends ESIntegTestCase {
         assertThat(bulkProcessor.awaitClose(1L, TimeUnit.MINUTES), is(true));
 
         assertSameDocCount("index1", "index2");
+        assertTotalNumberOfOptimizedIndexing(resolveIndex("index2"), numberOfShards,
+            client().prepareSearch("index1").get().getHits().totalHits);
         unfollowIndex("index2");
+        assertMaxSeqNoOfUpdatesIsTransferred(resolveIndex("index1"), resolveIndex("index2"), numberOfShards);
     }
 
     @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33337")
@@ -360,14 +384,14 @@ public class ShardChangesIT extends ESIntegTestCase {
         });
         thread.start();
 
-        FollowIndexAction.Request followRequest = createFollowRequest("index1", "index2");
-        followRequest.setMaxBatchOperationCount(randomIntBetween(32, 2048));
-        followRequest.setMaxConcurrentReadBatches(randomIntBetween(2, 10));
-        followRequest.setMaxConcurrentWriteBatches(randomIntBetween(2, 10));
-        client().execute(CreateAndFollowIndexAction.INSTANCE, new CreateAndFollowIndexAction.Request(followRequest)).get();
+        PutFollowAction.Request followRequest = follow("index1", "index2");
+        followRequest.getFollowRequest().setMaxBatchOperationCount(randomIntBetween(32, 2048));
+        followRequest.getFollowRequest().setMaxConcurrentReadBatches(randomIntBetween(2, 10));
+        followRequest.getFollowRequest().setMaxConcurrentWriteBatches(randomIntBetween(2, 10));
+        client().execute(PutFollowAction.INSTANCE, followRequest).get();
 
-        long maxNumDocsReplicated = Math.min(1000, randomLongBetween(followRequest.getMaxBatchOperationCount(),
-            followRequest.getMaxBatchOperationCount() * 10));
+        long maxNumDocsReplicated = Math.min(1000, randomLongBetween(followRequest.getFollowRequest().getMaxBatchOperationCount(),
+            followRequest.getFollowRequest().getMaxBatchOperationCount() * 10));
         long minNumDocsReplicated = maxNumDocsReplicated / 3L;
         logger.info("waiting for at least [{}] documents to be indexed and then stop a random data node", minNumDocsReplicated);
         atLeastDocsIndexed("index2", minNumDocsReplicated);
@@ -379,6 +403,7 @@ public class ShardChangesIT extends ESIntegTestCase {
 
         assertSameDocCount("index1", "index2");
         unfollowIndex("index2");
+        assertMaxSeqNoOfUpdatesIsTransferred(resolveIndex("index1"), resolveIndex("index2"), 3);
     }
 
     public void testFollowIndexWithNestedField() throws Exception {
@@ -388,8 +413,8 @@ public class ShardChangesIT extends ESIntegTestCase {
         internalCluster().ensureAtLeastNumDataNodes(2);
         ensureGreen("index1");
 
-        final FollowIndexAction.Request followRequest = createFollowRequest("index1", "index2");
-        client().execute(CreateAndFollowIndexAction.INSTANCE, new CreateAndFollowIndexAction.Request(followRequest)).get();
+        final PutFollowAction.Request followRequest = follow("index1", "index2");
+        client().execute(PutFollowAction.INSTANCE, followRequest).get();
 
         final int numDocs = randomIntBetween(2, 64);
         for (int i = 0; i < numDocs; i++) {
@@ -419,34 +444,36 @@ public class ShardChangesIT extends ESIntegTestCase {
             });
         }
         unfollowIndex("index2");
+        assertMaxSeqNoOfUpdatesIsTransferred(resolveIndex("index1"), resolveIndex("index2"), 1);
+        assertTotalNumberOfOptimizedIndexing(resolveIndex("index2"), 1, numDocs);
     }
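The four setters threaded through getFollowRequest() in the backlog test are the replication throttling knobs. A compact sketch with the knobs spelled out; the values and inline glosses are mine, the API calls are the ones used above:

--------------------------------------------------
PutFollowAction.Request request = follow("index1", "index2");
ResumeFollowAction.Request params = request.getFollowRequest();
params.setMaxBatchOperationCount(1024);   // operations fetched per read batch
params.setMaxConcurrentReadBatches(4);    // parallel reads from the leader
params.setMaxConcurrentWriteBatches(4);   // parallel bulk writes on the follower
params.setMaxWriteBufferSize(4096);       // buffered ops before reads back off
client().execute(PutFollowAction.INSTANCE, request).get();
--------------------------------------------------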
 
     public void testUnfollowNonExistingIndex() {
-        UnfollowIndexAction.Request unfollowRequest = new UnfollowIndexAction.Request();
+        PauseFollowAction.Request unfollowRequest = new PauseFollowAction.Request();
         unfollowRequest.setFollowIndex("non-existing-index");
-        expectThrows(IllegalArgumentException.class, () -> client().execute(UnfollowIndexAction.INSTANCE, unfollowRequest).actionGet());
+        expectThrows(IllegalArgumentException.class, () -> client().execute(PauseFollowAction.INSTANCE, unfollowRequest).actionGet());
     }
 
     public void testFollowNonExistentIndex() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("test-leader").get());
         assertAcked(client().admin().indices().prepareCreate("test-follower").get());
         // Leader index does not exist.
-        FollowIndexAction.Request followRequest1 = createFollowRequest("non-existent-leader", "test-follower");
-        expectThrows(IndexNotFoundException.class, () -> client().execute(FollowIndexAction.INSTANCE, followRequest1).actionGet());
+        ResumeFollowAction.Request followRequest1 = resumeFollow("non-existent-leader", "test-follower");
+        expectThrows(IndexNotFoundException.class, () -> client().execute(ResumeFollowAction.INSTANCE, followRequest1).actionGet());
         expectThrows(IndexNotFoundException.class,
-            () -> client().execute(CreateAndFollowIndexAction.INSTANCE, new CreateAndFollowIndexAction.Request(followRequest1))
+            () -> client().execute(PutFollowAction.INSTANCE, new PutFollowAction.Request(followRequest1))
                 .actionGet());
         // Follower index does not exist.
-        FollowIndexAction.Request followRequest2 = createFollowRequest("non-test-leader", "non-existent-follower");
-        expectThrows(IndexNotFoundException.class, () -> client().execute(FollowIndexAction.INSTANCE, followRequest2).actionGet());
+        ResumeFollowAction.Request followRequest2 = resumeFollow("non-test-leader", "non-existent-follower");
+        expectThrows(IndexNotFoundException.class, () -> client().execute(ResumeFollowAction.INSTANCE, followRequest2).actionGet());
         expectThrows(IndexNotFoundException.class,
-            () -> client().execute(CreateAndFollowIndexAction.INSTANCE, new CreateAndFollowIndexAction.Request(followRequest2))
+            () -> client().execute(PutFollowAction.INSTANCE, new PutFollowAction.Request(followRequest2))
                 .actionGet());
         // Both indices do not exist.
-        FollowIndexAction.Request followRequest3 = createFollowRequest("non-existent-leader", "non-existent-follower");
-        expectThrows(IndexNotFoundException.class, () -> client().execute(FollowIndexAction.INSTANCE, followRequest3).actionGet());
+        ResumeFollowAction.Request followRequest3 = resumeFollow("non-existent-leader", "non-existent-follower");
+        expectThrows(IndexNotFoundException.class, () -> client().execute(ResumeFollowAction.INSTANCE, followRequest3).actionGet());
         expectThrows(IndexNotFoundException.class,
-            () -> client().execute(CreateAndFollowIndexAction.INSTANCE, new CreateAndFollowIndexAction.Request(followRequest3))
+            () -> client().execute(PutFollowAction.INSTANCE, new PutFollowAction.Request(followRequest3))
                 .actionGet());
     }
 
@@ -456,17 +483,16 @@ public class ShardChangesIT extends ESIntegTestCase {
         assertAcked(client().admin().indices().prepareCreate("index1").setSource(leaderIndexSettings, XContentType.JSON));
         ensureYellow("index1");
 
-        final int numDocs = 1024;
+        final int numDocs = between(10, 1024);
         logger.info("Indexing [{}] docs", numDocs);
         for (int i = 0; i < numDocs; i++) {
             final String source = String.format(Locale.ROOT, "{\"f\":%d}", i);
             client().prepareIndex("index1", "doc", Integer.toString(i)).setSource(source, XContentType.JSON).get();
         }
 
-        FollowIndexAction.Request followRequest = createFollowRequest("index1", "index2");
-        followRequest.setMaxOperationSizeInBytes(1L);
-        final CreateAndFollowIndexAction.Request createAndFollowRequest = new CreateAndFollowIndexAction.Request(followRequest);
-        client().execute(CreateAndFollowIndexAction.INSTANCE, createAndFollowRequest).get();
+        PutFollowAction.Request followRequest = follow("index1", "index2");
+        followRequest.getFollowRequest().setMaxBatchSize(new ByteSizeValue(1, ByteSizeUnit.BYTES));
+        client().execute(PutFollowAction.INSTANCE, followRequest).get();
 
         final Map<ShardId, Long> firstBatchNumDocsPerShard = new HashMap<>();
         final ShardStats[] firstBatchShardStats = client().admin().indices().prepareStats("index1").get().getIndex("index1").getShards();
@@ -482,6 +508,8 @@ public class ShardChangesIT extends ESIntegTestCase {
             assertBusy(assertExpectedDocumentRunnable(i));
         }
         unfollowIndex("index2");
+        assertMaxSeqNoOfUpdatesIsTransferred(resolveIndex("index1"), resolveIndex("index2"), 1);
+        assertTotalNumberOfOptimizedIndexing(resolveIndex("index2"), 1, numDocs);
     }
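Throughout the patch, setMaxOperationSizeInBytes(long) becomes setMaxBatchSize(ByteSizeValue), so the unit is explicit at the call site instead of being an implicit byte count. The before/after in one place, taken from the hunk above:

--------------------------------------------------
// Before: a bare long, implicitly bytes.
followRequest.setMaxOperationSizeInBytes(1L);

// After: the unit travels with the value; one byte forces single-operation batches in this test.
followRequest.getFollowRequest().setMaxBatchSize(new ByteSizeValue(1, ByteSizeUnit.BYTES));
--------------------------------------------------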
 
     public void testDontFollowTheWrongIndex() throws Exception {
@@ -492,22 +520,20 @@ public class ShardChangesIT extends ESIntegTestCase {
         assertAcked(client().admin().indices().prepareCreate("index3").setSource(leaderIndexSettings, XContentType.JSON));
         ensureGreen("index3");
 
-        FollowIndexAction.Request followRequest = createFollowRequest("index1", "index2");
-        CreateAndFollowIndexAction.Request createAndFollowRequest = new CreateAndFollowIndexAction.Request(followRequest);
-        client().execute(CreateAndFollowIndexAction.INSTANCE, createAndFollowRequest).get();
+        PutFollowAction.Request followRequest = follow("index1", "index2");
+        client().execute(PutFollowAction.INSTANCE, followRequest).get();
 
-        followRequest = createFollowRequest("index3", "index4");
-        createAndFollowRequest = new CreateAndFollowIndexAction.Request(followRequest);
-        client().execute(CreateAndFollowIndexAction.INSTANCE, createAndFollowRequest).get();
+        followRequest = follow("index3", "index4");
+        client().execute(PutFollowAction.INSTANCE, followRequest).get();
         unfollowIndex("index2", "index4");
 
-        FollowIndexAction.Request wrongRequest1 = createFollowRequest("index1", "index4");
+        ResumeFollowAction.Request wrongRequest1 = resumeFollow("index1", "index4");
         Exception e = expectThrows(IllegalArgumentException.class,
-            () -> client().execute(FollowIndexAction.INSTANCE, wrongRequest1).actionGet());
+            () -> client().execute(ResumeFollowAction.INSTANCE, wrongRequest1).actionGet());
         assertThat(e.getMessage(), containsString("follow index [index4] should reference"));
 
-        FollowIndexAction.Request wrongRequest2 = createFollowRequest("index3", "index2");
-        e = expectThrows(IllegalArgumentException.class, () -> client().execute(FollowIndexAction.INSTANCE, wrongRequest2).actionGet());
+        ResumeFollowAction.Request wrongRequest2 = resumeFollow("index3", "index2");
+        e = expectThrows(IllegalArgumentException.class, () -> client().execute(ResumeFollowAction.INSTANCE, wrongRequest2).actionGet());
         assertThat(e.getMessage(), containsString("follow index [index2] should reference"));
     }
 
@@ -515,9 +541,8 @@ public class ShardChangesIT extends ESIntegTestCase {
         String leaderIndexSettings = getIndexSettings(1, 0, singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true"));
         assertAcked(client().admin().indices().prepareCreate("index1").setSource(leaderIndexSettings, XContentType.JSON).get());
         ensureYellow("index1");
-        FollowIndexAction.Request followRequest = createFollowRequest("index1", "index2");
-        CreateAndFollowIndexAction.Request createAndFollowRequest = new CreateAndFollowIndexAction.Request(followRequest);
-        client().execute(CreateAndFollowIndexAction.INSTANCE, createAndFollowRequest).get();
+        PutFollowAction.Request followRequest = follow("index1", "index2");
+        client().execute(PutFollowAction.INSTANCE, followRequest).get();
         unfollowIndex("index2");
         client().admin().indices().close(new CloseIndexRequest("index2")).actionGet();
@@ -529,6 +554,133 @@ public class ShardChangesIT extends ESIntegTestCase {
             "this setting is managed via a dedicated API"));
     }
 
+    public void testCloseLeaderIndex() throws Exception {
+        assertAcked(client().admin().indices().prepareCreate("index1")
+            .setSettings(Settings.builder()
+                .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
+                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
+                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
+                .build()));
+
+        final PutFollowAction.Request followRequest = follow("index1", "index2");
+        client().execute(PutFollowAction.INSTANCE, followRequest).get();
+
+        client().prepareIndex("index1", "doc", "1").setSource("{}", XContentType.JSON).get();
+        assertBusy(() -> assertThat(client().prepareSearch("index2").get().getHits().totalHits, equalTo(1L)));
+
+        client().admin().indices().close(new CloseIndexRequest("index1")).actionGet();
+        assertBusy(() -> {
+            StatsResponses response = client().execute(CcrStatsAction.INSTANCE, new StatsRequest()).actionGet();
+            assertThat(response.getNodeFailures(), empty());
+            assertThat(response.getTaskFailures(), empty());
+            assertThat(response.getStatsResponses(), hasSize(1));
+            assertThat(response.getStatsResponses().get(0).status().numberOfFailedFetches(), greaterThanOrEqualTo(1L));
+            assertThat(response.getStatsResponses().get(0).status().fetchExceptions().size(), equalTo(1));
+            ElasticsearchException exception = response.getStatsResponses().get(0).status()
+                .fetchExceptions().entrySet().iterator().next().getValue().v2();
+            assertThat(exception.getMessage(), equalTo("blocked by: [FORBIDDEN/4/index closed];"));
+        });
+
+        client().admin().indices().open(new OpenIndexRequest("index1")).actionGet();
+        client().prepareIndex("index1", "doc", "2").setSource("{}", XContentType.JSON).get();
+        assertBusy(() -> assertThat(client().prepareSearch("index2").get().getHits().totalHits, equalTo(2L)));
+
+        unfollowIndex("index2");
+    }
+
+    public void testCloseFollowIndex() throws Exception {
+        assertAcked(client().admin().indices().prepareCreate("index1")
+            .setSettings(Settings.builder()
+                .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
+                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
+                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
+                .build()));
+
+        final PutFollowAction.Request followRequest = follow("index1", "index2");
+        client().execute(PutFollowAction.INSTANCE, followRequest).get();
+
+        client().prepareIndex("index1", "doc", "1").setSource("{}", XContentType.JSON).get();
+        assertBusy(() -> assertThat(client().prepareSearch("index2").get().getHits().totalHits, equalTo(1L)));
+
+        client().admin().indices().close(new CloseIndexRequest("index2")).actionGet();
+        client().prepareIndex("index1", "doc", "2").setSource("{}", XContentType.JSON).get();
+        assertBusy(() -> {
+            StatsResponses response = client().execute(CcrStatsAction.INSTANCE, new StatsRequest()).actionGet();
+            assertThat(response.getNodeFailures(), empty());
+            assertThat(response.getTaskFailures(), empty());
+            assertThat(response.getStatsResponses(), hasSize(1));
+            assertThat(response.getStatsResponses().get(0).status().numberOfFailedBulkOperations(), greaterThanOrEqualTo(1L));
+        });
+        client().admin().indices().open(new OpenIndexRequest("index2")).actionGet();
+        assertBusy(() -> assertThat(client().prepareSearch("index2").get().getHits().totalHits, equalTo(2L)));
+
+        unfollowIndex("index2");
+    }
+
+    public void testDeleteLeaderIndex() throws Exception {
+        assertAcked(client().admin().indices().prepareCreate("index1")
+            .setSettings(Settings.builder()
+                .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
+                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
+                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
+                .build()));
+
+        final PutFollowAction.Request followRequest = follow("index1", "index2");
+        client().execute(PutFollowAction.INSTANCE, followRequest).get();
+
+        client().prepareIndex("index1", "doc", "1").setSource("{}", XContentType.JSON).get();
+        assertBusy(() -> assertThat(client().prepareSearch("index2").get().getHits().totalHits, equalTo(1L)));
+
+        client().admin().indices().delete(new DeleteIndexRequest("index1")).actionGet();
+        ensureNoCcrTasks();
+    }
+
+    public void testDeleteFollowerIndex() throws Exception {
+        assertAcked(client().admin().indices().prepareCreate("index1")
+            .setSettings(Settings.builder()
+                .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
+                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
+                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
+                .build()));
+
+        final PutFollowAction.Request followRequest = follow("index1", "index2");
+        client().execute(PutFollowAction.INSTANCE, followRequest).get();
+
+        client().prepareIndex("index1", "doc", "1").setSource("{}", XContentType.JSON).get();
+        assertBusy(() -> assertThat(client().prepareSearch("index2").get().getHits().totalHits, equalTo(1L)));
+
+        client().admin().indices().delete(new DeleteIndexRequest("index2")).actionGet();
+        client().prepareIndex("index1", "doc", "2").setSource("{}", XContentType.JSON).get();
+        ensureNoCcrTasks();
+    }
+
+    public void testUnfollowIndex() throws Exception {
+        String leaderIndexSettings = getIndexSettings(1, 0, singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true"));
+        assertAcked(client().admin().indices().prepareCreate("index1").setSource(leaderIndexSettings, XContentType.JSON).get());
+        PutFollowAction.Request followRequest = follow("index1", "index2");
+        client().execute(PutFollowAction.INSTANCE, followRequest).get();
+        client().prepareIndex("index1", "doc").setSource("{}", XContentType.JSON).get();
+        assertBusy(() -> {
+            assertThat(client().prepareSearch("index2").get().getHits().getTotalHits(), equalTo(1L));
+        });
+
+        // Indexing directly into index2 would fail now, because index2 is a follow index.
+        // We can't test this here because an assertion trips before an actual error is thrown and then index call hangs.
+
+        // Turn follow index into a regular index by: pausing shard follow, close index, unfollow index and then open index:
+        unfollowIndex("index2");
+        client().admin().indices().close(new CloseIndexRequest("index2")).actionGet();
+        assertAcked(client().execute(UnfollowAction.INSTANCE, new UnfollowAction.Request("index2")).actionGet());
+        client().admin().indices().open(new OpenIndexRequest("index2")).actionGet();
+        ensureGreen("index2");
+
+        // Indexing succeeds now, because index2 is no longer a follow index:
+        client().prepareIndex("index2", "doc").setSource("{}", XContentType.JSON)
+            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
+            .get();
+        assertThat(client().prepareSearch("index2").get().getHits().getTotalHits(), equalTo(2L));
+    }
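testUnfollowIndex is the reference sequence for demoting a follower into a regular index; the test performs the four steps in this order (pause, close, unfollow, reopen). The same calls, condensed with my glosses on each step:

--------------------------------------------------
unfollowIndex("index2");                                              // pause the shard-follow tasks
client().admin().indices().close(new CloseIndexRequest("index2")).actionGet();   // close the follower
client().execute(UnfollowAction.INSTANCE,
    new UnfollowAction.Request("index2")).actionGet();                // strip its follower status
client().admin().indices().open(new OpenIndexRequest("index2")).actionGet();     // reopen as a plain index
--------------------------------------------------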
 
     private CheckedRunnable<Exception> assertTask(final int numberOfPrimaryShards, final Map<ShardId, Long> numDocsPerShard) {
         return () -> {
             final ClusterState clusterState = client().admin().cluster().prepareState().get().getState();
@@ -567,14 +719,18 @@ public class ShardChangesIT extends ESIntegTestCase {
 
     private void unfollowIndex(String... indices) throws Exception {
         for (String index : indices) {
-            final UnfollowIndexAction.Request unfollowRequest = new UnfollowIndexAction.Request();
+            final PauseFollowAction.Request unfollowRequest = new PauseFollowAction.Request();
             unfollowRequest.setFollowIndex(index);
-            client().execute(UnfollowIndexAction.INSTANCE, unfollowRequest).get();
+            client().execute(PauseFollowAction.INSTANCE, unfollowRequest).get();
         }
+        ensureNoCcrTasks();
+    }
+
+    private void ensureNoCcrTasks() throws Exception {
         assertBusy(() -> {
             final ClusterState clusterState = client().admin().cluster().prepareState().get().getState();
             final PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE);
-            assertThat(tasks.tasks().size(), equalTo(0));
+            assertThat(tasks.tasks(), empty());
 
             ListTasksRequest listTasksRequest = new ListTasksRequest();
             listTasksRequest.setDetailed(true);
@@ -715,8 +871,71 @@ public class ShardChangesIT extends ESIntegTestCase {
         }, 60, TimeUnit.SECONDS);
     }
 
-    public static FollowIndexAction.Request createFollowRequest(String leaderIndex, String followerIndex) {
-        FollowIndexAction.Request request = new FollowIndexAction.Request();
+    private void assertMaxSeqNoOfUpdatesIsTransferred(Index leaderIndex, Index followerIndex, int numberOfShards) throws Exception {
+        assertBusy(() -> {
+            long[] msuOnLeader = new long[numberOfShards];
+            for (int i = 0; i < msuOnLeader.length; i++) {
+                msuOnLeader[i] = SequenceNumbers.UNASSIGNED_SEQ_NO;
+            }
+            Set<String> leaderNodes = internalCluster().nodesInclude(leaderIndex.getName());
+            for (String leaderNode : leaderNodes) {
+                IndicesService indicesService = internalCluster().getInstance(IndicesService.class, leaderNode);
+                for (int i = 0; i < numberOfShards; i++) {
+                    IndexShard shard = indicesService.getShardOrNull(new ShardId(leaderIndex, i));
+                    if (shard != null) {
+                        try {
+                            msuOnLeader[i] = SequenceNumbers.max(msuOnLeader[i], shard.getMaxSeqNoOfUpdatesOrDeletes());
+                        } catch (AlreadyClosedException ignored) {
+                            return;
+                        }
+                    }
+                }
+            }
+
+            Set<String> followerNodes = internalCluster().nodesInclude(followerIndex.getName());
+            for (String followerNode : followerNodes) {
+                IndicesService indicesService = internalCluster().getInstance(IndicesService.class, followerNode);
+                for (int i = 0; i < numberOfShards; i++) {
+                    IndexShard shard = indicesService.getShardOrNull(new ShardId(followerIndex, i));
+                    if (shard != null) {
+                        try {
+                            assertThat(shard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(msuOnLeader[i]));
+                        } catch (AlreadyClosedException ignored) {
+
+                        }
+                    }
+                }
+            }
+        });
+    }
+
+    private void assertTotalNumberOfOptimizedIndexing(Index followerIndex, int numberOfShards, long expectedTotal) throws Exception {
+        assertBusy(() -> {
+            long[] numOfOptimizedOps = new long[numberOfShards];
+            for (int shardId = 0; shardId < numberOfShards; shardId++) {
+                for (String node : internalCluster().nodesInclude(followerIndex.getName())) {
+                    IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node);
+                    IndexShard shard = indicesService.getShardOrNull(new ShardId(followerIndex, shardId));
+                    if (shard != null && shard.routingEntry().primary()) {
+                        try {
+                            FollowingEngine engine = ((FollowingEngine) IndexShardTestCase.getEngine(shard));
+                            numOfOptimizedOps[shardId] = engine.getNumberOfOptimizedIndexing();
+                        } catch (AlreadyClosedException e) {
+                            throw new AssertionError(e); // causes assertBusy to retry
+                        }
+                    }
+                }
+            }
+            assertThat(Arrays.stream(numOfOptimizedOps).sum(), equalTo(expectedTotal));
+        });
+    }
+
+    public static PutFollowAction.Request follow(String leaderIndex, String followerIndex) {
+        return new PutFollowAction.Request(resumeFollow(leaderIndex, followerIndex));
+    }
+
+    public static ResumeFollowAction.Request resumeFollow(String leaderIndex, String followerIndex) {
+        ResumeFollowAction.Request request = new ResumeFollowAction.Request();
         request.setLeaderIndex(leaderIndex);
         request.setFollowerIndex(followerIndex);
         request.setMaxRetryDelay(TimeValue.timeValueMillis(10));
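The two new helpers close the file with different AlreadyClosedException policies: the max-seq-no check silently tolerates a shard that closes underneath it, while the optimized-indexing check rethrows as AssertionError so that assertBusy retries the whole block. The retry idiom, isolated; checkEngine is a hypothetical stand-in for any racy assertion:

--------------------------------------------------
assertBusy(() -> {
    try {
        checkEngine(); // hypothetical assertion that may race with a closing shard
    } catch (AlreadyClosedException e) {
        throw new AssertionError(e); // assertBusy treats AssertionError as "try again"
    }
});
--------------------------------------------------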
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java
index 218825e4120..32d7ea205a9 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java
@@ -16,12 +16,13 @@ import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.ccr.Ccr;
 import org.elasticsearch.xpack.ccr.CcrLicenseChecker;
 import org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator.AutoFollower;
 import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata;
 import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern;
 import org.elasticsearch.xpack.core.ccr.AutoFollowStats;
-import org.elasticsearch.xpack.core.ccr.action.FollowIndexAction;
+import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -56,12 +57,14 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
             .build();
 
         AutoFollowPattern autoFollowPattern =
-            new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null, null);
+            new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null);
         Map<String, AutoFollowPattern> patterns = new HashMap<>();
         patterns.put("remote", autoFollowPattern);
         Map<String, List<String>> followedLeaderIndexUUIDS = new HashMap<>();
         followedLeaderIndexUUIDS.put("remote", new ArrayList<>());
-        AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata(patterns, followedLeaderIndexUUIDS);
+        Map<String, Map<String, String>> autoFollowHeaders = new HashMap<>();
+        autoFollowHeaders.put("remote", Collections.singletonMap("key", "val"));
+        AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata(patterns, followedLeaderIndexUUIDS, autoFollowHeaders);
 
         ClusterState currentState = ClusterState.builder(new ClusterName("name"))
             .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, autoFollowMetadata))
@@ -83,14 +86,16 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
             void getLeaderClusterState(Map<String, String> headers,
                                        String leaderClusterAlias,
                                        BiConsumer<ClusterState, Exception> handler) {
+                assertThat(headers, equalTo(autoFollowHeaders.get("remote")));
                 handler.accept(leaderState, null);
             }
 
             @Override
             void createAndFollow(Map<String, String> headers,
-                                 FollowIndexAction.Request followRequest,
+                                 ResumeFollowAction.Request followRequest,
                                  Runnable successHandler,
                                  Consumer<Exception> failureHandler) {
+                assertThat(headers, equalTo(autoFollowHeaders.get("remote")));
                 assertThat(followRequest.getLeaderIndex(), equalTo("remote:logs-20190101"));
                 assertThat(followRequest.getFollowerIndex(), equalTo("logs-20190101"));
                 successHandler.run();
@@ -115,12 +120,13 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
         when(client.getRemoteClusterClient(anyString())).thenReturn(client);
 
         AutoFollowPattern autoFollowPattern =
-            new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null, null);
+            new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null);
         Map<String, AutoFollowPattern> patterns = new HashMap<>();
         patterns.put("remote", autoFollowPattern);
         Map<String, List<String>> followedLeaderIndexUUIDS = new HashMap<>();
         followedLeaderIndexUUIDS.put("remote", new ArrayList<>());
-        AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata(patterns, followedLeaderIndexUUIDS);
+        Map<String, Map<String, String>> headers = new HashMap<>();
+        AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata(patterns, followedLeaderIndexUUIDS, headers);
         ClusterState followerState = ClusterState.builder(new ClusterName("remote"))
             .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, autoFollowMetadata))
             .build();
@@ -144,7 +150,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
 
             @Override
             void createAndFollow(Map<String, String> headers,
-                                 FollowIndexAction.Request followRequest,
+                                 ResumeFollowAction.Request followRequest,
                                  Runnable successHandler,
                                  Consumer<Exception> failureHandler) {
                 fail("should not get here");
@@ -172,12 +178,13 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
             .build();
 
         AutoFollowPattern autoFollowPattern =
-            new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null, null);
+            new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null);
         Map<String, AutoFollowPattern> patterns = new HashMap<>();
         patterns.put("remote", autoFollowPattern);
         Map<String, List<String>> followedLeaderIndexUUIDS = new HashMap<>();
         followedLeaderIndexUUIDS.put("remote", new ArrayList<>());
-        AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata(patterns, followedLeaderIndexUUIDS);
+        Map<String, Map<String, String>> headers = new HashMap<>();
+        AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata(patterns, followedLeaderIndexUUIDS, headers);
         ClusterState followerState = ClusterState.builder(new ClusterName("remote"))
             .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, autoFollowMetadata))
             .build();
@@ -204,7 +211,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
 
             @Override
             void createAndFollow(Map<String, String> headers,
-                                 FollowIndexAction.Request followRequest,
+                                 ResumeFollowAction.Request followRequest,
                                  Runnable successHandler,
                                  Consumer<Exception> failureHandler) {
                 assertThat(followRequest.getLeaderIndex(), equalTo("remote:logs-20190101"));
@@ -233,12 +240,13 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
             .build();
 
         AutoFollowPattern autoFollowPattern =
-            new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null, null);
+            new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null);
         Map<String, AutoFollowPattern> patterns = new HashMap<>();
         patterns.put("remote", autoFollowPattern);
         Map<String, List<String>> followedLeaderIndexUUIDS = new HashMap<>();
         followedLeaderIndexUUIDS.put("remote", new ArrayList<>());
-        AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata(patterns, followedLeaderIndexUUIDS);
+        Map<String, Map<String, String>> headers = new HashMap<>();
+        AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata(patterns, followedLeaderIndexUUIDS, headers);
         ClusterState followerState = ClusterState.builder(new ClusterName("remote"))
             .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, autoFollowMetadata))
             .build();
@@ -265,7 +273,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
 
             @Override
             void createAndFollow(Map<String, String> headers,
-                                 FollowIndexAction.Request followRequest,
+                                 ResumeFollowAction.Request followRequest,
                                  Runnable successHandler,
                                  Consumer<Exception> failureHandler) {
                 assertThat(followRequest.getLeaderIndex(), equalTo("remote:logs-20190101"));
@@ -285,10 +293,11 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
 
     public void testGetLeaderIndicesToFollow() {
         AutoFollowPattern autoFollowPattern =
-            new AutoFollowPattern(Collections.singletonList("metrics-*"), null, null, null, null, null, null, null, null, null);
+            new AutoFollowPattern(Collections.singletonList("metrics-*"), null, null, null, null, null, null, null, null);
+        Map<String, Map<String, String>> headers = new HashMap<>();
         ClusterState followerState = ClusterState.builder(new ClusterName("remote"))
             .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
-                new AutoFollowMetadata(Collections.singletonMap("remote", autoFollowPattern), Collections.emptyMap())))
+                new AutoFollowMetadata(Collections.singletonMap("remote", autoFollowPattern), Collections.emptyMap(), headers)))
             .build();
 
         MetaData.Builder imdBuilder = MetaData.builder();
@@ -310,7 +319,8 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
             .metaData(imdBuilder)
             .build();
 
-        List<Index> result = AutoFollower.getLeaderIndicesToFollow(autoFollowPattern, leaderState, followerState, Collections.emptyList());
+        List<Index> result = AutoFollower.getLeaderIndicesToFollow("remote", autoFollowPattern, leaderState, followerState,
+            Collections.emptyList());
         result.sort(Comparator.comparing(Index::getName));
         assertThat(result.size(), equalTo(5));
         assertThat(result.get(0).getName(), equalTo("metrics-0"));
@@ -320,7 +330,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
         assertThat(result.get(4).getName(), equalTo("metrics-4"));
 
         List<String> followedIndexUUIDs = Collections.singletonList(leaderState.metaData().index("metrics-2").getIndexUUID());
-        result = AutoFollower.getLeaderIndicesToFollow(autoFollowPattern, leaderState, followerState, followedIndexUUIDs);
+        result = AutoFollower.getLeaderIndicesToFollow("remote", autoFollowPattern, leaderState, followerState, followedIndexUUIDs);
         result.sort(Comparator.comparing(Index::getName));
         assertThat(result.size(), equalTo(4));
         assertThat(result.get(0).getName(), equalTo("metrics-0"));
@@ -329,17 +339,45 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
         assertThat(result.get(3).getName(), equalTo("metrics-4"));
     }
 
+    public void testGetLeaderIndicesToFollowDoNotSelectFollowIndicesInTheSameCluster() {
+        MetaData.Builder imdBuilder = MetaData.builder();
+        imdBuilder.put(IndexMetaData.builder("metrics-0")
+            .settings(settings(Version.CURRENT))
+            .numberOfShards(1)
+            .numberOfReplicas(0));
+        imdBuilder.put(IndexMetaData.builder("metrics-1")
+            .putCustom(Ccr.CCR_CUSTOM_METADATA_KEY, new HashMap<>())
+            .settings(settings(Version.CURRENT))
+            .numberOfShards(1)
+            .numberOfReplicas(0));
+
+        AutoFollowPattern autoFollowPattern =
+            new AutoFollowPattern(Collections.singletonList("metrics-*"), null, null, null, null, null, null, null, null);
+        imdBuilder.putCustom(AutoFollowMetadata.TYPE, new AutoFollowMetadata(Collections.singletonMap("remote", autoFollowPattern),
+            Collections.emptyMap(), Collections.emptyMap()));
+
+        ClusterState clusterState = ClusterState.builder(new ClusterName("name"))
+            .metaData(imdBuilder)
+            .build();
+
+        List<Index> result = AutoFollower.getLeaderIndicesToFollow("_local_", autoFollowPattern, clusterState,
+            clusterState, Collections.emptyList());
+        result.sort(Comparator.comparing(Index::getName));
+        assertThat(result.size(), equalTo(1));
+        assertThat(result.get(0).getName(), equalTo("metrics-0"));
+    }
+
     public void testGetFollowerIndexName() {
         AutoFollowPattern autoFollowPattern = new AutoFollowPattern(Collections.singletonList("metrics-*"), null, null,
-            null, null, null, null, null, null, null);
+            null, null, null, null, null, null);
         assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("metrics-0"));
 
         autoFollowPattern = new AutoFollowPattern(Collections.singletonList("metrics-*"), "eu-metrics-0", null, null,
-            null, null, null, null, null, null);
+            null, null, null, null, null);
         assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("eu-metrics-0"));
 
         autoFollowPattern = new AutoFollowPattern(Collections.singletonList("metrics-*"), "eu-{{leader_index}}", null,
-            null, null, null, null, null, null, null);
+            null, null, null, null, null, null);
         assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("eu-metrics-0"));
     }
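getLeaderIndicesToFollow now takes the leader-cluster alias as its first argument, and the new test shows why: an index that carries CCR custom metadata is itself a follower and must not be auto-followed again when leader and follower are the same cluster. The production method is not in this excerpt; a sketch of the skip condition as I read it from the test:

--------------------------------------------------
// Assumed filter, inferred from testGetLeaderIndicesToFollowDoNotSelectFollowIndicesInTheSameCluster.
for (IndexMetaData indexMetaData : leaderState.metaData()) {
    if (indexMetaData.getCustomData(Ccr.CCR_CUSTOM_METADATA_KEY) != null) {
        continue; // already a follower index; skip it
    }
    // ... pattern matching and followed-UUID bookkeeping elided
}
--------------------------------------------------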
@@ -349,7 +387,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
             null,
             null,
             mock(ClusterService.class),
-            new CcrLicenseChecker(() -> true)
+            new CcrLicenseChecker(() -> true, () -> false)
         );
 
         autoFollowCoordinator.updateStats(Collections.singletonList(
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowTests.java
index 0071887484b..1101d260dcf 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowTests.java
@@ -9,6 +9,8 @@ import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
@@ -134,7 +136,7 @@ public class AutoFollowTests extends ESSingleNodeTestCase {
             request.setMaxBatchOperationCount(randomIntBetween(0, Integer.MAX_VALUE));
         }
         if (randomBoolean()) {
-            request.setMaxOperationSizeInBytes(randomNonNegativeLong());
+            request.setMaxBatchSize(new ByteSizeValue(randomNonNegativeLong(), ByteSizeUnit.BYTES));
         }
         if (randomBoolean()) {
             request.setMaxRetryDelay(TimeValue.timeValueMillis(500));
@@ -165,8 +167,8 @@ public class AutoFollowTests extends ESSingleNodeTestCase {
         if (request.getMaxBatchOperationCount() != null) {
             assertThat(shardFollowTask.getMaxBatchOperationCount(), equalTo(request.getMaxBatchOperationCount()));
         }
-        if (request.getMaxOperationSizeInBytes() != null) {
-            assertThat(shardFollowTask.getMaxBatchSizeInBytes(), equalTo(request.getMaxOperationSizeInBytes()));
+        if (request.getMaxBatchSize() != null) {
+            assertThat(shardFollowTask.getMaxBatchSize(), equalTo(request.getMaxBatchSize()));
         }
         if (request.getMaxRetryDelay() != null) {
             assertThat(shardFollowTask.getMaxRetryDelay(), equalTo(request.getMaxRetryDelay()));
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexRequestTests.java
deleted file mode 100644
index c751ca5f000..00000000000
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexRequestTests.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.ccr.action;
-
-import org.elasticsearch.test.AbstractStreamableTestCase;
-import org.elasticsearch.xpack.core.ccr.action.CreateAndFollowIndexAction;
-
-public class CreateAndFollowIndexRequestTests extends AbstractStreamableTestCase<CreateAndFollowIndexAction.Request> {
-
-    @Override
-    protected CreateAndFollowIndexAction.Request createBlankInstance() {
-        return new CreateAndFollowIndexAction.Request();
-    }
-
-    @Override
-    protected CreateAndFollowIndexAction.Request createTestInstance() {
-        return new CreateAndFollowIndexAction.Request(FollowIndexRequestTests.createTestRequest());
-    }
-}
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexResponseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexResponseTests.java
deleted file mode 100644
index 44ac21055a7..00000000000
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexResponseTests.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.ccr.action;
-
-import org.elasticsearch.test.AbstractStreamableTestCase;
-import org.elasticsearch.xpack.core.ccr.action.CreateAndFollowIndexAction;
-
-public class CreateAndFollowIndexResponseTests extends AbstractStreamableTestCase<CreateAndFollowIndexAction.Response> {
-
-    @Override
-    protected CreateAndFollowIndexAction.Response createBlankInstance() {
-        return new CreateAndFollowIndexAction.Response();
-    }
-
-    @Override
-    protected CreateAndFollowIndexAction.Response createTestInstance() {
-        return new CreateAndFollowIndexAction.Response(randomBoolean(), randomBoolean(), randomBoolean());
-    }
-}
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowIndexRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowIndexRequestTests.java
index 2bff73d223b..0721660bb47 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowIndexRequestTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowIndexRequestTests.java
@@ -6,10 +6,12 @@
 package org.elasticsearch.xpack.ccr.action;
 
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractStreamableXContentTestCase;
-import org.elasticsearch.xpack.core.ccr.action.FollowIndexAction;
+import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction;
 
 import java.io.IOException;
 
@@ -17,21 +19,21 @@ import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
 
-public class FollowIndexRequestTests extends AbstractStreamableXContentTestCase<FollowIndexAction.Request> {
+public class FollowIndexRequestTests extends AbstractStreamableXContentTestCase<ResumeFollowAction.Request> {
 
     @Override
-    protected FollowIndexAction.Request createBlankInstance() {
-        return new FollowIndexAction.Request();
+    protected ResumeFollowAction.Request createBlankInstance() {
+        return new ResumeFollowAction.Request();
     }
 
     @Override
-    protected FollowIndexAction.Request createTestInstance() {
+    protected ResumeFollowAction.Request createTestInstance() {
         return createTestRequest();
     }
 
     @Override
-    protected FollowIndexAction.Request doParseInstance(XContentParser parser) throws IOException {
-        return FollowIndexAction.Request.fromXContent(parser, null);
+    protected ResumeFollowAction.Request doParseInstance(XContentParser parser) throws IOException {
+        return ResumeFollowAction.Request.fromXContent(parser, null);
     }
 
     @Override
@@ -39,8 +41,8 @@ public class FollowIndexRequestTests extends AbstractStreamableXContentTestCase {
         return false;
     }
 
-    static FollowIndexAction.Request createTestRequest() {
-        FollowIndexAction.Request request = new FollowIndexAction.Request();
+    static ResumeFollowAction.Request createTestRequest() {
+        ResumeFollowAction.Request request = new ResumeFollowAction.Request();
         request.setLeaderIndex(randomAlphaOfLength(4));
         request.setFollowerIndex(randomAlphaOfLength(4));
         if (randomBoolean()) {
@@ -53,7 +55,7 @@ public class FollowIndexRequestTests extends AbstractStreamableXContentTestCase {
             request.setMaxConcurrentWriteBatches(randomIntBetween(1, Integer.MAX_VALUE));
         }
         if (randomBoolean()) {
-            request.setMaxOperationSizeInBytes(randomNonNegativeLong());
+            request.setMaxBatchSize(new ByteSizeValue(randomNonNegativeLong(), ByteSizeUnit.BYTES));
        }
         if (randomBoolean()) {
             request.setMaxWriteBufferSize(randomIntBetween(1, Integer.MAX_VALUE));
@@ -68,7 +70,7 @@ public class FollowIndexRequestTests extends AbstractStreamableXContentTestCase {
     }
 
     public void testValidate() {
-        FollowIndexAction.Request request = new FollowIndexAction.Request();
+        ResumeFollowAction.Request request = new ResumeFollowAction.Request();
         request.setLeaderIndex("index1");
         request.setFollowerIndex("index2");
         request.setMaxRetryDelay(TimeValue.ZERO);
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternRequestTests.java
new file mode 100644
index 00000000000..05b731127cc
--- /dev/null
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternRequestTests.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ccr.action;
+
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
+import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction;
+
+public class GetAutoFollowPatternRequestTests extends AbstractWireSerializingTestCase<GetAutoFollowPatternAction.Request> {
+
+    @Override
+    protected Writeable.Reader<GetAutoFollowPatternAction.Request> instanceReader() {
+        return GetAutoFollowPatternAction.Request::new;
+    }
+
+    @Override
+    protected GetAutoFollowPatternAction.Request createTestInstance() {
+        GetAutoFollowPatternAction.Request request = new GetAutoFollowPatternAction.Request();
+        if (randomBoolean()) {
+            request.setLeaderClusterAlias(randomAlphaOfLength(4));
+        }
+        return request;
+    }
+}
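The new-file tests in this patch split between two serialization bases: AbstractWireSerializingTestCase for Writeable classes (as just above) and AbstractStreamableTestCase for Streamable ones (the PutFollow tests below). A skeleton of the Writeable flavor, with MyRequest as a placeholder for any class that has a StreamInput constructor:

--------------------------------------------------
public class MyRequestWireTests extends AbstractWireSerializingTestCase<MyRequest> {

    @Override
    protected Writeable.Reader<MyRequest> instanceReader() {
        return MyRequest::new; // round-trips createTestInstance() through StreamOutput/StreamInput
    }

    @Override
    protected MyRequest createTestInstance() {
        return new MyRequest(randomAlphaOfLength(4)); // randomized instance to serialize
    }
}
--------------------------------------------------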
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java
new file mode 100644
index 00000000000..f9850c4e2ed
--- /dev/null
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ccr.action;
+
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.test.AbstractStreamableTestCase;
+import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern;
+import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+public class GetAutoFollowPatternResponseTests extends AbstractStreamableTestCase<GetAutoFollowPatternAction.Response> {
+
+    @Override
+    protected GetAutoFollowPatternAction.Response createBlankInstance() {
+        return new GetAutoFollowPatternAction.Response();
+    }
+
+    @Override
+    protected GetAutoFollowPatternAction.Response createTestInstance() {
+        int numPatterns = randomIntBetween(1, 8);
+        Map<String, AutoFollowPattern> patterns = new HashMap<>(numPatterns);
+        for (int i = 0; i < numPatterns; i++) {
+            AutoFollowPattern autoFollowPattern = new AutoFollowPattern(
+                Collections.singletonList(randomAlphaOfLength(4)),
+                randomAlphaOfLength(4),
+                randomIntBetween(0, Integer.MAX_VALUE),
+                randomIntBetween(0, Integer.MAX_VALUE),
+                new ByteSizeValue(randomNonNegativeLong(), ByteSizeUnit.BYTES),
+                randomIntBetween(0, Integer.MAX_VALUE),
+                randomIntBetween(0, Integer.MAX_VALUE),
+                TimeValue.timeValueMillis(500),
+                TimeValue.timeValueMillis(500));
+            patterns.put(randomAlphaOfLength(4), autoFollowPattern);
+        }
+        return new GetAutoFollowPatternAction.Response(patterns);
+    }
+}
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java
index 6fafaab7995..865b8884dbe 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java
@@ -6,6 +6,8 @@ package org.elasticsearch.xpack.ccr.action;
 
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractStreamableXContentTestCase;
@@ -60,7 +62,7 @@ public class PutAutoFollowPatternRequestTests extends AbstractStreamableXContent
             request.setMaxConcurrentWriteBatches(randomIntBetween(0, Integer.MAX_VALUE));
         }
         if (randomBoolean()) {
-            request.setMaxOperationSizeInBytes(randomNonNegativeLong());
+            request.setMaxBatchSize(new ByteSizeValue(randomNonNegativeLong(), ByteSizeUnit.BYTES));
         }
         if (randomBoolean()) {
             request.setMaxWriteBufferSize(randomIntBetween(0, Integer.MAX_VALUE));
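For orientation, `createTestInstance` above exercises the nine-argument `AutoFollowPattern` constructor, one argument fewer than before this change (the per-pattern headers appear to have moved up to `AutoFollowMetadata`, as the delete/put tests below show). A hedged sketch with descriptive comments; the parameter names are assumptions inferred from the request setters, not the declared names:

    AutoFollowPattern pattern = new AutoFollowPattern(
        Collections.singletonList("logs-*"),     // leader index patterns
        "copy-{{leader_index}}",                 // follow index name pattern (syntax assumed)
        1024,                                    // max batch operation count
        1,                                       // max concurrent read batches
        new ByteSizeValue(32, ByteSizeUnit.MB),  // max batch size, now unit-aware
        1,                                       // max concurrent write batches
        10240,                                   // max write buffer size
        TimeValue.timeValueMillis(500),          // max retry delay
        TimeValue.timeValueMillis(500));         // poll timeout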
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java
new file mode 100644
index 00000000000..3c21abcfbf5
--- /dev/null
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ccr.action;
+
+import org.elasticsearch.test.AbstractStreamableTestCase;
+import org.elasticsearch.xpack.core.ccr.action.PutFollowAction;
+
+public class PutFollowActionRequestTests extends AbstractStreamableTestCase<PutFollowAction.Request> {
+
+    @Override
+    protected PutFollowAction.Request createBlankInstance() {
+        return new PutFollowAction.Request();
+    }
+
+    @Override
+    protected PutFollowAction.Request createTestInstance() {
+        return new PutFollowAction.Request(FollowIndexRequestTests.createTestRequest());
+    }
+}
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionResponseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionResponseTests.java
new file mode 100644
index 00000000000..506a5e6ffbb
--- /dev/null
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionResponseTests.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ccr.action;
+
+import org.elasticsearch.test.AbstractStreamableTestCase;
+import org.elasticsearch.xpack.core.ccr.action.PutFollowAction;
+
+public class PutFollowActionResponseTests extends AbstractStreamableTestCase<PutFollowAction.Response> {
+
+    @Override
+    protected PutFollowAction.Response createBlankInstance() {
+        return new PutFollowAction.Response();
+    }
+
+    @Override
+    protected PutFollowAction.Response createTestInstance() {
+        return new PutFollowAction.Response(randomBoolean(), randomBoolean(), randomBoolean());
+    }
+}
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesActionTests.java
index 88802be4e38..4824ac3c221 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesActionTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesActionTests.java
@@ -10,6 +10,8 @@ import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.ShardRoutingState;
 import org.elasticsearch.cluster.routing.TestShardRouting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexNotFoundException;
@@ -49,7 +51,7 @@
             .build();
         final IndexService indexService = createIndex("index", settings);
 
-        final int numWrites = randomIntBetween(2, 4096);
+        final int numWrites = randomIntBetween(10, 4096);
         for (int i = 0; i < numWrites; i++) {
             client().prepareIndex("index", "doc", Integer.toString(i)).setSource("{}", XContentType.JSON).get();
         }
@@ -61,8 +63,13 @@
         int min = randomIntBetween(0, numWrites - 1);
         int max = randomIntBetween(min, numWrites - 1);
         int size = max - min + 1;
-        final Translog.Operation[] operations = ShardChangesAction.getOperations(indexShard,
-            indexShard.getGlobalCheckpoint(), min, size, indexShard.getHistoryUUID(), Long.MAX_VALUE);
+        final Translog.Operation[] operations = ShardChangesAction.getOperations(
+            indexShard,
+            indexShard.getGlobalCheckpoint(),
+            min,
+            size,
+            indexShard.getHistoryUUID(),
+            new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES));
         final List<Long> seenSeqNos = Arrays.stream(operations).map(Translog.Operation::seqNo).collect(Collectors.toList());
         final List<Long> expectedSeqNos = LongStream.rangeClosed(min, max).boxed().collect(Collectors.toList());
         assertThat(seenSeqNos, equalTo(expectedSeqNos));
@@ -78,7 +85,7 @@
             numWrites,
             numWrites + 1,
             indexShard.getHistoryUUID(),
-            Long.MAX_VALUE));
+            new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES)));
         final String message = String.format(
             Locale.ROOT,
             "not exposing operations from [%d] greater than the global checkpoint [%d]",
@@ -89,14 +96,25 @@
         // get operations for a range some operations do not exist:
         Translog.Operation[] operations = ShardChangesAction.getOperations(indexShard, indexShard.getGlobalCheckpoint(),
-            numWrites - 10, numWrites + 10, indexShard.getHistoryUUID(), Long.MAX_VALUE);
+            numWrites - 10, numWrites + 10, indexShard.getHistoryUUID(), new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES));
         assertThat(operations.length, equalTo(10));
 
         // Unexpected history UUID:
         Exception e = expectThrows(IllegalStateException.class, () -> ShardChangesAction.getOperations(indexShard,
-            indexShard.getGlobalCheckpoint(), 0, 10, "different-history-uuid", Long.MAX_VALUE));
+            indexShard.getGlobalCheckpoint(), 0, 10, "different-history-uuid", new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES)));
         assertThat(e.getMessage(), equalTo("unexpected history uuid, expected [different-history-uuid], actual [" +
             indexShard.getHistoryUUID() + "]"));
+
+        // invalid range
+        {
+            final long fromSeqNo = randomLongBetween(Long.MIN_VALUE, -1);
+            final int batchSize = randomIntBetween(0, Integer.MAX_VALUE);
+            final IllegalArgumentException invalidRangeError = expectThrows(IllegalArgumentException.class,
+                () -> ShardChangesAction.getOperations(indexShard, indexShard.getGlobalCheckpoint(),
+                    fromSeqNo, batchSize, indexShard.getHistoryUUID(), new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES)));
+            assertThat(invalidRangeError.getMessage(),
+                equalTo("Invalid range; from_seqno [" + fromSeqNo + "], to_seqno [" + (fromSeqNo + batchSize - 1) + "]"));
+        }
     }
 
     public void testGetOperationsWhenShardNotStarted() throws Exception {
@@ -105,7 +123,7 @@
         ShardRouting shardRouting = TestShardRouting.newShardRouting("index", 0, "_node_id", true, ShardRoutingState.INITIALIZING);
         Mockito.when(indexShard.routingEntry()).thenReturn(shardRouting);
         expectThrows(IndexShardNotStartedException.class, () -> ShardChangesAction.getOperations(indexShard,
-            indexShard.getGlobalCheckpoint(), 0, 1, indexShard.getHistoryUUID(), Long.MAX_VALUE));
+            indexShard.getGlobalCheckpoint(), 0, 1, indexShard.getHistoryUUID(), new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES)));
     }
 
     public void testGetOperationsExceedByteLimit() throws Exception {
@@ -122,7 +140,7 @@
         final IndexShard indexShard = indexService.getShard(0);
 
         final Translog.Operation[] operations = ShardChangesAction.getOperations(indexShard, indexShard.getGlobalCheckpoint(),
-            0, 12, indexShard.getHistoryUUID(), 256);
+            0, 12, indexShard.getHistoryUUID(), new ByteSizeValue(256, ByteSizeUnit.BYTES));
         assertThat(operations.length, equalTo(12));
         assertThat(operations[0].seqNo(), equalTo(0L));
         assertThat(operations[1].seqNo(), equalTo(1L));
@@ -149,7 +167,8 @@
         final IndexShard indexShard = indexService.getShard(0);
 
         final Translog.Operation[] operations =
-            ShardChangesAction.getOperations(indexShard, indexShard.getGlobalCheckpoint(), 0, 1, indexShard.getHistoryUUID(), 0);
+            ShardChangesAction.getOperations(
+                indexShard, indexShard.getGlobalCheckpoint(), 0, 1, indexShard.getHistoryUUID(), ByteSizeValue.ZERO);
         assertThat(operations.length, equalTo(1));
         assertThat(operations[0].seqNo(), equalTo(0L));
     }
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java
index 04cf45f9d2c..a99e930188c 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java
@@ -15,6 +15,7 @@ public class ShardChangesResponseTests extends AbstractStreamableTestCase<
         ShardFollowNodeTaskStatus status = task.getStatus();
         assertThat(status.leaderGlobalCheckpoint(), equalTo(testRun.finalExpectedGlobalCheckpoint));
@@ -80,12 +80,11 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase {
             new ShardId("leader_index", "", 0),
             testRun.maxOperationCount,
             concurrency,
-            TransportFollowIndexAction.DEFAULT_MAX_BATCH_SIZE_IN_BYTES,
+            TransportResumeFollowAction.DEFAULT_MAX_BATCH_SIZE,
             concurrency,
             10240,
             TimeValue.timeValueMillis(10),
             TimeValue.timeValueMillis(10),
-            "uuid",
             Collections.emptyMap()
         );
 
@@ -111,9 +110,10 @@
             @Override
             protected void innerSendBulkShardOperationsRequest(
-                List<Translog.Operation> operations,
-                Consumer<BulkShardOperationsResponse> handler,
-                Consumer<Exception> errorHandler) {
+                String followerHistoryUUID, List<Translog.Operation> operations,
+                long maxSeqNoOfUpdates,
+                Consumer<BulkShardOperationsResponse> handler,
+                Consumer<Exception> errorHandler) {
                 for(Translog.Operation op : operations) {
                     tracker.markSeqNoAsCompleted(op.seqNo());
                 }
@@ -157,7 +157,8 @@
                     assert from >= testRun.finalExpectedGlobalCheckpoint;
                     final long globalCheckpoint = tracker.getCheckpoint();
                     final long maxSeqNo = tracker.getMaxSeqNo();
-                    handler.accept(new ShardChangesAction.Response(0L,globalCheckpoint, maxSeqNo, new Translog.Operation[0]));
+                    handler.accept(new ShardChangesAction.Response(
+                        0L, globalCheckpoint, maxSeqNo, randomNonNegativeLong(), new Translog.Operation[0]));
                 }
             };
             threadPool.generic().execute(task);
@@ -231,6 +232,7 @@
                             mappingVersion,
                             nextGlobalCheckPoint,
                             nextGlobalCheckPoint,
+                            randomNonNegativeLong(),
                             ops.toArray(EMPTY))
                     )
                 );
@@ -253,6 +255,7 @@
                         mappingVersion,
                         prevGlobalCheckpoint,
                         prevGlobalCheckpoint,
+                        randomNonNegativeLong(),
                         EMPTY
                     );
                     item.add(new TestResponse(null, mappingVersion, response));
@@ -269,6 +272,7 @@
                             mappingVersion,
                             localLeaderGCP,
                             localLeaderGCP,
+                            randomNonNegativeLong(),
                             ops.toArray(EMPTY)
                         );
                         item.add(new TestResponse(null, mappingVersion, response));
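The invalid-range case added to `ShardChangesActionTests` above pins the bounds check down: `to_seqno` is derived as `from_seqno + batch_size - 1`, and a negative `from_seqno` must be rejected before any history is read. A self-contained sketch of the validation the assertion implies (hypothetical helper, not the production method):

    static void checkSeqNoRange(final long fromSeqNo, final int batchSize) {
        final long toSeqNo = fromSeqNo + batchSize - 1;
        if (fromSeqNo < 0 || fromSeqNo > toSeqNo) {
            // message format taken from the expected-failure assertion in the test
            throw new IllegalArgumentException(
                "Invalid range; from_seqno [" + fromSeqNo + "], to_seqno [" + toSeqNo + "]");
        }
    }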
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java
index 7e813ae4cf6..8727f8b907b 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java
@@ -8,6 +8,8 @@ package org.elasticsearch.xpack.ccr.action;
 
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.shard.ShardNotFoundException;
@@ -123,7 +125,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase {
         shardChangesRequests.clear();
 
         // The call the updateMapping is a noop, so noting happens.
-        task.start(128L, 128L, task.getStatus().followerGlobalCheckpoint(), task.getStatus().followerMaxSeqNo());
+        task.start("uuid", 128L, 128L, task.getStatus().followerGlobalCheckpoint(), task.getStatus().followerMaxSeqNo());
         task.markAsCompleted();
         task.coordinateReads();
         assertThat(shardChangesRequests.size(), equalTo(0));
@@ -407,7 +409,7 @@
         assertThat(shardChangesRequests.get(0)[1], equalTo(64L));
 
         shardChangesRequests.clear();
-        task.innerHandleReadResponse(0L, 63L, new ShardChangesAction.Response(0, 0, 0, new Translog.Operation[0]));
+        task.innerHandleReadResponse(0L, 63L, new ShardChangesAction.Response(0, 0, 0, 100, new Translog.Operation[0]));
 
         assertThat(shardChangesRequests.size(), equalTo(1));
         assertThat(shardChangesRequests.get(0)[0], equalTo(0L));
@@ -675,12 +677,11 @@
             new ShardId("leader_index", "", 0),
             maxBatchOperationCount,
             maxConcurrentReadBatches,
-            maxBatchSizeInBytes,
+            new ByteSizeValue(maxBatchSizeInBytes, ByteSizeUnit.BYTES),
             maxConcurrentWriteBatches,
             bufferWriteLimit,
             TimeValue.ZERO,
             TimeValue.ZERO,
-            "uuid",
             Collections.emptyMap()
         );
 
@@ -713,9 +714,10 @@
             @Override
             protected void innerSendBulkShardOperationsRequest(
-                final List<Translog.Operation> operations,
-                final Consumer<BulkShardOperationsResponse> handler,
-                final Consumer<Exception> errorHandler) {
+                String followerHistoryUUID, final List<Translog.Operation> operations,
+                final long maxSeqNoOfUpdates,
+                final Consumer<BulkShardOperationsResponse> handler,
+                final Consumer<Exception> errorHandler) {
                 bulkShardOperationRequests.add(operations);
                 Exception writeFailure = ShardFollowNodeTaskTests.this.writeFailures.poll();
                 if (writeFailure != null) {
@@ -749,6 +751,7 @@
                     mappingVersions.poll(),
                     leaderGlobalCheckpoints.poll(),
                     maxSeqNos.poll(),
+                    randomNonNegativeLong(),
                     operations
                 );
                 handler.accept(response);
@@ -785,13 +788,14 @@
             mappingVersion,
             leaderGlobalCheckPoint,
             leaderGlobalCheckPoint,
+            randomNonNegativeLong(),
             ops.toArray(new Translog.Operation[0])
         );
     }
 
     void startTask(ShardFollowNodeTask task, long leaderGlobalCheckpoint, long followerGlobalCheckpoint) {
         // The call the updateMapping is a noop, so noting happens.
-        task.start(leaderGlobalCheckpoint, leaderGlobalCheckpoint, followerGlobalCheckpoint, followerGlobalCheckpoint);
+        task.start("uuid", leaderGlobalCheckpoint, leaderGlobalCheckpoint, followerGlobalCheckpoint, followerGlobalCheckpoint);
     }
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java
index 0bb263d3c44..055005b9e7d 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java
@@ -16,6 +16,8 @@ import org.elasticsearch.action.support.replication.TransportWriteAction;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.engine.Engine.Operation.Origin;
@@ -30,6 +32,7 @@ import org.elasticsearch.xpack.ccr.CcrSettings;
 import org.elasticsearch.xpack.ccr.action.bulk.BulkShardOperationsRequest;
 import org.elasticsearch.xpack.ccr.action.bulk.BulkShardOperationsResponse;
 import org.elasticsearch.xpack.ccr.action.bulk.TransportBulkShardOperationsAction;
+import org.elasticsearch.xpack.ccr.index.engine.FollowingEngine;
 import org.elasticsearch.xpack.ccr.index.engine.FollowingEngineFactory;
 
 import java.io.IOException;
@@ -60,6 +63,7 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest
             final SeqNoStats leaderSeqNoStats = leaderGroup.getPrimary().seqNoStats();
             final SeqNoStats followerSeqNoStats = followerGroup.getPrimary().seqNoStats();
             shardFollowTask.start(
+                followerGroup.getPrimary().getHistoryUUID(),
                 leaderSeqNoStats.getGlobalCheckpoint(),
                 leaderSeqNoStats.getMaxSeqNo(),
                 followerSeqNoStats.getGlobalCheckpoint(),
@@ -72,6 +76,9 @@
                 assertThat(followerGroup.getPrimary().getGlobalCheckpoint(), equalTo(leaderGroup.getPrimary().getGlobalCheckpoint()));
                 followerGroup.assertAllEqual(indexedDocIds.size());
             });
+            for (IndexShard shard : followerGroup) {
+                assertThat(((FollowingEngine) (getEngine(shard))).getNumberOfOptimizedIndexing(), equalTo((long) docCount));
+            }
             // Deletes should be replicated to the follower
             List<String> deleteDocIds = randomSubsetOf(indexedDocIds);
             for (String deleteId : deleteDocIds) {
@@ -97,6 +104,7 @@
             final SeqNoStats leaderSeqNoStats = leaderGroup.getPrimary().seqNoStats();
             final SeqNoStats followerSeqNoStats = followerGroup.getPrimary().seqNoStats();
             shardFollowTask.start(
+                followerGroup.getPrimary().getHistoryUUID(),
                 leaderSeqNoStats.getGlobalCheckpoint(),
                 leaderSeqNoStats.getMaxSeqNo(),
                 followerSeqNoStats.getGlobalCheckpoint(),
@@ -131,7 +139,7 @@
         }
     }
 
-    public void testChangeHistoryUUID() throws Exception {
+    public void testChangeLeaderHistoryUUID() throws Exception {
         try (ReplicationGroup leaderGroup = createGroup(0);
              ReplicationGroup followerGroup = createFollowGroup(0)) {
             leaderGroup.startAll();
@@ -142,6 +150,7 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest
             final SeqNoStats leaderSeqNoStats = leaderGroup.getPrimary().seqNoStats();
             final SeqNoStats followerSeqNoStats = followerGroup.getPrimary().seqNoStats();
             shardFollowTask.start(
+                followerGroup.getPrimary().getHistoryUUID(),
                 leaderSeqNoStats.getGlobalCheckpoint(),
                 leaderSeqNoStats.getMaxSeqNo(),
                 followerSeqNoStats.getGlobalCheckpoint(),
@@ -171,6 +180,47 @@
         }
     }
 
+    public void testChangeFollowerHistoryUUID() throws Exception {
+        try (ReplicationGroup leaderGroup = createGroup(0);
+             ReplicationGroup followerGroup = createFollowGroup(0)) {
+            leaderGroup.startAll();
+            int docCount = leaderGroup.appendDocs(randomInt(64));
+            leaderGroup.assertAllEqual(docCount);
+            followerGroup.startAll();
+            ShardFollowNodeTask shardFollowTask = createShardFollowTask(leaderGroup, followerGroup);
+            final SeqNoStats leaderSeqNoStats = leaderGroup.getPrimary().seqNoStats();
+            final SeqNoStats followerSeqNoStats = followerGroup.getPrimary().seqNoStats();
+            shardFollowTask.start(
+                followerGroup.getPrimary().getHistoryUUID(),
+                leaderSeqNoStats.getGlobalCheckpoint(),
+                leaderSeqNoStats.getMaxSeqNo(),
+                followerSeqNoStats.getGlobalCheckpoint(),
+                followerSeqNoStats.getMaxSeqNo());
+            leaderGroup.syncGlobalCheckpoint();
+            leaderGroup.assertAllEqual(docCount);
+            Set<String> indexedDocIds = getShardDocUIDs(leaderGroup.getPrimary());
+            assertBusy(() -> {
+                assertThat(followerGroup.getPrimary().getGlobalCheckpoint(), equalTo(leaderGroup.getPrimary().getGlobalCheckpoint()));
+                followerGroup.assertAllEqual(indexedDocIds.size());
+            });
+
+            String oldHistoryUUID = followerGroup.getPrimary().getHistoryUUID();
+            followerGroup.reinitPrimaryShard();
+            followerGroup.getPrimary().store().bootstrapNewHistory();
+            recoverShardFromStore(followerGroup.getPrimary());
+            String newHistoryUUID = followerGroup.getPrimary().getHistoryUUID();
+
+            // force the global checkpoint on the leader to advance
+            leaderGroup.appendDocs(64);
+
+            assertBusy(() -> {
+                assertThat(shardFollowTask.isStopped(), is(true));
+                assertThat(shardFollowTask.getFailure().getMessage(), equalTo("unexpected history uuid, expected [" + oldHistoryUUID +
+                    "], actual [" + newHistoryUUID + "], shard is likely restored from snapshot or force allocated"));
+            });
+        }
+    }
+
     @Override
     protected ReplicationGroup createGroup(int replicas, Settings settings) throws IOException {
         Settings newSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
@@ -207,13 +257,13 @@
             new ShardId("leader_index", "", 0),
             between(1, 64),
             between(1, 8),
-            Long.MAX_VALUE,
+            new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES),
             between(1, 4),
             10240,
             TimeValue.timeValueMillis(10),
             TimeValue.timeValueMillis(10),
-            leaderGroup.getPrimary().getHistoryUUID(),
             Collections.emptyMap()
         );
+        final String recordedLeaderIndexHistoryUUID = leaderGroup.getPrimary().getHistoryUUID();
 
         BiConsumer<TimeValue, Runnable> scheduler = (delay, task) -> threadPool.schedule(delay, ThreadPool.Names.GENERIC, task);
         AtomicBoolean stopped = new AtomicBoolean(false);
@@ -239,11 +289,14 @@
             @Override
             protected void innerSendBulkShardOperationsRequest(
-                final List<Translog.Operation> operations,
-                final Consumer<BulkShardOperationsResponse> handler,
-                final Consumer<Exception> errorHandler) {
+                final String followerHistoryUUID,
+                final List<Translog.Operation> operations,
+                final long maxSeqNoOfUpdates,
+                final Consumer<BulkShardOperationsResponse> handler,
+                final Consumer<Exception> errorHandler) {
                 Runnable task = () -> {
-                    BulkShardOperationsRequest request = new BulkShardOperationsRequest(params.getFollowShardId(), operations);
+                    BulkShardOperationsRequest request = new BulkShardOperationsRequest(params.getFollowShardId(),
+                        followerHistoryUUID, operations, maxSeqNoOfUpdates);
                     ActionListener<BulkShardOperationsResponse> listener = ActionListener.wrap(handler::accept, errorHandler);
                     new CCRAction(request, listener, followerGroup).execute();
                 };
@@ -262,17 +315,20 @@
                 for (IndexShard indexShard : indexShards) {
                     try {
                         final SeqNoStats seqNoStats = indexShard.seqNoStats();
+                        final long maxSeqNoOfUpdatesOrDeletes = indexShard.getMaxSeqNoOfUpdatesOrDeletes();
                         if (from > seqNoStats.getGlobalCheckpoint()) {
-                            handler.accept(ShardChangesAction.getResponse(1L, seqNoStats, ShardChangesAction.EMPTY_OPERATIONS_ARRAY));
+                            handler.accept(ShardChangesAction.getResponse(1L, seqNoStats,
+                                maxSeqNoOfUpdatesOrDeletes, ShardChangesAction.EMPTY_OPERATIONS_ARRAY));
                             return;
                         }
                         Translog.Operation[] ops = ShardChangesAction.getOperations(indexShard, seqNoStats.getGlobalCheckpoint(), from,
-                            maxOperationCount, params.getRecordedLeaderIndexHistoryUUID(), params.getMaxBatchSizeInBytes());
+                            maxOperationCount, recordedLeaderIndexHistoryUUID, params.getMaxBatchSize());
                         // hard code mapping version; this is ok, as mapping updates are not tested here
                         final ShardChangesAction.Response response = new ShardChangesAction.Response(
                             1L,
                             seqNoStats.getGlobalCheckpoint(),
                             seqNoStats.getMaxSeqNo(),
+                            maxSeqNoOfUpdatesOrDeletes,
                             ops
                         );
                         handler.accept(response);
@@ -315,6 +371,9 @@
         for (IndexShard followingShard : follower) {
             assertThat(followingShard.estimateNumberOfHistoryOperations("test", 0), equalTo(totalOps));
         }
+        for (IndexShard followingShard : follower) {
+            assertThat(followingShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(leader.getPrimary().getMaxSeqNoOfUpdatesOrDeletes()));
+        }
     }
 
     class CCRAction extends ReplicationAction<BulkShardOperationsRequest, BulkShardOperationsRequest, BulkShardOperationsResponse> {
@@ -326,8 +385,8 @@
         @Override
         protected PrimaryResult performOnPrimary(IndexShard primary, BulkShardOperationsRequest request) throws Exception {
             TransportWriteAction.WritePrimaryResult<BulkShardOperationsRequest, BulkShardOperationsResponse> result =
-                TransportBulkShardOperationsAction.shardOperationOnPrimary(primary.shardId(), request.getOperations(),
-                    primary, logger);
+                TransportBulkShardOperationsAction.shardOperationOnPrimary(primary.shardId(), request.getHistoryUUID(),
+                    request.getOperations(), request.getMaxSeqNoOfUpdatesOrDeletes(), primary, logger);
             return new PrimaryResult(result.replicaRequest(), result.finalResponseIfSuccessful);
         }
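`testChangeFollowerHistoryUUID` above expects the task to stop itself with a precise message once the follower's history UUID no longer matches the one the task was started with. A sketch of the guard that would produce that failure (hypothetical helper; per the hunks above, the real check sits in the bulk-operations primary path, which now receives `followerHistoryUUID` on every request):

    static void verifyHistoryUUID(final String expectedHistoryUUID, final String actualHistoryUUID) {
        if (expectedHistoryUUID.equals(actualHistoryUUID) == false) {
            // message format copied from the assertions in the tests above
            throw new IllegalStateException("unexpected history uuid, expected [" + expectedHistoryUUID
                + "], actual [" + actualHistoryUUID + "], shard is likely restored from snapshot or force allocated");
        }
    }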
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskTests.java
index fa11ddf4bf9..865d18e6067 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskTests.java
@@ -6,6 +6,8 @@ package org.elasticsearch.xpack.ccr.action;
 
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.shard.ShardId;
@@ -29,12 +31,11 @@ public class ShardFollowTaskTests extends AbstractSerializingTestCase<
+
+    @Override
+    protected CcrStatsAction.StatsRequest createBlankInstance() {
+        return new CcrStatsAction.StatsRequest();
+    }
+
+    @Override
+    protected CcrStatsAction.StatsRequest createTestInstance() {
+        CcrStatsAction.StatsRequest statsRequest = new CcrStatsAction.StatsRequest();
+        if (randomBoolean()) {
+            statsRequest.setIndices(generateRandomStringArray(8, 4, false));
+        }
+        return statsRequest;
+    }
+}
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsResponsesTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsResponsesTests.java
new file mode 100644
index 00000000000..b79f8db1923
--- /dev/null
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsResponsesTests.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ccr.action;
+
+import org.elasticsearch.test.AbstractStreamableTestCase;
+import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus;
+import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+public class StatsResponsesTests extends AbstractStreamableTestCase<CcrStatsAction.StatsResponses> {
+
+    @Override
+    protected CcrStatsAction.StatsResponses createBlankInstance() {
+        return new CcrStatsAction.StatsResponses();
+    }
+
+    @Override
+    protected CcrStatsAction.StatsResponses createTestInstance() {
+        int numResponses = randomIntBetween(0, 8);
+        List<CcrStatsAction.StatsResponse> responses = new ArrayList<>(numResponses);
+        for (int i = 0; i < numResponses; i++) {
+            ShardFollowNodeTaskStatus status = new ShardFollowNodeTaskStatus(
+                randomAlphaOfLength(4),
+                randomAlphaOfLength(4),
+                randomInt(),
+                randomNonNegativeLong(),
+                randomNonNegativeLong(),
+                randomNonNegativeLong(),
+                randomNonNegativeLong(),
+                randomNonNegativeLong(),
+                randomIntBetween(0, Integer.MAX_VALUE),
+                randomIntBetween(0, Integer.MAX_VALUE),
+                randomIntBetween(0, Integer.MAX_VALUE),
+                randomNonNegativeLong(),
+                randomNonNegativeLong(),
+                randomNonNegativeLong(),
+                randomNonNegativeLong(),
+                randomNonNegativeLong(),
+                randomNonNegativeLong(),
+                randomNonNegativeLong(),
+                randomNonNegativeLong(),
+                randomNonNegativeLong(),
+                randomNonNegativeLong(),
+                Collections.emptyNavigableMap(),
+                randomLong());
+            responses.add(new CcrStatsAction.StatsResponse(status));
+        }
+        return new CcrStatsAction.StatsResponses(Collections.emptyList(), Collections.emptyList(), responses);
+    }
+}
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternActionTests.java
index 2525b63de31..e2280316264 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternActionTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternActionTests.java
@@ -14,6 +14,7 @@ import org.elasticsearch.xpack.core.ccr.action.DeleteAutoFollowPatternAction.Req
 import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -25,30 +26,33 @@ public class TransportDeleteAutoFollowPatternActionTests extends ESTestCase {
 
     public void testInnerDelete() {
         Map<String, List<String>> existingAlreadyFollowedIndexUUIDS = new HashMap<>();
+        Map<String, Map<String, String>> existingHeaders = new HashMap<>();
         Map<String, AutoFollowMetadata.AutoFollowPattern> existingAutoFollowPatterns = new HashMap<>();
         {
             List<String> existingPatterns = new ArrayList<>();
             existingPatterns.add("transactions-*");
             existingAutoFollowPatterns.put("eu_cluster",
-                new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null, null));
+                new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null));
 
             List<String> existingUUIDS = new ArrayList<>();
             existingUUIDS.add("_val");
             existingAlreadyFollowedIndexUUIDS.put("eu_cluster", existingUUIDS);
+            existingHeaders.put("eu_cluster", Collections.singletonMap("key", "val"));
         }
         {
             List<String> existingPatterns = new ArrayList<>();
             existingPatterns.add("logs-*");
             existingAutoFollowPatterns.put("asia_cluster",
-                new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null, null));
+                new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null));
 
             List<String> existingUUIDS = new ArrayList<>();
             existingUUIDS.add("_val");
             existingAlreadyFollowedIndexUUIDS.put("asia_cluster", existingUUIDS);
+            existingHeaders.put("asia_cluster", Collections.singletonMap("key", "val"));
         }
         ClusterState clusterState = ClusterState.builder(new ClusterName("us_cluster"))
             .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
-                new AutoFollowMetadata(existingAutoFollowPatterns, existingAlreadyFollowedIndexUUIDS)))
+                new AutoFollowMetadata(existingAutoFollowPatterns, existingAlreadyFollowedIndexUUIDS, existingHeaders)))
             .build();
 
         Request request = new Request();
@@ -60,20 +64,24 @@
         assertThat(result.getPatterns().get("asia_cluster"), notNullValue());
         assertThat(result.getFollowedLeaderIndexUUIDs().size(), equalTo(1));
         assertThat(result.getFollowedLeaderIndexUUIDs().get("asia_cluster"), notNullValue());
+        assertThat(result.getHeaders().size(), equalTo(1));
+        assertThat(result.getHeaders().get("asia_cluster"), notNullValue());
     }
 
     public void testInnerDeleteDoesNotExist() {
         Map<String, List<String>> existingAlreadyFollowedIndexUUIDS = new HashMap<>();
         Map<String, AutoFollowMetadata.AutoFollowPattern> existingAutoFollowPatterns = new HashMap<>();
+        Map<String, Map<String, String>> existingHeaders = new HashMap<>();
         {
             List<String> existingPatterns = new ArrayList<>();
             existingPatterns.add("transactions-*");
             existingAutoFollowPatterns.put("eu_cluster",
-                new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null, null));
+                new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null));
+            existingHeaders.put("key", Collections.singletonMap("key", "val"));
         }
         ClusterState clusterState = ClusterState.builder(new ClusterName("us_cluster"))
             .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
-                new AutoFollowMetadata(existingAutoFollowPatterns, existingAlreadyFollowedIndexUUIDS)))
+                new AutoFollowMetadata(existingAutoFollowPatterns, existingAlreadyFollowedIndexUUIDS, existingHeaders)))
             .build();
 
         Request request = new Request();
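Both delete tests above feed the new three-argument `AutoFollowMetadata` constructor, which now tracks per-cluster security headers alongside the patterns and the already-followed index UUIDs. A sketch of assembling it, mirroring the test setup (the generic shapes are inferred from usage, since the declarations themselves are not in this patch):

    Map<String, AutoFollowMetadata.AutoFollowPattern> patterns = new HashMap<>();
    Map<String, List<String>> followedLeaderIndexUUIDs = new HashMap<>();
    Map<String, Map<String, String>> headers = new HashMap<>();
    headers.put("eu_cluster", Collections.singletonMap("key", "val"));

    AutoFollowMetadata metadata = new AutoFollowMetadata(patterns, followedLeaderIndexUUIDs, headers);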
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java
new file mode 100644
index 00000000000..187e404abbb
--- /dev/null
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.ccr.action;
+
+import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata;
+import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.collection.IsMapContaining.hasEntry;
+
+public class TransportGetAutoFollowPatternActionTests extends ESTestCase {
+
+    public void testGetAutoFollowPattern() {
+        Map<String, AutoFollowPattern> patterns = new HashMap<>();
+        patterns.put("test_alias1",
+            new AutoFollowPattern(Collections.singletonList("index-*"), null, null, null, null, null, null, null, null));
+        patterns.put("test_alias2",
+            new AutoFollowPattern(Collections.singletonList("index-*"), null, null, null, null, null, null, null, null));
+        MetaData metaData = MetaData.builder()
+            .putCustom(AutoFollowMetadata.TYPE, new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap()))
+            .build();
+
+        Map<String, AutoFollowPattern> result = TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "test_alias1");
+        assertThat(result.size(), equalTo(1));
+        assertThat(result, hasEntry("test_alias1", patterns.get("test_alias1")));
+
+        result = TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, null);
+        assertThat(result.size(), equalTo(2));
+        assertThat(result, hasEntry("test_alias1", patterns.get("test_alias1")));
+        assertThat(result, hasEntry("test_alias2", patterns.get("test_alias2")));
+
+        expectThrows(ResourceNotFoundException.class,
+            () -> TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "another_alias"));
+    }
+
+    public void testGetAutoFollowPatternNoAutoFollowPatterns() {
+        AutoFollowMetadata autoFollowMetadata =
+            new AutoFollowMetadata(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
+        MetaData metaData = MetaData.builder()
+            .putCustom(AutoFollowMetadata.TYPE, autoFollowMetadata)
+            .build();
+        expectThrows(ResourceNotFoundException.class,
+            () -> TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "test_alias"));
+    }
+
+    public void testGetAutoFollowPatternNoAutoFollowMetadata() {
+        MetaData metaData = MetaData.builder().build();
+        expectThrows(ResourceNotFoundException.class,
+            () -> TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "test_alias"));
+    }
+
+}
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java
index 5731a64ba89..c208a4b042d 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java
@@ -97,14 +97,17 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase {
         List<String> existingPatterns = new ArrayList<>();
         existingPatterns.add("transactions-*");
         existingAutoFollowPatterns.put("eu_cluster",
-            new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null, null));
+            new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null));
 
         Map<String, List<String>> existingAlreadyFollowedIndexUUIDS = new HashMap<>();
         List<String> existingUUIDS = new ArrayList<>();
         existingUUIDS.add("_val");
         existingAlreadyFollowedIndexUUIDS.put("eu_cluster", existingUUIDS);
+        Map<String, Map<String, String>> existingHeaders = new HashMap<>();
+        existingHeaders.put("eu_cluster", Collections.singletonMap("key", "val"));
+
         ClusterState localState = ClusterState.builder(new ClusterName("us_cluster"))
             .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
-                new AutoFollowMetadata(existingAutoFollowPatterns, existingAlreadyFollowedIndexUUIDS)))
+                new AutoFollowMetadata(existingAutoFollowPatterns, existingAlreadyFollowedIndexUUIDS, existingHeaders)))
             .build();
 
         int numLeaderIndices = randomIntBetween(1, 8);
@@ -129,6 +132,8 @@
         assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().get(1), equalTo("transactions-*"));
         assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().size(), equalTo(1));
         assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().get("eu_cluster").size(), equalTo(numLeaderIndices + 1));
+        assertThat(autoFollowMetadata.getHeaders().size(), equalTo(1));
+        assertThat(autoFollowMetadata.getHeaders().get("eu_cluster"), notNullValue());
     }
 }
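`TransportGetAutoFollowPatternActionTests` above fixes the lookup contract: a concrete alias returns exactly one entry, `null` returns every configured pattern, and an unknown alias (or missing metadata) raises `ResourceNotFoundException`. A behavior sketch under those assumptions (the exception message here is invented for illustration; the tests only assert the exception type):

    static Map<String, AutoFollowPattern> getAutoFollowPattern(Map<String, AutoFollowPattern> patterns, String name) {
        if (name == null) {
            return patterns; // no alias given: return all configured patterns
        }
        AutoFollowPattern pattern = patterns.get(name);
        if (pattern == null) {
            throw new ResourceNotFoundException("no auto-follow pattern for [" + name + "]"); // message assumed
        }
        return Collections.singletonMap(name, pattern);
    }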
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportFollowIndexActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java
similarity index 97%
rename from x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportFollowIndexActionTests.java
rename to x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java
index 8d4704566fd..442180c7089 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportFollowIndexActionTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java
@@ -17,7 +17,7 @@ import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.ccr.Ccr;
 import org.elasticsearch.xpack.ccr.CcrSettings;
 import org.elasticsearch.xpack.ccr.ShardChangesIT;
-import org.elasticsearch.xpack.core.ccr.action.FollowIndexAction;
+import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction;
 
 import java.io.IOException;
 import java.util.HashMap;
@@ -25,17 +25,17 @@ import java.util.Map;
 
 import static java.util.Collections.emptyMap;
 import static java.util.Collections.singletonMap;
-import static org.elasticsearch.xpack.ccr.action.TransportFollowIndexAction.validate;
+import static org.elasticsearch.xpack.ccr.action.TransportResumeFollowAction.validate;
 import static org.hamcrest.Matchers.equalTo;
 
-public class TransportFollowIndexActionTests extends ESTestCase {
+public class TransportResumeFollowActionTests extends ESTestCase {
 
     public void testValidation() throws IOException {
         final Map<String, String> customMetaData = new HashMap<>();
         customMetaData.put(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_SHARD_HISTORY_UUIDS, "uuid");
         customMetaData.put(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_UUID_KEY, "_na_");
 
-        FollowIndexAction.Request request = ShardChangesIT.createFollowRequest("index1", "index2");
+        ResumeFollowAction.Request request = ShardChangesIT.resumeFollow("index1", "index2");
         String[] UUIDs = new String[]{"uuid"};
         {
             // should fail, because leader index does not exist
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowActionTests.java
new file mode 100644
index 00000000000..07b0fc078ac
--- /dev/null
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowActionTests.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.ccr.action;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.ccr.Ccr;
+import org.elasticsearch.xpack.ccr.CcrSettings;
+
+import java.util.Collections;
+import java.util.HashMap;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
+
+public class TransportUnfollowActionTests extends ESTestCase {
+
+    public void testUnfollow() {
+        IndexMetaData.Builder followerIndex = IndexMetaData.builder("follow_index")
+            .settings(settings(Version.CURRENT).put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true))
+            .numberOfShards(1)
+            .numberOfReplicas(0)
+            .state(IndexMetaData.State.CLOSE)
+            .putCustom(Ccr.CCR_CUSTOM_METADATA_KEY, new HashMap<>());
+
+        ClusterState current = ClusterState.builder(new ClusterName("cluster_name"))
+            .metaData(MetaData.builder()
+                .put(followerIndex)
+                .build())
+            .build();
+        ClusterState result = TransportUnfollowAction.unfollow("follow_index", current);
+
+        IndexMetaData resultIMD = result.metaData().index("follow_index");
+        assertThat(resultIMD.getSettings().get(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey()), nullValue());
+        assertThat(resultIMD.getCustomData(Ccr.CCR_CUSTOM_METADATA_KEY), nullValue());
+    }
+
+    public void testUnfollowIndexOpen() {
+        IndexMetaData.Builder followerIndex = IndexMetaData.builder("follow_index")
+            .settings(settings(Version.CURRENT).put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true))
+            .numberOfShards(1)
+            .numberOfReplicas(0)
+            .putCustom(Ccr.CCR_CUSTOM_METADATA_KEY, new HashMap<>());
+
+        ClusterState current = ClusterState.builder(new ClusterName("cluster_name"))
+            .metaData(MetaData.builder()
+                .put(followerIndex)
+                .build())
+            .build();
+        Exception e = expectThrows(IllegalArgumentException.class, () -> TransportUnfollowAction.unfollow("follow_index", current));
+        assertThat(e.getMessage(),
+            equalTo("cannot convert the follower index [follow_index] to a non-follower, because it has not been closed"));
+    }
+
+    public void testUnfollowRunningShardFollowTasks() {
+        IndexMetaData.Builder followerIndex = IndexMetaData.builder("follow_index")
+            .settings(settings(Version.CURRENT).put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true))
+            .numberOfShards(1)
+            .numberOfReplicas(0)
+            .state(IndexMetaData.State.CLOSE)
+            .putCustom(Ccr.CCR_CUSTOM_METADATA_KEY, new HashMap<>());
+
+
+        ShardFollowTask params = new ShardFollowTask(
+            null,
+            new ShardId("follow_index", "", 0),
+            new ShardId("leader_index", "", 0),
+            1024,
+            1,
+            TransportResumeFollowAction.DEFAULT_MAX_BATCH_SIZE,
+            1,
+            10240,
+            TimeValue.timeValueMillis(10),
+            TimeValue.timeValueMillis(10),
+            Collections.emptyMap()
+        );
+        PersistentTasksCustomMetaData.PersistentTask<ShardFollowTask> task =
+            new PersistentTasksCustomMetaData.PersistentTask<>("id", ShardFollowTask.NAME, params, 0, null);
+
+        ClusterState current = ClusterState.builder(new ClusterName("cluster_name"))
+            .metaData(MetaData.builder()
+                .put(followerIndex)
+                .putCustom(PersistentTasksCustomMetaData.TYPE, new PersistentTasksCustomMetaData(0, Collections.singletonMap("id", task)))
+                .build())
+            .build();
+        Exception e = expectThrows(IllegalArgumentException.class, () -> TransportUnfollowAction.unfollow("follow_index", current));
+        assertThat(e.getMessage(),
+            equalTo("cannot convert the follower index [follow_index] to a non-follower, because it has not been paused"));
+    }
+
+}
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsTests.java
index 4c6c0c060e4..fe85e8a7445 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsTests.java
@@ -59,7 +59,9 @@ public class BulkShardOperationsTests extends IndexShardTestCase {
         }
 
         final TransportWriteAction.WritePrimaryResult<BulkShardOperationsRequest, BulkShardOperationsResponse> result =
-            TransportBulkShardOperationsAction.shardOperationOnPrimary(followerPrimary.shardId(), operations, followerPrimary, logger);
+            TransportBulkShardOperationsAction.shardOperationOnPrimary(followerPrimary.shardId(), followerPrimary.getHistoryUUID(),
+                operations,
+                numOps - 1, followerPrimary, logger);
 
         try (Translog.Snapshot snapshot = followerPrimary.getHistoryOperations("test", 0)) {
             assertThat(snapshot.totalOperations(), equalTo(operations.size()));
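`TransportUnfollowActionTests` above nails down the two preconditions for converting a follower into a regular index: the index must be closed, and no persistent shard-follow task may still point at it. A condensed sketch of that gatekeeping (hypothetical helper; the messages are copied from the test assertions):

    static void ensureUnfollowable(String index, boolean closed, boolean hasShardFollowTask) {
        if (closed == false) {
            throw new IllegalArgumentException("cannot convert the follower index [" + index
                + "] to a non-follower, because it has not been closed");
        }
        if (hasShardFollowTask) {
            throw new IllegalArgumentException("cannot convert the follower index [" + index
                + "] to a non-follower, because it has not been paused");
        }
    }

On success the action also clears the `index.xpack.ccr.following_index` setting and the CCR custom metadata, which is exactly what `testUnfollow` asserts.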
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java
index b3e2d12227b..ce67cfe2d44 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java
@@ -6,8 +6,6 @@ package org.elasticsearch.xpack.ccr.index.engine;
 
 import org.apache.logging.log4j.Logger;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
@@ -16,13 +14,11 @@ import org.elasticsearch.Version;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.CheckedBiConsumer;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.Randomness;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
-import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.VersionType;
@@ -30,11 +26,10 @@ import org.elasticsearch.index.codec.CodecService;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.engine.EngineConfig;
 import org.elasticsearch.index.engine.EngineTestCase;
+import org.elasticsearch.index.engine.InternalEngine;
 import org.elasticsearch.index.engine.TranslogHandler;
-import org.elasticsearch.index.mapper.IdFieldMapper;
-import org.elasticsearch.index.mapper.ParseContext;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.index.mapper.SeqNoFieldMapper;
 import org.elasticsearch.index.seqno.SequenceNumbers;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.store.Store;
@@ -48,12 +43,20 @@ import org.elasticsearch.threadpool.ThreadPool;
 
 import java.io.IOException;
 import java.nio.file.Path;
+import java.util.ArrayList;
 import java.util.Collections;
+import java.util.Iterator;
 import java.util.List;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
+import java.util.stream.Collectors;
 
+import static org.elasticsearch.index.engine.EngineTestCase.getDocIds;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.hasToString;
 
 public class FollowingEngineTests extends ESTestCase {
@@ -127,6 +130,7 @@
             final VersionType versionType =
                 randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL, VersionType.EXTERNAL_GTE, VersionType.FORCE);
             final List<Engine.Operation> ops = EngineTestCase.generateSingleDocHistory(true, versionType, 2, 2, 20, "id");
+            ops.stream().mapToLong(op -> op.seqNo()).max().ifPresent(followingEngine::advanceMaxSeqNoOfUpdatesOrDeletes);
             EngineTestCase.assertOpsOnReplica(ops, followingEngine, true, logger);
         }
     }
@@ -148,7 +152,7 @@
         try (Store store = createStore(shardId, indexSettings, newDirectory())) {
             final EngineConfig engineConfig = engineConfig(shardId, indexSettings, threadPool, store, logger, xContentRegistry());
             try (FollowingEngine followingEngine = createEngine(store, engineConfig)) {
-                final Engine.Index index = createIndexOp("id", seqNo, origin);
+                final Engine.Index index = indexForFollowing("id", seqNo, origin);
                 consumer.accept(followingEngine, index);
             }
         }
@@ -160,6 +164,7 @@
             seqNo,
             Engine.Operation.Origin.PRIMARY,
             (followingEngine, delete) -> {
+                followingEngine.advanceMaxSeqNoOfUpdatesOrDeletes(randomLongBetween(seqNo, Long.MAX_VALUE));
                 final Engine.DeleteResult result = followingEngine.delete(delete);
                 assertThat(result.getSeqNo(), equalTo(seqNo));
             });
@@ -211,7 +216,7 @@
         try (Store store = createStore(shardId, indexSettings, newDirectory())) {
             final EngineConfig engineConfig = engineConfig(shardId, indexSettings, threadPool, store, logger, xContentRegistry());
             try (FollowingEngine followingEngine = createEngine(store, engineConfig)) {
-                followingEngine.index(createIndexOp("id", 128, Engine.Operation.Origin.PRIMARY));
+                followingEngine.index(indexForFollowing("id", 128, Engine.Operation.Origin.PRIMARY));
                 int addedNoops = followingEngine.fillSeqNoGaps(primaryTerm.get());
                 assertThat(addedNoops, equalTo(0));
             }
@@ -271,53 +276,259 @@
         store.associateIndexWithNewTranslog(translogUuid);
         FollowingEngine followingEngine = new FollowingEngine(config);
         TranslogHandler translogHandler = new TranslogHandler(xContentRegistry(), config.getIndexSettings());
+        followingEngine.initializeMaxSeqNoOfUpdatesOrDeletes();
         followingEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
         return followingEngine;
     }
 
-    private Engine.Index createIndexOp(String id, long seqNo, Engine.Operation.Origin origin) {
-        final Field uidField = new Field("_id", id, IdFieldMapper.Defaults.FIELD_TYPE);
-        final String type = "type";
-        final Field versionField = new NumericDocValuesField("_version", 0);
-        final SeqNoFieldMapper.SequenceIDFields seqID = SeqNoFieldMapper.SequenceIDFields.emptySeqID();
-        final ParseContext.Document document = new ParseContext.Document();
-        document.add(uidField);
-        document.add(versionField);
-        document.add(seqID.seqNo);
-        document.add(seqID.seqNoDocValue);
-        document.add(seqID.primaryTerm);
-        final BytesReference source = new BytesArray(new byte[]{1});
-        final ParsedDocument parsedDocument = new ParsedDocument(
-            versionField,
-            seqID,
-            id,
-            type,
-            "routing",
-            Collections.singletonList(document),
-            source,
-            XContentType.JSON,
-            null);
-
-        final long version;
-        final long autoGeneratedIdTimestamp;
-        if (randomBoolean()) {
-            version = 1;
-            autoGeneratedIdTimestamp = System.currentTimeMillis();
-        } else {
-            version = randomNonNegativeLong();
-            autoGeneratedIdTimestamp = IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP;
-        }
-        return new Engine.Index(
-            new Term("_id", parsedDocument.id()),
-            parsedDocument,
-            seqNo,
-            primaryTerm.get(),
-            version,
-            VersionType.EXTERNAL,
-            origin,
-            System.currentTimeMillis(),
-            autoGeneratedIdTimestamp,
-            randomBoolean());
+    private Engine.Index indexForFollowing(String id, long seqNo, Engine.Operation.Origin origin) {
+        final long version = randomBoolean() ? 1 : randomNonNegativeLong();
+        final ParsedDocument parsedDocument = EngineTestCase.createParsedDoc(id, null);
+        return new Engine.Index(EngineTestCase.newUid(parsedDocument), parsedDocument, seqNo, primaryTerm.get(), version,
+            VersionType.EXTERNAL, origin, System.currentTimeMillis(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, randomBoolean());
     }
 
+    private Engine.Index indexForPrimary(String id) {
+        final ParsedDocument parsedDoc = EngineTestCase.createParsedDoc(id, null);
+        return new Engine.Index(EngineTestCase.newUid(parsedDoc), primaryTerm.get(), parsedDoc);
+    }
+
+    private Engine.Delete deleteForPrimary(String id) {
+        final ParsedDocument parsedDoc = EngineTestCase.createParsedDoc(id, null);
+        return new Engine.Delete(parsedDoc.type(), parsedDoc.id(), EngineTestCase.newUid(parsedDoc), primaryTerm.get());
+    }
+
+    public void testBasicOptimization() throws Exception {
+        runFollowTest((leader, follower) -> {
+            long numDocs = between(1, 100);
+            for (int i = 0; i < numDocs; i++) {
+                leader.index(indexForPrimary(Integer.toString(i)));
+            }
+            follower.waitForOpsToComplete(leader.getLocalCheckpoint());
+            assertThat(follower.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(-1L));
+            assertThat(follower.getNumberOfOptimizedIndexing(), equalTo(numDocs));
+            assertThat(getDocIds(follower, true), equalTo(getDocIds(leader, true)));
+
+            // Do not apply optimization for deletes or updates
+            for (int i = 0; i < numDocs; i++) {
+                if (randomBoolean()) {
+                    leader.index(indexForPrimary(Integer.toString(i)));
+                } else if (randomBoolean()) {
+                    leader.delete(deleteForPrimary(Integer.toString(i)));
+                }
+            }
+            follower.waitForOpsToComplete(leader.getLocalCheckpoint());
+            assertThat(follower.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(leader.getMaxSeqNoOfUpdatesOrDeletes()));
+            assertThat(follower.getNumberOfOptimizedIndexing(), equalTo(numDocs));
+            assertThat(getDocIds(follower, true), equalTo(getDocIds(leader, true)));
+            // Apply optimization for documents that do not exist
+            long moreDocs = between(1, 100);
+            Set<String> docIds = getDocIds(follower, true).stream().map(doc -> doc.getId()).collect(Collectors.toSet());
+            for (int i = 0; i < moreDocs; i++) {
+                String docId = randomValueOtherThanMany(docIds::contains, () -> Integer.toString(between(1, 1000)));
+                docIds.add(docId);
+                leader.index(indexForPrimary(docId));
+            }
+            follower.waitForOpsToComplete(leader.getLocalCheckpoint());
+            assertThat(follower.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(leader.getMaxSeqNoOfUpdatesOrDeletes()));
+            assertThat(follower.getNumberOfOptimizedIndexing(), equalTo(numDocs + moreDocs));
+            assertThat(getDocIds(follower, true), equalTo(getDocIds(leader, true)));
+        });
+    }
+
+    public void testOptimizeAppendOnly() throws Exception {
+        int numOps = scaledRandomIntBetween(1, 1000);
+        List<Engine.Operation> ops = new ArrayList<>();
+        for (int i = 0; i < numOps; i++) {
+            ops.add(indexForPrimary(Integer.toString(i)));
+        }
+        runFollowTest((leader, follower) -> {
+            EngineTestCase.concurrentlyApplyOps(ops, leader);
+            assertThat(follower.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(-1L));
+            follower.waitForOpsToComplete(leader.getLocalCheckpoint());
+            assertThat(follower.getNumberOfOptimizedIndexing(), equalTo((long) numOps));
+        });
+    }
+
+    public void testOptimizeMultipleVersions() throws Exception {
+        List<Engine.Operation> ops = new ArrayList<>();
+        for (int numOps = scaledRandomIntBetween(1, 1000), i = 0; i < numOps; i++) {
+            String id = Integer.toString(between(0, 100));
+            if (randomBoolean()) {
+                ops.add(indexForPrimary(id));
+            } else {
+                ops.add(deleteForPrimary(id));
+            }
+        }
+        Randomness.shuffle(ops);
+        runFollowTest((leader, follower) -> {
+            EngineTestCase.concurrentlyApplyOps(ops, leader);
+            follower.waitForOpsToComplete(leader.getLocalCheckpoint());
+            final List<Engine.Operation> appendOps = new ArrayList<>();
+            for (int numAppends = scaledRandomIntBetween(0, 100), i = 0; i < numAppends; i++) {
+                appendOps.add(indexForPrimary("append-" + i));
+            }
+            EngineTestCase.concurrentlyApplyOps(appendOps, leader);
+            follower.waitForOpsToComplete(leader.getLocalCheckpoint());
+            assertThat(follower.getNumberOfOptimizedIndexing(), greaterThanOrEqualTo((long) appendOps.size()));
+        });
+    }
+
+    public void testOptimizeSingleDocSequentially() throws Exception {
+        runFollowTest((leader, follower) -> {
+            leader.index(indexForPrimary("id"));
+            follower.waitForOpsToComplete(leader.getLocalCheckpoint());
+            assertThat(follower.getNumberOfOptimizedIndexing(), equalTo(1L));
+
+            leader.delete(deleteForPrimary("id"));
+            follower.waitForOpsToComplete(leader.getLocalCheckpoint());
+            assertThat(follower.getNumberOfOptimizedIndexing(), equalTo(1L));
+
+            leader.index(indexForPrimary("id"));
+            follower.waitForOpsToComplete(leader.getLocalCheckpoint());
+            assertThat(follower.getNumberOfOptimizedIndexing(), equalTo(2L));
+
+            leader.index(indexForPrimary("id"));
+            follower.waitForOpsToComplete(leader.getLocalCheckpoint());
+            assertThat(follower.getNumberOfOptimizedIndexing(), equalTo(2L));
+        });
+    }
+
+    public void testOptimizeSingleDocConcurrently() throws Exception {
+        List<Engine.Operation> ops = EngineTestCase.generateSingleDocHistory(false, randomFrom(VersionType.values()), 2, 10, 500, "id");
+        Randomness.shuffle(ops);
+        runFollowTest((leader, follower) -> {
+            EngineTestCase.concurrentlyApplyOps(ops, leader);
+            follower.waitForOpsToComplete(leader.getLocalCheckpoint());
+            assertThat(getDocIds(follower, true), equalTo(getDocIds(leader, true)));
+            long numOptimized = follower.getNumberOfOptimizedIndexing();
+
+            leader.delete(deleteForPrimary("id"));
+            follower.waitForOpsToComplete(leader.getLocalCheckpoint());
+            assertThat(follower.getNumberOfOptimizedIndexing(), equalTo(numOptimized));
+
+            leader.index(indexForPrimary("id"));
+            follower.waitForOpsToComplete(leader.getLocalCheckpoint());
+            assertThat(follower.getNumberOfOptimizedIndexing(), equalTo(numOptimized + 1L));
+
+            leader.index(indexForPrimary("id"));
+            follower.waitForOpsToComplete(leader.getLocalCheckpoint());
+            assertThat(follower.getNumberOfOptimizedIndexing(), equalTo(numOptimized + 1L));
+        });
+    }
+
+    private void runFollowTest(CheckedBiConsumer<InternalEngine, FollowingEngine, Exception> task) throws Exception {
+        final CheckedBiConsumer<InternalEngine, FollowingEngine, Exception> wrappedTask = (leader, follower) -> {
+            Thread[] threads = new Thread[between(1, 8)];
+            AtomicBoolean taskIsCompleted = new AtomicBoolean();
+            AtomicLong lastFetchedSeqNo = new AtomicLong(follower.getLocalCheckpoint());
+            CountDownLatch latch = new CountDownLatch(threads.length + 1);
+            for (int i = 0; i < threads.length; i++) {
+                threads[i] = new Thread(() -> {
+                    try {
+                        latch.countDown();
+                        latch.await();
+                        fetchOperations(taskIsCompleted, lastFetchedSeqNo, leader, follower);
+                    } catch (Exception e) {
+                        throw new AssertionError(e);
+                    }
+                });
+                threads[i].start();
+            }
+            try {
+                latch.countDown();
+                latch.await();
+                task.accept(leader, follower);
+                follower.waitForOpsToComplete(leader.getLocalCheckpoint());
+            } finally {
+                taskIsCompleted.set(true);
+                for (Thread thread : threads) {
+                    thread.join();
+                }
+                assertThat(follower.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(leader.getMaxSeqNoOfUpdatesOrDeletes()));
+                assertThat(getDocIds(follower, true), equalTo(getDocIds(leader, true)));
+            }
+        };
+
+        Settings leaderSettings = Settings.builder()
+            .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)
+            .put("index.version.created", Version.CURRENT).put("index.soft_deletes.enabled", true).build();
+        IndexMetaData leaderIndexMetaData = IndexMetaData.builder(index.getName()).settings(leaderSettings).build();
+        IndexSettings leaderIndexSettings = new IndexSettings(leaderIndexMetaData, leaderSettings);
+        try (Store leaderStore = createStore(shardId, leaderIndexSettings, newDirectory())) {
+            leaderStore.createEmpty();
+            EngineConfig leaderConfig = engineConfig(shardId, leaderIndexSettings, threadPool, leaderStore, logger, xContentRegistry());
+            leaderStore.associateIndexWithNewTranslog(Translog.createEmptyTranslog(
+                leaderConfig.getTranslogConfig().getTranslogPath(), SequenceNumbers.NO_OPS_PERFORMED, shardId, 1L));
+            try (InternalEngine leaderEngine = new InternalEngine(leaderConfig)) {
+                leaderEngine.initializeMaxSeqNoOfUpdatesOrDeletes();
+                leaderEngine.skipTranslogRecovery();
+                Settings followerSettings = Settings.builder()
+                    .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)
+                    .put("index.version.created", Version.CURRENT).put("index.xpack.ccr.following_index", true).build();
+                IndexMetaData followerIndexMetaData = IndexMetaData.builder(index.getName()).settings(followerSettings).build();
+                IndexSettings followerIndexSettings = new IndexSettings(followerIndexMetaData, leaderSettings);
+                try (Store followerStore = createStore(shardId, followerIndexSettings, newDirectory())) {
+                    EngineConfig followerConfig = engineConfig(
+                        shardId, followerIndexSettings, threadPool, followerStore, logger, xContentRegistry());
+                    try (FollowingEngine followingEngine = createEngine(followerStore, followerConfig)) {
+                        wrappedTask.accept(leaderEngine, followingEngine);
+                    }
+                }
+            }
+        }
+    }
+
+    private void fetchOperations(AtomicBoolean stopped, AtomicLong lastFetchedSeqNo,
+                                 InternalEngine leader, FollowingEngine follower) throws IOException {
+        final MapperService mapperService = EngineTestCase.createMapperService("test");
+        final TranslogHandler translogHandler = new TranslogHandler(xContentRegistry(), follower.config().getIndexSettings());
+        while (stopped.get() == false) {
+            final long checkpoint = leader.getLocalCheckpoint();
+            final long lastSeqNo = lastFetchedSeqNo.get();
+            if (lastSeqNo < checkpoint) {
+                final long nextSeqNo = randomLongBetween(lastSeqNo + 1, checkpoint);
+                if (lastFetchedSeqNo.compareAndSet(lastSeqNo, nextSeqNo)) {
+                    // extends the fetch range so we may deliver some overlapping operations more than once.
+ final long fromSeqNo = randomLongBetween(Math.max(lastSeqNo - 5, 0), lastSeqNo + 1); + final long toSeqNo = randomLongBetween(nextSeqNo, Math.min(nextSeqNo + 5, checkpoint)); + try (Translog.Snapshot snapshot = + shuffleSnapshot(leader.newChangesSnapshot("test", mapperService, fromSeqNo, toSeqNo, true))) { + follower.advanceMaxSeqNoOfUpdatesOrDeletes(leader.getMaxSeqNoOfUpdatesOrDeletes()); + translogHandler.run(follower, snapshot); + } + } + } + } + } + + private Translog.Snapshot shuffleSnapshot(Translog.Snapshot snapshot) throws IOException { + final List operations = new ArrayList<>(snapshot.totalOperations()); + Translog.Operation op; + while ((op = snapshot.next()) != null) { + operations.add(op); + } + Randomness.shuffle(operations); + final Iterator iterator = operations.iterator(); + + return new Translog.Snapshot() { + @Override + public int totalOperations() { + return snapshot.totalOperations(); + } + + @Override + public Translog.Operation next() { + if (iterator.hasNext()) { + return iterator.next(); + } + return null; + } + + @Override + public void close() throws IOException { + snapshot.close(); + } + }; + } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/AbstractCcrCollectorTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/AbstractCcrCollectorTestCase.java new file mode 100644 index 00000000000..f98e541a9d9 --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/AbstractCcrCollectorTestCase.java @@ -0,0 +1,118 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.monitoring.collector.ccr; + +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.monitoring.BaseCollectorTestCase; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public abstract class AbstractCcrCollectorTestCase extends BaseCollectorTestCase { + + public void testShouldCollectReturnsFalseIfMonitoringNotAllowed() { + final Settings settings = randomFrom(ccrEnabledSettings(), ccrDisabledSettings()); + final boolean ccrAllowed = randomBoolean(); + final boolean isElectedMaster = randomBoolean(); + whenLocalNodeElectedMaster(isElectedMaster); + + // this controls the blockage + when(licenseState.isMonitoringAllowed()).thenReturn(false); + when(licenseState.isCcrAllowed()).thenReturn(ccrAllowed); + + final AbstractCcrCollector collector = createCollector(settings, clusterService, licenseState, client); + + assertThat(collector.shouldCollect(isElectedMaster), is(false)); + if (isElectedMaster) { + verify(licenseState).isMonitoringAllowed(); + } + } + + public void testShouldCollectReturnsFalseIfNotMaster() { + // regardless of CCR being enabled + final Settings settings = randomFrom(ccrEnabledSettings(), ccrDisabledSettings()); + + when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); + when(licenseState.isCcrAllowed()).thenReturn(randomBoolean()); + // this controls the blockage + final boolean isElectedMaster = false; + + final AbstractCcrCollector collector = createCollector(settings, clusterService, licenseState, client); + + assertThat(collector.shouldCollect(isElectedMaster), is(false)); + } + + public void testShouldCollectReturnsFalseIfCCRIsDisabled() { + // this controls the blockage + final Settings settings = ccrDisabledSettings(); + + when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); + when(licenseState.isCcrAllowed()).thenReturn(randomBoolean()); + + final boolean isElectedMaster = randomBoolean(); + whenLocalNodeElectedMaster(isElectedMaster); + + final AbstractCcrCollector collector = createCollector(settings, clusterService, licenseState, client); + + assertThat(collector.shouldCollect(isElectedMaster), is(false)); + + if (isElectedMaster) { + verify(licenseState).isMonitoringAllowed(); + } + } + + public void testShouldCollectReturnsFalseIfCCRIsNotAllowed() { + final Settings settings = randomFrom(ccrEnabledSettings(), ccrDisabledSettings()); + + when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); + // this controls the blockage + when(licenseState.isCcrAllowed()).thenReturn(false); + final boolean isElectedMaster = randomBoolean(); + whenLocalNodeElectedMaster(isElectedMaster); + + final AbstractCcrCollector collector = createCollector(settings, clusterService, licenseState, client); + + assertThat(collector.shouldCollect(isElectedMaster), is(false)); + + if (isElectedMaster) { + verify(licenseState).isMonitoringAllowed(); + } + } + + public void testShouldCollectReturnsTrue() { + final Settings settings = ccrEnabledSettings(); + + when(licenseState.isMonitoringAllowed()).thenReturn(true); + when(licenseState.isCcrAllowed()).thenReturn(true); + final boolean isElectedMaster = true; + + final AbstractCcrCollector collector = createCollector(settings, clusterService,
licenseState, client); + + assertThat(collector.shouldCollect(isElectedMaster), is(true)); + + verify(licenseState).isMonitoringAllowed(); + } + + abstract AbstractCcrCollector createCollector(Settings settings, + ClusterService clusterService, + XPackLicenseState licenseState, + Client client); + + private Settings ccrEnabledSettings() { + // since it's the default, we want to ensure we test both with/without it + return randomBoolean() ? Settings.EMPTY : Settings.builder().put(XPackSettings.CCR_ENABLED_SETTING.getKey(), true).build(); + } + + private Settings ccrDisabledSettings() { + return Settings.builder().put(XPackSettings.CCR_ENABLED_SETTING.getKey(), false).build(); + } + +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/AutoFollowStatsMonitoringDocTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/AutoFollowStatsMonitoringDocTests.java new file mode 100644 index 00000000000..ce1c0136677 --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/AutoFollowStatsMonitoringDocTests.java @@ -0,0 +1,163 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.collector.ccr; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.xpack.core.ccr.AutoFollowStats; +import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; +import org.elasticsearch.xpack.monitoring.exporter.BaseMonitoringDocTestCase; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.junit.Before; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; +import java.util.NavigableMap; +import java.util.TreeMap; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class AutoFollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase { + + private AutoFollowStats autoFollowStats; + + @Before + public void instantiateAutoFollowStats() { + autoFollowStats = new AutoFollowStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), + Collections.emptyNavigableMap()); + } + + @Override + protected AutoFollowStatsMonitoringDoc createMonitoringDoc(String cluster, + long timestamp, + long interval, + MonitoringDoc.Node node, + MonitoredSystem system, + String type, + String id) { + return new AutoFollowStatsMonitoringDoc(cluster, timestamp, interval, node, autoFollowStats); + } + + @Override + protected void assertMonitoringDoc(AutoFollowStatsMonitoringDoc document) { + assertThat(document.getSystem(), 
is(MonitoredSystem.ES)); + assertThat(document.getType(), is(AutoFollowStatsMonitoringDoc.TYPE)); + assertThat(document.getId(), nullValue()); + assertThat(document.stats(), is(autoFollowStats)); + } + + @Override + public void testToXContent() throws IOException { + final long timestamp = System.currentTimeMillis(); + final long intervalMillis = System.currentTimeMillis(); + final long nodeTimestamp = System.currentTimeMillis(); + final MonitoringDoc.Node node = new MonitoringDoc.Node("_uuid", "_host", "_addr", "_ip", "_name", nodeTimestamp); + + final NavigableMap recentAutoFollowExceptions = + new TreeMap<>(Collections.singletonMap( + randomAlphaOfLength(4), + new ElasticsearchException("cannot follow index"))); + final AutoFollowStats autoFollowStats = + new AutoFollowStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), recentAutoFollowExceptions); + + final AutoFollowStatsMonitoringDoc document = + new AutoFollowStatsMonitoringDoc("_cluster", timestamp, intervalMillis, node, autoFollowStats); + final BytesReference xContent = XContentHelper.toXContent(document, XContentType.JSON, false); + assertThat( + xContent.utf8ToString(), + equalTo( + "{" + + "\"cluster_uuid\":\"_cluster\"," + + "\"timestamp\":\"" + new DateTime(timestamp, DateTimeZone.UTC).toString() + "\"," + + "\"interval_ms\":" + intervalMillis + "," + + "\"type\":\"ccr_auto_follow_stats\"," + + "\"source_node\":{" + + "\"uuid\":\"_uuid\"," + + "\"host\":\"_host\"," + + "\"transport_address\":\"_addr\"," + + "\"ip\":\"_ip\"," + + "\"name\":\"_name\"," + + "\"timestamp\":\"" + new DateTime(nodeTimestamp, DateTimeZone.UTC).toString() + "\"" + + "}," + + "\"ccr_auto_follow_stats\":{" + + "\"number_of_failed_follow_indices\":" + autoFollowStats.getNumberOfFailedFollowIndices() + "," + + "\"number_of_failed_remote_cluster_state_requests\":" + + autoFollowStats.getNumberOfFailedRemoteClusterStateRequests() + "," + + "\"number_of_successful_follow_indices\":" + autoFollowStats.getNumberOfSuccessfulFollowIndices() + "," + + "\"recent_auto_follow_errors\":[" + + "{" + + "\"leader_index\":\"" + recentAutoFollowExceptions.keySet().iterator().next() + "\"," + + "\"auto_follow_exception\":{" + + "\"type\":\"exception\"," + + "\"reason\":\"cannot follow index\"" + + "}" + + "}" + + "]" + + "}" + + "}")); + } + + public void testShardFollowNodeTaskStatusFieldsMapped() throws IOException { + final NavigableMap fetchExceptions = + new TreeMap<>(Collections.singletonMap("leader_index", new ElasticsearchException("cannot follow index"))); + final AutoFollowStats status = new AutoFollowStats(1, 0, 2, fetchExceptions); + XContentBuilder builder = jsonBuilder(); + builder.value(status); + Map serializedStatus = XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(builder), false); + + Map template = + XContentHelper.convertToMap(XContentType.JSON.xContent(), MonitoringTemplateUtils.loadTemplate("es"), false); + Map autoFollowStatsMapping = + (Map) XContentMapValues.extractValue("mappings.doc.properties.ccr_auto_follow_stats.properties", template); + + assertThat(serializedStatus.size(), equalTo(autoFollowStatsMapping.size())); + for (Map.Entry entry : serializedStatus.entrySet()) { + String fieldName = entry.getKey(); + Map fieldMapping = (Map) autoFollowStatsMapping.get(fieldName); + assertThat(fieldMapping, notNullValue()); + + Object fieldValue = entry.getValue(); + String fieldType = (String) fieldMapping.get("type"); + if (fieldValue instanceof Long || fieldValue instanceof Integer) { + 
assertThat("expected long field type for field [" + fieldName + "]", fieldType, + anyOf(equalTo("long"), equalTo("integer"))); + } else if (fieldValue instanceof String) { + assertThat("expected keyword field type for field [" + fieldName + "]", fieldType, + anyOf(equalTo("keyword"), equalTo("text"))); + } else { + // Manual test specific object fields and if not just fail: + if (fieldName.equals("recent_auto_follow_errors")) { + assertThat(fieldType, equalTo("nested")); + assertThat(((Map) fieldMapping.get("properties")).size(), equalTo(2)); + assertThat(XContentMapValues.extractValue("properties.leader_index.type", fieldMapping), equalTo("keyword")); + assertThat(XContentMapValues.extractValue("properties.auto_follow_exception.type", fieldMapping), equalTo("object")); + + Map exceptionFieldMapping = + (Map) XContentMapValues.extractValue("properties.auto_follow_exception.properties", fieldMapping); + assertThat(exceptionFieldMapping.size(), equalTo(2)); + assertThat(XContentMapValues.extractValue("type.type", exceptionFieldMapping), equalTo("keyword")); + assertThat(XContentMapValues.extractValue("reason.type", exceptionFieldMapping), equalTo("text")); + } else { + fail("unexpected field value type [" + fieldValue.getClass() + "] for field [" + fieldName + "]"); + } + } + } + } +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrAutoFollowStatsCollectorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrAutoFollowStatsCollectorTests.java new file mode 100644 index 00000000000..7a302503d2d --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrAutoFollowStatsCollectorTests.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.monitoring.collector.ccr; + +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.core.ccr.AutoFollowStats; +import org.elasticsearch.xpack.core.ccr.action.AutoFollowStatsAction; +import org.elasticsearch.xpack.core.ccr.client.CcrClient; +import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; + +import java.util.Collection; + +import static org.elasticsearch.xpack.monitoring.MonitoringTestUtils.randomMonitoringNode; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class CcrAutoFollowStatsCollectorTests extends AbstractCcrCollectorTestCase { + + @Override + AbstractCcrCollector createCollector(Settings settings, ClusterService clusterService, XPackLicenseState licenseState, Client client) { + return new CcrAutoFollowStatsCollector(settings, clusterService, licenseState, client); + } + + public void testDoCollect() throws Exception { + final String clusterUuid = randomAlphaOfLength(5); + whenClusterStateWithUUID(clusterUuid); + + final MonitoringDoc.Node node = randomMonitoringNode(random()); + final CcrClient client = mock(CcrClient.class); + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + + final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(1, 120)); + withCollectionTimeout(CcrAutoFollowStatsCollector.CCR_AUTO_FOLLOW_STATS_TIMEOUT, timeout); + + final CcrAutoFollowStatsCollector collector = + new CcrAutoFollowStatsCollector(Settings.EMPTY, clusterService, licenseState, client, threadContext); + assertEquals(timeout, collector.getCollectionTimeout()); + + final AutoFollowStats autoFollowStats = mock(AutoFollowStats.class); + + @SuppressWarnings("unchecked") + final ActionFuture future = (ActionFuture)mock(ActionFuture.class); + final AutoFollowStatsAction.Response response = new AutoFollowStatsAction.Response(autoFollowStats); + + when(client.autoFollowStats(any())).thenReturn(future); + when(future.actionGet(timeout)).thenReturn(response); + + final long interval = randomNonNegativeLong(); + + final Collection documents = collector.doCollect(node, interval, clusterState); + verify(clusterState).metaData(); + verify(metaData).clusterUUID(); + + assertThat(documents, hasSize(1)); + final AutoFollowStatsMonitoringDoc document = (AutoFollowStatsMonitoringDoc) documents.iterator().next(); + + assertThat(document.getCluster(), is(clusterUuid)); + assertThat(document.getTimestamp(), greaterThan(0L)); + assertThat(document.getIntervalMillis(), equalTo(interval)); + assertThat(document.getNode(), equalTo(node)); + assertThat(document.getSystem(), is(MonitoredSystem.ES)); + assertThat(document.getType(), is(AutoFollowStatsMonitoringDoc.TYPE)); + assertThat(document.getId(), nullValue()); + assertThat(document.stats(), is(autoFollowStats)); + } + +} 
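The CcrStatsCollectorTests diff that follows completes the refactoring started above: the duplicated shouldCollect cases move up into AbstractCcrCollectorTestCase, and each concrete test class supplies its collector through the abstract createCollector factory. A minimal, self-contained sketch of that template-method test pattern, with illustrative names only (these are not the actual Elasticsearch classes), might look like this:

-------------------------------------
// Sketch of the shared-base-class test pattern; all names are hypothetical.
abstract class AbstractCollectorContractTest {

    // Stand-in for AbstractCcrCollector: only the contract exercised here.
    @FunctionalInterface
    interface Collector {
        boolean shouldCollect(boolean isElectedMaster);
    }

    // Mirrors createCollector(settings, clusterService, licenseState, client):
    // each subclass plugs in the concrete collector under test.
    abstract Collector createCollector(boolean monitoringAllowed, boolean ccrAllowed);

    final void assertShouldNotCollectWithoutMonitoringLicense() {
        // The missing monitoring license is the blockage under test;
        // everything else is permissive.
        Collector collector = createCollector(false, true);
        if (collector.shouldCollect(true)) {
            throw new AssertionError("must not collect when monitoring is not allowed");
        }
    }
}

// Concrete subclass, analogous to CcrStatsCollectorTests after the refactor.
class StatsCollectorContractTest extends AbstractCollectorContractTest {
    @Override
    Collector createCollector(boolean monitoringAllowed, boolean ccrAllowed) {
        return isElectedMaster -> isElectedMaster && monitoringAllowed && ccrAllowed;
    }
}
-------------------------------------

Each new collector test (stats, auto-follow stats) then inherits the full shouldCollect matrix and only writes its own doCollect assertions.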
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollectorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollectorTests.java index aaf3a61643b..b0f2a00d2dc 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollectorTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollectorTests.java @@ -7,17 +7,18 @@ package org.elasticsearch.xpack.monitoring.collector.ccr; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus; import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; import org.elasticsearch.xpack.core.ccr.client.CcrClient; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; -import org.elasticsearch.xpack.monitoring.BaseCollectorTestCase; import org.mockito.ArgumentMatcher; import java.util.ArrayList; @@ -38,89 +39,11 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -public class CcrStatsCollectorTests extends BaseCollectorTestCase { +public class CcrStatsCollectorTests extends AbstractCcrCollectorTestCase { - public void testShouldCollectReturnsFalseIfMonitoringNotAllowed() { - final Settings settings = randomFrom(ccrEnabledSettings(), ccrDisabledSettings()); - final boolean ccrAllowed = randomBoolean(); - final boolean isElectedMaster = randomBoolean(); - whenLocalNodeElectedMaster(isElectedMaster); - - // this controls the blockage - when(licenseState.isMonitoringAllowed()).thenReturn(false); - when(licenseState.isCcrAllowed()).thenReturn(ccrAllowed); - - final CcrStatsCollector collector = new CcrStatsCollector(settings, clusterService, licenseState, client); - - assertThat(collector.shouldCollect(isElectedMaster), is(false)); - if (isElectedMaster) { - verify(licenseState).isMonitoringAllowed(); - } - } - - public void testShouldCollectReturnsFalseIfNotMaster() { - // regardless of CCR being enabled - final Settings settings = randomFrom(ccrEnabledSettings(), ccrDisabledSettings()); - - when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); - when(licenseState.isCcrAllowed()).thenReturn(randomBoolean()); - // this controls the blockage - final boolean isElectedMaster = false; - - final CcrStatsCollector collector = new CcrStatsCollector(settings, clusterService, licenseState, client); - - assertThat(collector.shouldCollect(isElectedMaster), is(false)); - } - - public void testShouldCollectReturnsFalseIfCCRIsDisabled() { - // this is controls the blockage - final Settings settings = ccrDisabledSettings(); - - when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); - when(licenseState.isCcrAllowed()).thenReturn(randomBoolean()); - - final boolean isElectedMaster = randomBoolean(); - whenLocalNodeElectedMaster(isElectedMaster); - - final CcrStatsCollector collector = new CcrStatsCollector(settings, clusterService, licenseState, 
client); - - assertThat(collector.shouldCollect(isElectedMaster), is(false)); - - if (isElectedMaster) { - verify(licenseState).isMonitoringAllowed(); - } - } - - public void testShouldCollectReturnsFalseIfCCRIsNotAllowed() { - final Settings settings = randomFrom(ccrEnabledSettings(), ccrDisabledSettings()); - - when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); - // this is controls the blockage - when(licenseState.isCcrAllowed()).thenReturn(false); - final boolean isElectedMaster = randomBoolean(); - whenLocalNodeElectedMaster(isElectedMaster); - - final CcrStatsCollector collector = new CcrStatsCollector(settings, clusterService, licenseState, client); - - assertThat(collector.shouldCollect(isElectedMaster), is(false)); - - if (isElectedMaster) { - verify(licenseState).isMonitoringAllowed(); - } - } - - public void testShouldCollectReturnsTrue() { - final Settings settings = ccrEnabledSettings(); - - when(licenseState.isMonitoringAllowed()).thenReturn(true); - when(licenseState.isCcrAllowed()).thenReturn(true); - final boolean isElectedMaster = true; - - final CcrStatsCollector collector = new CcrStatsCollector(settings, clusterService, licenseState, client); - - assertThat(collector.shouldCollect(isElectedMaster), is(true)); - - verify(licenseState).isMonitoringAllowed(); + @Override + AbstractCcrCollector createCollector(Settings settings, ClusterService clusterService, XPackLicenseState licenseState, Client client) { + return new CcrStatsCollector(settings, clusterService, licenseState, client); } public void testDoCollect() throws Exception { @@ -186,15 +109,6 @@ public class CcrStatsCollectorTests extends BaseCollectorTestCase { return statuses; } - private Settings ccrEnabledSettings() { - // since it's the default, we want to ensure we test both with/without it - return randomBoolean() ? 
Settings.EMPTY : Settings.builder().put(XPackSettings.CCR_ENABLED_SETTING.getKey(), true).build(); - } - - private Settings ccrDisabledSettings() { - return Settings.builder().put(XPackSettings.CCR_ENABLED_SETTING.getKey(), false).build(); - } - private static CcrStatsAction.StatsRequest statsRequestEq(CcrStatsAction.StatsRequest expected) { return argThat(new StatsRequestMatches(expected)); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseVerifier.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseVerifier.java index a879dc9ed18..918067e6766 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseVerifier.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseVerifier.java @@ -43,6 +43,7 @@ public class LicenseVerifier { try { byte[] signatureBytes = Base64.getDecoder().decode(license.signature()); ByteBuffer byteBuffer = ByteBuffer.wrap(signatureBytes); + @SuppressWarnings("unused") int version = byteBuffer.getInt(); int magicLen = byteBuffer.getInt(); byte[] magic = new byte[magicLen]; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartupSelfGeneratedLicenseTask.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartupSelfGeneratedLicenseTask.java index c2d53bd0716..49ee35c2779 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartupSelfGeneratedLicenseTask.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartupSelfGeneratedLicenseTask.java @@ -6,6 +6,7 @@ package org.elasticsearch.license; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.Version; @@ -14,7 +15,6 @@ import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.core.XPackPlugin; @@ -22,6 +22,7 @@ import java.time.Clock; import java.util.UUID; public class StartupSelfGeneratedLicenseTask extends ClusterStateUpdateTask { + private static final Logger logger = LogManager.getLogger(StartupSelfGeneratedLicenseTask.class); /** * Max number of nodes licensed by generated trial license @@ -31,13 +32,11 @@ public class StartupSelfGeneratedLicenseTask extends ClusterStateUpdateTask { private final Settings settings; private final Clock clock; private final ClusterService clusterService; - private final Logger logger; public StartupSelfGeneratedLicenseTask(Settings settings, Clock clock, ClusterService clusterService) { this.settings = settings; this.clock = clock; this.clusterService = clusterService; - this.logger = Loggers.getLogger(getClass(), settings); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java index 75832271bee..e063a85d0b1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; @@ -21,16 +22,15 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.XPackPlugin; -import org.elasticsearch.xpack.core.security.xcontent.XContentUtils; import java.io.IOException; -import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.stream.Collectors; /** * Custom metadata that contains auto follow patterns and what leader indices an auto follow pattern has already followed. @@ -41,10 +41,15 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i private static final ParseField PATTERNS_FIELD = new ParseField("patterns"); private static final ParseField FOLLOWED_LEADER_INDICES_FIELD = new ParseField("followed_leader_indices"); + private static final ParseField HEADERS = new ParseField("headers"); @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("auto_follow", - args -> new AutoFollowMetadata((Map) args[0], (Map>) args[1])); + args -> new AutoFollowMetadata( + (Map) args[0], + (Map>) args[1], + (Map>) args[2] + )); static { PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> { @@ -61,20 +66,8 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i } return patterns; }, PATTERNS_FIELD); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> { - Map> alreadyFollowedIndexUUIDS = new HashMap<>(); - String fieldName = null; - for (XContentParser.Token token = p.nextToken(); token != XContentParser.Token.END_OBJECT; token = p.nextToken()) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = p.currentName(); - } else if (token == XContentParser.Token.START_ARRAY) { - alreadyFollowedIndexUUIDS.put(fieldName, Arrays.asList(XContentUtils.readStringArray(p, false))); - } else { - throw new ElasticsearchParseException("unexpected token [" + token + "]"); - } - } - return alreadyFollowedIndexUUIDS; - }, FOLLOWED_LEADER_INDICES_FIELD); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.map(), FOLLOWED_LEADER_INDICES_FIELD); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.map(), HEADERS); } public static AutoFollowMetadata fromXContent(XContentParser parser) throws IOException { @@ -83,15 +76,24 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i private final Map patterns; private final Map> followedLeaderIndexUUIDs; + private final Map> headers; - public AutoFollowMetadata(Map patterns, Map> followedLeaderIndexUUIDs) { - this.patterns = patterns; - this.followedLeaderIndexUUIDs = followedLeaderIndexUUIDs; + public AutoFollowMetadata(Map patterns, + Map> followedLeaderIndexUUIDs, + Map> headers) { + this.patterns = Collections.unmodifiableMap(patterns); + this.followedLeaderIndexUUIDs = Collections.unmodifiableMap(followedLeaderIndexUUIDs.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> 
Collections.unmodifiableList(e.getValue()))); + this.headers = Collections.unmodifiableMap(headers.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> Collections.unmodifiableMap(e.getValue())))); } public AutoFollowMetadata(StreamInput in) throws IOException { - patterns = in.readMap(StreamInput::readString, AutoFollowPattern::new); - followedLeaderIndexUUIDs = in.readMapOfLists(StreamInput::readString, StreamInput::readString); + this( + in.readMap(StreamInput::readString, AutoFollowPattern::new), + in.readMapOfLists(StreamInput::readString, StreamInput::readString), + in.readMap(StreamInput::readString, valIn -> valIn.readMap(StreamInput::readString, StreamInput::readString)) + ); } public Map getPatterns() { @@ -102,11 +104,14 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i return followedLeaderIndexUUIDs; } + public Map> getHeaders() { + return headers; + } + @Override public EnumSet context() { - // TODO: When a snapshot is restored do we want to restore this? - // (Otherwise we would start following indices automatically immediately) - return MetaData.ALL_CONTEXTS; + // No XContentContext.API, because the headers should not be serialized as part of the cluster state API + return EnumSet.of(MetaData.XContentContext.SNAPSHOT, MetaData.XContentContext.GATEWAY); } @Override @@ -123,6 +128,8 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i public void writeTo(StreamOutput out) throws IOException { out.writeMap(patterns, StreamOutput::writeString, (out1, value) -> value.writeTo(out1)); out.writeMapOfLists(followedLeaderIndexUUIDs, StreamOutput::writeString, StreamOutput::writeString); + out.writeMap(headers, StreamOutput::writeString, + (valOut, header) -> valOut.writeMap(header, StreamOutput::writeString, StreamOutput::writeString)); } @Override @@ -140,6 +147,11 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i builder.field(entry.getKey(), entry.getValue()); } builder.endObject(); + builder.startObject(HEADERS.getPreferredName()); + for (Map.Entry> entry : headers.entrySet()) { + builder.field(entry.getKey(), entry.getValue()); + } + builder.endObject(); return builder; } @@ -163,30 +175,32 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i public static class AutoFollowPattern implements Writeable, ToXContentObject { - private static final ParseField LEADER_PATTERNS_FIELD = new ParseField("leader_patterns"); - private static final ParseField FOLLOW_PATTERN_FIELD = new ParseField("follow_pattern"); + public static final ParseField LEADER_PATTERNS_FIELD = new ParseField("leader_index_patterns"); + public static final ParseField FOLLOW_PATTERN_FIELD = new ParseField("follow_index_pattern"); public static final ParseField MAX_BATCH_OPERATION_COUNT = new ParseField("max_batch_operation_count"); public static final ParseField MAX_CONCURRENT_READ_BATCHES = new ParseField("max_concurrent_read_batches"); - public static final ParseField MAX_BATCH_SIZE_IN_BYTES = new ParseField("max_batch_size_in_bytes"); + public static final ParseField MAX_BATCH_SIZE = new ParseField("max_batch_size"); public static final ParseField MAX_CONCURRENT_WRITE_BATCHES = new ParseField("max_concurrent_write_batches"); public static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size"); public static final ParseField MAX_RETRY_DELAY = new ParseField("max_retry_delay"); public static final ParseField POLL_TIMEOUT = new ParseField("poll_timeout"); - private static final ParseField HEADERS = new
ParseField("headers"); @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("auto_follow_pattern", args -> new AutoFollowPattern((List) args[0], (String) args[1], (Integer) args[2], (Integer) args[3], - (Long) args[4], (Integer) args[5], (Integer) args[6], (TimeValue) args[7], (TimeValue) args[8], - (Map) args[9])); + (ByteSizeValue) args[4], (Integer) args[5], (Integer) args[6], (TimeValue) args[7], (TimeValue) args[8])); static { PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), LEADER_PATTERNS_FIELD); PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FOLLOW_PATTERN_FIELD); PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_BATCH_OPERATION_COUNT); PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_CONCURRENT_READ_BATCHES); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), MAX_BATCH_SIZE_IN_BYTES); + PARSER.declareField( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_BATCH_SIZE.getPreferredName()), + MAX_BATCH_SIZE, + ObjectParser.ValueType.STRING); PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_CONCURRENT_WRITE_BATCHES); PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_WRITE_BUFFER_SIZE); PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), @@ -195,53 +209,48 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> TimeValue.parseTimeValue(p.text(), POLL_TIMEOUT.getPreferredName()), POLL_TIMEOUT, ObjectParser.ValueType.STRING); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), HEADERS); } private final List leaderIndexPatterns; private final String followIndexPattern; private final Integer maxBatchOperationCount; private final Integer maxConcurrentReadBatches; - private final Long maxOperationSizeInBytes; + private final ByteSizeValue maxBatchSize; private final Integer maxConcurrentWriteBatches; private final Integer maxWriteBufferSize; private final TimeValue maxRetryDelay; private final TimeValue pollTimeout; - private final Map headers; public AutoFollowPattern(List leaderIndexPatterns, String followIndexPattern, Integer maxBatchOperationCount, Integer maxConcurrentReadBatches, - Long maxOperationSizeInBytes, + ByteSizeValue maxBatchSize, Integer maxConcurrentWriteBatches, Integer maxWriteBufferSize, TimeValue maxRetryDelay, - TimeValue pollTimeout, - Map headers) { + TimeValue pollTimeout) { this.leaderIndexPatterns = leaderIndexPatterns; this.followIndexPattern = followIndexPattern; this.maxBatchOperationCount = maxBatchOperationCount; this.maxConcurrentReadBatches = maxConcurrentReadBatches; - this.maxOperationSizeInBytes = maxOperationSizeInBytes; + this.maxBatchSize = maxBatchSize; this.maxConcurrentWriteBatches = maxConcurrentWriteBatches; this.maxWriteBufferSize = maxWriteBufferSize; this.maxRetryDelay = maxRetryDelay; this.pollTimeout = pollTimeout; - this.headers = headers != null ? 
Collections.unmodifiableMap(headers) : Collections.emptyMap(); } - AutoFollowPattern(StreamInput in) throws IOException { + public AutoFollowPattern(StreamInput in) throws IOException { leaderIndexPatterns = in.readList(StreamInput::readString); followIndexPattern = in.readOptionalString(); maxBatchOperationCount = in.readOptionalVInt(); maxConcurrentReadBatches = in.readOptionalVInt(); - maxOperationSizeInBytes = in.readOptionalLong(); + maxBatchSize = in.readOptionalWriteable(ByteSizeValue::new); maxConcurrentWriteBatches = in.readOptionalVInt(); maxWriteBufferSize = in.readOptionalVInt(); maxRetryDelay = in.readOptionalTimeValue(); pollTimeout = in.readOptionalTimeValue(); - this.headers = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readString)); } public boolean match(String indexName) { @@ -268,8 +277,8 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i return maxConcurrentReadBatches; } - public Long getMaxOperationSizeInBytes() { - return maxOperationSizeInBytes; + public ByteSizeValue getMaxBatchSize() { + return maxBatchSize; } public Integer getMaxConcurrentWriteBatches() { @@ -288,22 +297,17 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i return pollTimeout; } - public Map getHeaders() { - return headers; - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeStringList(leaderIndexPatterns); out.writeOptionalString(followIndexPattern); out.writeOptionalVInt(maxBatchOperationCount); out.writeOptionalVInt(maxConcurrentReadBatches); - out.writeOptionalLong(maxOperationSizeInBytes); + out.writeOptionalWriteable(maxBatchSize); out.writeOptionalVInt(maxConcurrentWriteBatches); out.writeOptionalVInt(maxWriteBufferSize); out.writeOptionalTimeValue(maxRetryDelay); out.writeOptionalTimeValue(pollTimeout); - out.writeMap(headers, StreamOutput::writeString, StreamOutput::writeString); } @Override @@ -318,8 +322,8 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i if (maxConcurrentReadBatches != null) { builder.field(MAX_CONCURRENT_READ_BATCHES.getPreferredName(), maxConcurrentReadBatches); } - if (maxOperationSizeInBytes != null) { - builder.field(MAX_BATCH_SIZE_IN_BYTES.getPreferredName(), maxOperationSizeInBytes); + if (maxBatchSize != null) { + builder.field(MAX_BATCH_SIZE.getPreferredName(), maxBatchSize.getStringRep()); } if (maxConcurrentWriteBatches != null) { builder.field(MAX_CONCURRENT_WRITE_BATCHES.getPreferredName(), maxConcurrentWriteBatches); @@ -333,7 +337,6 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i if (pollTimeout != null) { builder.field(POLL_TIMEOUT.getPreferredName(), pollTimeout); } - builder.field(HEADERS.getPreferredName(), headers); return builder; } @@ -348,31 +351,28 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i if (o == null || getClass() != o.getClass()) return false; AutoFollowPattern that = (AutoFollowPattern) o; return Objects.equals(leaderIndexPatterns, that.leaderIndexPatterns) && - Objects.equals(followIndexPattern, that.followIndexPattern) && - Objects.equals(maxBatchOperationCount, that.maxBatchOperationCount) && - Objects.equals(maxConcurrentReadBatches, that.maxConcurrentReadBatches) && - Objects.equals(maxOperationSizeInBytes, that.maxOperationSizeInBytes) && - Objects.equals(maxConcurrentWriteBatches, that.maxConcurrentWriteBatches) && - Objects.equals(maxWriteBufferSize, that.maxWriteBufferSize) && - Objects.equals(maxRetryDelay, that.maxRetryDelay) && - Objects.equals(pollTimeout, 
that.pollTimeout) && - Objects.equals(headers, that.headers); + Objects.equals(followIndexPattern, that.followIndexPattern) && + Objects.equals(maxBatchOperationCount, that.maxBatchOperationCount) && + Objects.equals(maxConcurrentReadBatches, that.maxConcurrentReadBatches) && + Objects.equals(maxBatchSize, that.maxBatchSize) && + Objects.equals(maxConcurrentWriteBatches, that.maxConcurrentWriteBatches) && + Objects.equals(maxWriteBufferSize, that.maxWriteBufferSize) && + Objects.equals(maxRetryDelay, that.maxRetryDelay) && + Objects.equals(pollTimeout, that.pollTimeout); } @Override public int hashCode() { return Objects.hash( - leaderIndexPatterns, - followIndexPattern, - maxBatchOperationCount, - maxConcurrentReadBatches, - maxOperationSizeInBytes, - maxConcurrentWriteBatches, - maxWriteBufferSize, - maxRetryDelay, - pollTimeout, - headers - ); + leaderIndexPatterns, + followIndexPattern, + maxBatchOperationCount, + maxConcurrentReadBatches, + maxBatchSize, + maxConcurrentWriteBatches, + maxWriteBufferSize, + maxRetryDelay, + pollTimeout); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java index 7133a201f4e..6f28c450f04 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java @@ -121,28 +121,33 @@ public class AutoFollowStats implements Writeable, ToXContentObject { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); { - builder.field(NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED.getPreferredName(), numberOfFailedFollowIndices); - builder.field(NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS.getPreferredName(), numberOfFailedRemoteClusterStateRequests); - builder.field(NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED.getPreferredName(), numberOfSuccessfulFollowIndices); - builder.startArray(RECENT_AUTO_FOLLOW_ERRORS.getPreferredName()); - { - for (final Map.Entry entry : recentAutoFollowErrors.entrySet()) { + toXContentFragment(builder, params); + } + builder.endObject(); + return builder; + } + + public XContentBuilder toXContentFragment(final XContentBuilder builder, final Params params) throws IOException { + builder.field(NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED.getPreferredName(), numberOfFailedFollowIndices); + builder.field(NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS.getPreferredName(), numberOfFailedRemoteClusterStateRequests); + builder.field(NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED.getPreferredName(), numberOfSuccessfulFollowIndices); + builder.startArray(RECENT_AUTO_FOLLOW_ERRORS.getPreferredName()); + { + for (final Map.Entry entry : recentAutoFollowErrors.entrySet()) { + builder.startObject(); + { + builder.field(LEADER_INDEX.getPreferredName(), entry.getKey()); + builder.field(AUTO_FOLLOW_EXCEPTION.getPreferredName()); builder.startObject(); { - builder.field(LEADER_INDEX.getPreferredName(), entry.getKey()); - builder.field(AUTO_FOLLOW_EXCEPTION.getPreferredName()); - builder.startObject(); - { - ElasticsearchException.generateThrowableXContent(builder, params, entry.getValue()); - } - builder.endObject(); + ElasticsearchException.generateThrowableXContent(builder, params, entry.getValue()); } builder.endObject(); } + builder.endObject(); } - builder.endArray(); } - builder.endObject(); + builder.endArray(); return builder; } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/ShardFollowNodeTaskStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/ShardFollowNodeTaskStatus.java index a8193c35a8d..759823ef786 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/ShardFollowNodeTaskStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/ShardFollowNodeTaskStatus.java @@ -381,7 +381,10 @@ public class ShardFollowNodeTaskStatus implements Task.Status { out.writeMap( fetchExceptions, StreamOutput::writeVLong, - (stream, value) -> { stream.writeVInt(value.v1()); stream.writeException(value.v2()); }); + (stream, value) -> { + stream.writeVInt(value.v1()); + stream.writeException(value.v2()); + }); out.writeZLong(timeSinceLastFetchMillis); } @@ -526,6 +529,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { return status.fetchExceptions().values().stream().map(t -> t.v2().getMessage()).collect(Collectors.toList()); } + @Override public String toString() { return Strings.toString(this); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java index 863cb678d7e..a69ecbf7cdf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java @@ -23,9 +23,11 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus; import java.io.IOException; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.TreeMap; public class CcrStatsAction extends Action { @@ -45,7 +47,7 @@ public class CcrStatsAction extends Action { public static class StatsResponses extends BaseTasksResponse implements ToXContentObject { - private final List statsResponse; + private List statsResponse; public List getStatsResponses() { return statsResponse; @@ -87,6 +89,31 @@ public class CcrStatsAction extends Action { builder.endObject(); return builder; } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + statsResponse = in.readList(StatsResponse::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeList(statsResponse); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StatsResponses that = (StatsResponses) o; + return Objects.equals(statsResponse, that.statsResponse); + } + + @Override + public int hashCode() { + return Objects.hash(statsResponse); + } } public static class StatsRequest extends BaseTasksRequest implements IndicesRequest { @@ -102,15 +129,9 @@ public class CcrStatsAction extends Action { this.indices = indices; } - private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpenAndForbidClosed(); - @Override public IndicesOptions indicesOptions() { - return indicesOptions; - } - - public void setIndicesOptions(final IndicesOptions indicesOptions) { - this.indicesOptions = indicesOptions; + return IndicesOptions.strictExpand(); } @Override @@ -134,17 +155,27 @@ public class CcrStatsAction extends Action { @Override public void readFrom(final StreamInput in) throws IOException { super.readFrom(in); - 
indices = in.readStringArray(); - indicesOptions = IndicesOptions.readIndicesOptions(in); + indices = in.readOptionalStringArray(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeStringArray(indices); - indicesOptions.writeIndicesOptions(out); + out.writeOptionalStringArray(indices); } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StatsRequest that = (StatsRequest) o; + return Arrays.equals(indices, that.indices); + } + + @Override + public int hashCode() { + return Arrays.hashCode(indices); + } } public static class StatsResponse implements Writeable { @@ -168,6 +199,18 @@ public class CcrStatsAction extends Action { status.writeTo(out); } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StatsResponse that = (StatsResponse) o; + return Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(status); + } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java new file mode 100644 index 00000000000..ff7f50e66c8 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java @@ -0,0 +1,135 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.ccr.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +public class GetAutoFollowPatternAction extends Action { + + public static final String NAME = "cluster:admin/xpack/ccr/auto_follow_pattern/get"; + public static final GetAutoFollowPatternAction INSTANCE = new GetAutoFollowPatternAction(); + + private GetAutoFollowPatternAction() { + super(NAME); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends MasterNodeReadRequest { + + private String leaderClusterAlias; + + public Request() { + } + + public Request(StreamInput in) throws IOException { + super(in); + this.leaderClusterAlias = in.readOptionalString(); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public String getLeaderClusterAlias() { + return leaderClusterAlias; + } + + public void setLeaderClusterAlias(String leaderClusterAlias) { + this.leaderClusterAlias = leaderClusterAlias; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalString(leaderClusterAlias); + } + + @Override + public boolean 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java
new file mode 100644
index 00000000000..ff7f50e66c8
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.core.ccr.action;
+
+import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.support.master.MasterNodeReadRequest;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Objects;
+
+public class GetAutoFollowPatternAction extends Action<GetAutoFollowPatternAction.Response> {
+
+    public static final String NAME = "cluster:admin/xpack/ccr/auto_follow_pattern/get";
+    public static final GetAutoFollowPatternAction INSTANCE = new GetAutoFollowPatternAction();
+
+    private GetAutoFollowPatternAction() {
+        super(NAME);
+    }
+
+    @Override
+    public Response newResponse() {
+        return new Response();
+    }
+
+    public static class Request extends MasterNodeReadRequest<Request> {
+
+        private String leaderClusterAlias;
+
+        public Request() {
+        }
+
+        public Request(StreamInput in) throws IOException {
+            super(in);
+            this.leaderClusterAlias = in.readOptionalString();
+        }
+
+        @Override
+        public ActionRequestValidationException validate() {
+            return null;
+        }
+
+        public String getLeaderClusterAlias() {
+            return leaderClusterAlias;
+        }
+
+        public void setLeaderClusterAlias(String leaderClusterAlias) {
+            this.leaderClusterAlias = leaderClusterAlias;
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeOptionalString(leaderClusterAlias);
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+            Request request = (Request) o;
+            return Objects.equals(leaderClusterAlias, request.leaderClusterAlias);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(leaderClusterAlias);
+        }
+    }
+
+    public static class Response extends ActionResponse implements ToXContentObject {
+
+        private Map<String, AutoFollowPattern> autoFollowPatterns;
+
+        public Response(Map<String, AutoFollowPattern> autoFollowPatterns) {
+            this.autoFollowPatterns = autoFollowPatterns;
+        }
+
+        public Response() {
+        }
+
+        public Map<String, AutoFollowPattern> getAutoFollowPatterns() {
+            return autoFollowPatterns;
+        }
+
+        @Override
+        public void readFrom(StreamInput in) throws IOException {
+            super.readFrom(in);
+            autoFollowPatterns = in.readMap(StreamInput::readString, AutoFollowPattern::new);
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeMap(autoFollowPatterns, StreamOutput::writeString, (out1, value) -> value.writeTo(out1));
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+            for (Map.Entry<String, AutoFollowPattern> entry : autoFollowPatterns.entrySet()) {
+                builder.startObject(entry.getKey());
+                entry.getValue().toXContent(builder, params);
+                builder.endObject();
+            }
+            builder.endObject();
+            return builder;
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+            Response response = (Response) o;
+            return Objects.equals(autoFollowPatterns, response.autoFollowPatterns);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(autoFollowPatterns);
+        }
+    }
+
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowIndexAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java
similarity index 85%
rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowIndexAction.java
rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java
index 65ecd3dad2f..a5a45fea3f6 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowIndexAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java
@@ -15,12 +15,12 @@ import org.elasticsearch.common.io.stream.StreamOutput;

 import java.io.IOException;

-public class UnfollowIndexAction extends Action<AcknowledgedResponse> {
+public class PauseFollowAction extends Action<AcknowledgedResponse> {

-    public static final UnfollowIndexAction INSTANCE = new UnfollowIndexAction();
-    public static final String NAME = "cluster:admin/xpack/ccr/unfollow_index";
+    public static final PauseFollowAction INSTANCE = new PauseFollowAction();
+    public static final String NAME = "cluster:admin/xpack/ccr/pause_follow";

-    private UnfollowIndexAction() {
+    private PauseFollowAction() {
         super(NAME);
     }
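For orientation, Response#toXContent in the new GetAutoFollowPatternAction above keys each pattern by its leader cluster alias. A small sketch of the resulting nesting, built with XContentBuilder; the alias and pattern values are invented for illustration:

```java
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

import java.util.Arrays;

final class AutoFollowPatternJsonShape {
    static String example() throws Exception {
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        builder.startObject("leader_cluster_one");                        // map key = cluster alias
        builder.field("leader_index_patterns", Arrays.asList("logs-*"));  // pattern body
        builder.endObject();
        builder.endObject();
        // {"leader_cluster_one":{"leader_index_patterns":["logs-*"]}}
        return Strings.toString(builder);
    }
}
```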
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java
index 93d8d1fb7d1..d35365b0d32 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java
@@ -12,6 +12,7 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
@@ -42,18 +43,20 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
     public static class Request extends AcknowledgedRequest<Request> implements ToXContentObject {

         static final ParseField LEADER_CLUSTER_ALIAS_FIELD = new ParseField("leader_cluster_alias");
-        static final ParseField LEADER_INDEX_PATTERNS_FIELD = new ParseField("leader_index_patterns");
-        static final ParseField FOLLOW_INDEX_NAME_PATTERN_FIELD = new ParseField("follow_index_name_pattern");

         private static final ObjectParser<Request, Void> PARSER = new ObjectParser<>("put_auto_follow_pattern_request", Request::new);

         static {
             PARSER.declareString(Request::setLeaderClusterAlias, LEADER_CLUSTER_ALIAS_FIELD);
-            PARSER.declareStringArray(Request::setLeaderIndexPatterns, LEADER_INDEX_PATTERNS_FIELD);
-            PARSER.declareString(Request::setFollowIndexNamePattern, FOLLOW_INDEX_NAME_PATTERN_FIELD);
+            PARSER.declareStringArray(Request::setLeaderIndexPatterns, AutoFollowPattern.LEADER_PATTERNS_FIELD);
+            PARSER.declareString(Request::setFollowIndexNamePattern, AutoFollowPattern.FOLLOW_PATTERN_FIELD);
             PARSER.declareInt(Request::setMaxBatchOperationCount, AutoFollowPattern.MAX_BATCH_OPERATION_COUNT);
             PARSER.declareInt(Request::setMaxConcurrentReadBatches, AutoFollowPattern.MAX_CONCURRENT_READ_BATCHES);
-            PARSER.declareLong(Request::setMaxOperationSizeInBytes, AutoFollowPattern.MAX_BATCH_SIZE_IN_BYTES);
+            PARSER.declareField(
+                    Request::setMaxBatchSize,
+                    (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), AutoFollowPattern.MAX_BATCH_SIZE.getPreferredName()),
+                    AutoFollowPattern.MAX_BATCH_SIZE,
+                    ObjectParser.ValueType.STRING);
             PARSER.declareInt(Request::setMaxConcurrentWriteBatches, AutoFollowPattern.MAX_CONCURRENT_WRITE_BATCHES);
             PARSER.declareInt(Request::setMaxWriteBufferSize, AutoFollowPattern.MAX_WRITE_BUFFER_SIZE);
             PARSER.declareField(Request::setMaxRetryDelay,
@@ -84,7 +87,7 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {

         private Integer maxBatchOperationCount;
         private Integer maxConcurrentReadBatches;
-        private Long maxOperationSizeInBytes;
+        private ByteSizeValue maxBatchSize;
         private Integer maxConcurrentWriteBatches;
         private Integer maxWriteBufferSize;
         private TimeValue maxRetryDelay;
@@ -98,7 +101,7 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
                     "] is missing", validationException);
             }
             if (leaderIndexPatterns == null || leaderIndexPatterns.isEmpty()) {
-                validationException = addValidationError("[" + LEADER_INDEX_PATTERNS_FIELD.getPreferredName() +
+                validationException = addValidationError("[" + AutoFollowPattern.LEADER_PATTERNS_FIELD.getPreferredName() +
                     "] is missing", validationException);
             }
             if (maxRetryDelay != null) {
@@ -107,9 +110,9 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
                         maxRetryDelay.getStringRep() + "]";
                     validationException = addValidationError(message, validationException);
                 }
-                if (maxRetryDelay.millis() > FollowIndexAction.MAX_RETRY_DELAY.millis()) {
+                if (maxRetryDelay.millis() > ResumeFollowAction.MAX_RETRY_DELAY.millis()) {
                     String message = "[" + AutoFollowPattern.MAX_RETRY_DELAY.getPreferredName() + "] must be less than [" +
-                        FollowIndexAction.MAX_RETRY_DELAY +
+                        ResumeFollowAction.MAX_RETRY_DELAY +
                         "] but was [" + maxRetryDelay.getStringRep() + "]";
                     validationException = addValidationError(message, validationException);
                 }
@@ -157,12 +160,12 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
             this.maxConcurrentReadBatches = maxConcurrentReadBatches;
         }

-        public Long getMaxOperationSizeInBytes() {
-            return maxOperationSizeInBytes;
+        public ByteSizeValue getMaxBatchSize() {
+            return maxBatchSize;
         }

-        public void setMaxOperationSizeInBytes(Long maxOperationSizeInBytes) {
-            this.maxOperationSizeInBytes = maxOperationSizeInBytes;
+        public void setMaxBatchSize(ByteSizeValue maxBatchSize) {
+            this.maxBatchSize = maxBatchSize;
         }

         public Integer getMaxConcurrentWriteBatches() {
@@ -205,7 +208,7 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
             followIndexNamePattern = in.readOptionalString();
             maxBatchOperationCount = in.readOptionalVInt();
             maxConcurrentReadBatches = in.readOptionalVInt();
-            maxOperationSizeInBytes = in.readOptionalLong();
+            maxBatchSize = in.readOptionalWriteable(ByteSizeValue::new);
             maxConcurrentWriteBatches = in.readOptionalVInt();
             maxWriteBufferSize = in.readOptionalVInt();
             maxRetryDelay = in.readOptionalTimeValue();
@@ -220,7 +223,7 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
             out.writeOptionalString(followIndexNamePattern);
             out.writeOptionalVInt(maxBatchOperationCount);
             out.writeOptionalVInt(maxConcurrentReadBatches);
-            out.writeOptionalLong(maxOperationSizeInBytes);
+            out.writeOptionalWriteable(maxBatchSize);
             out.writeOptionalVInt(maxConcurrentWriteBatches);
             out.writeOptionalVInt(maxWriteBufferSize);
             out.writeOptionalTimeValue(maxRetryDelay);
@@ -232,15 +235,15 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
             builder.startObject();
             {
                 builder.field(LEADER_CLUSTER_ALIAS_FIELD.getPreferredName(), leaderClusterAlias);
-                builder.field(LEADER_INDEX_PATTERNS_FIELD.getPreferredName(), leaderIndexPatterns);
+                builder.field(AutoFollowPattern.LEADER_PATTERNS_FIELD.getPreferredName(), leaderIndexPatterns);
                 if (followIndexNamePattern != null) {
-                    builder.field(FOLLOW_INDEX_NAME_PATTERN_FIELD.getPreferredName(), followIndexNamePattern);
+                    builder.field(AutoFollowPattern.FOLLOW_PATTERN_FIELD.getPreferredName(), followIndexNamePattern);
                 }
                 if (maxBatchOperationCount != null) {
                     builder.field(AutoFollowPattern.MAX_BATCH_OPERATION_COUNT.getPreferredName(), maxBatchOperationCount);
                 }
-                if (maxOperationSizeInBytes != null) {
-                    builder.field(AutoFollowPattern.MAX_BATCH_SIZE_IN_BYTES.getPreferredName(), maxOperationSizeInBytes);
+                if (maxBatchSize != null) {
+                    builder.field(AutoFollowPattern.MAX_BATCH_SIZE.getPreferredName(), maxBatchSize.getStringRep());
                 }
                 if (maxWriteBufferSize != null) {
                     builder.field(AutoFollowPattern.MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize);
@@ -268,31 +271,30 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
             if (o == null || getClass() != o.getClass()) return false;
             Request request = (Request) o;
             return Objects.equals(leaderClusterAlias, request.leaderClusterAlias) &&
-                    Objects.equals(leaderIndexPatterns, request.leaderIndexPatterns) &&
-                    Objects.equals(followIndexNamePattern, request.followIndexNamePattern) &&
-                    Objects.equals(maxBatchOperationCount, request.maxBatchOperationCount) &&
-                    Objects.equals(maxConcurrentReadBatches, request.maxConcurrentReadBatches) &&
-                    Objects.equals(maxOperationSizeInBytes, request.maxOperationSizeInBytes) &&
-                    Objects.equals(maxConcurrentWriteBatches, request.maxConcurrentWriteBatches) &&
-                    Objects.equals(maxWriteBufferSize, request.maxWriteBufferSize) &&
-                    Objects.equals(maxRetryDelay, request.maxRetryDelay) &&
-                    Objects.equals(pollTimeout, request.pollTimeout);
+                Objects.equals(leaderIndexPatterns, request.leaderIndexPatterns) &&
+                Objects.equals(followIndexNamePattern, request.followIndexNamePattern) &&
+                Objects.equals(maxBatchOperationCount, request.maxBatchOperationCount) &&
+                Objects.equals(maxConcurrentReadBatches, request.maxConcurrentReadBatches) &&
+                Objects.equals(maxBatchSize, request.maxBatchSize) &&
+                Objects.equals(maxConcurrentWriteBatches, request.maxConcurrentWriteBatches) &&
+                Objects.equals(maxWriteBufferSize, request.maxWriteBufferSize) &&
+                Objects.equals(maxRetryDelay, request.maxRetryDelay) &&
+                Objects.equals(pollTimeout, request.pollTimeout);
         }

         @Override
         public int hashCode() {
             return Objects.hash(
-                    leaderClusterAlias,
-                    leaderIndexPatterns,
-                    followIndexNamePattern,
-                    maxBatchOperationCount,
-                    maxConcurrentReadBatches,
-                    maxOperationSizeInBytes,
-                    maxConcurrentWriteBatches,
-                    maxWriteBufferSize,
-                    maxRetryDelay,
-                    pollTimeout
-            );
+                leaderClusterAlias,
+                leaderIndexPatterns,
+                followIndexNamePattern,
+                maxBatchOperationCount,
+                maxConcurrentReadBatches,
+                maxBatchSize,
+                maxConcurrentWriteBatches,
+                maxWriteBufferSize,
+                maxRetryDelay,
+                pollTimeout);
         }
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CreateAndFollowIndexAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java
similarity index 89%
rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CreateAndFollowIndexAction.java
rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java
index ea63815c2b9..5fdb13871b5 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CreateAndFollowIndexAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java
@@ -20,12 +20,12 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import java.io.IOException;
 import java.util.Objects;

-public final class CreateAndFollowIndexAction extends Action<CreateAndFollowIndexAction.Response> {
+public final class PutFollowAction extends Action<PutFollowAction.Response> {

-    public static final CreateAndFollowIndexAction INSTANCE = new CreateAndFollowIndexAction();
-    public static final String NAME = "indices:admin/xpack/ccr/create_and_follow_index";
+    public static final PutFollowAction INSTANCE = new PutFollowAction();
+    public static final String NAME = "indices:admin/xpack/ccr/put_follow";

-    private CreateAndFollowIndexAction() {
+    private PutFollowAction() {
         super(NAME);
     }

@@ -36,9 +36,9 @@ public final class CreateAndFollowIndexAction extends Action<CreateAndFollowIndexAction.Response> {

     public static class Request extends AcknowledgedRequest<Request> implements IndicesRequest {

-        private FollowIndexAction.Request followRequest;
+        private ResumeFollowAction.Request followRequest;

-        public Request(FollowIndexAction.Request followRequest) {
+        public Request(ResumeFollowAction.Request followRequest) {
             this.followRequest = Objects.requireNonNull(followRequest);
         }

@@ -46,7 +46,7 @@ public final class CreateAndFollowIndexAction extends Action<CreateAndFollowIndexAction.Response> {
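The Long to ByteSizeValue migration threaded through the request classes above changes the REST contract: max_batch_size now accepts unit suffixes rather than a raw byte count. A quick, self-contained illustration of the parse call used in the declareField hooks; the second argument is only a label used in error messages:

```java
import org.elasticsearch.common.unit.ByteSizeValue;

final class MaxBatchSizeParsing {
    public static void main(String[] args) {
        // "10mb" now works where only a byte count like 10485760 did before.
        ByteSizeValue size = ByteSizeValue.parseBytesSizeValue("10mb", "max_batch_size");
        System.out.println(size.getBytes());     // 10485760
        System.out.println(size.getStringRep()); // 10mb, as rendered by toXContent above
    }
}
```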
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowIndexAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java
rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowIndexAction.java
rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowIndexAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java
-public final class FollowIndexAction extends Action<AcknowledgedResponse> {
+public final class ResumeFollowAction extends Action<AcknowledgedResponse> {

-    public static final FollowIndexAction INSTANCE = new FollowIndexAction();
-    public static final String NAME = "cluster:admin/xpack/ccr/follow_index";
+    public static final ResumeFollowAction INSTANCE = new ResumeFollowAction();
+    public static final String NAME = "cluster:admin/xpack/ccr/resume_follow";

     public static final TimeValue MAX_RETRY_DELAY = TimeValue.timeValueMinutes(5);

-    private FollowIndexAction() {
+    private ResumeFollowAction() {
         super(NAME);
     }

@@ -46,7 +47,7 @@ public final class FollowIndexAction extends Action<AcknowledgedResponse> {
         private static final ParseField FOLLOWER_INDEX_FIELD = new ParseField("follower_index");
         private static final ParseField MAX_BATCH_OPERATION_COUNT = new ParseField("max_batch_operation_count");
         private static final ParseField MAX_CONCURRENT_READ_BATCHES = new ParseField("max_concurrent_read_batches");
-        private static final ParseField MAX_BATCH_SIZE_IN_BYTES = new ParseField("max_batch_size_in_bytes");
+        private static final ParseField MAX_BATCH_SIZE = new ParseField("max_batch_size");
         private static final ParseField MAX_CONCURRENT_WRITE_BATCHES = new ParseField("max_concurrent_write_batches");
         private static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size");
         private static final ParseField MAX_RETRY_DELAY_FIELD = new ParseField("max_retry_delay");
@@ -58,7 +59,11 @@ public final class FollowIndexAction extends Action<AcknowledgedResponse> {
             PARSER.declareString(Request::setFollowerIndex, FOLLOWER_INDEX_FIELD);
             PARSER.declareInt(Request::setMaxBatchOperationCount, MAX_BATCH_OPERATION_COUNT);
             PARSER.declareInt(Request::setMaxConcurrentReadBatches, MAX_CONCURRENT_READ_BATCHES);
-            PARSER.declareLong(Request::setMaxOperationSizeInBytes, MAX_BATCH_SIZE_IN_BYTES);
+            PARSER.declareField(
+                    Request::setMaxBatchSize,
+                    (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_BATCH_SIZE.getPreferredName()),
+                    MAX_BATCH_SIZE,
+                    ObjectParser.ValueType.STRING);
             PARSER.declareInt(Request::setMaxConcurrentWriteBatches, MAX_CONCURRENT_WRITE_BATCHES);
             PARSER.declareInt(Request::setMaxWriteBufferSize, MAX_WRITE_BUFFER_SIZE);
             PARSER.declareField(
@@ -127,14 +132,14 @@ public final class FollowIndexAction extends Action<AcknowledgedResponse> {
             this.maxConcurrentReadBatches = maxConcurrentReadBatches;
         }

-        private Long maxOperationSizeInBytes;
+        private ByteSizeValue maxBatchSize;

-        public Long getMaxOperationSizeInBytes() {
-            return maxOperationSizeInBytes;
+        public ByteSizeValue getMaxBatchSize() {
+            return maxBatchSize;
         }

-        public void setMaxOperationSizeInBytes(Long maxOperationSizeInBytes) {
-            this.maxOperationSizeInBytes = maxOperationSizeInBytes;
+        public void setMaxBatchSize(ByteSizeValue maxBatchSize) {
+            this.maxBatchSize = maxBatchSize;
         }

         private Integer maxConcurrentWriteBatches;
@@ -196,8 +201,8 @@ public final class FollowIndexAction extends Action<AcknowledgedResponse> {
             if (maxConcurrentReadBatches != null && maxConcurrentReadBatches < 1) {
                 e = addValidationError(MAX_CONCURRENT_READ_BATCHES.getPreferredName() + " must be larger than 0", e);
             }
-            if (maxOperationSizeInBytes != null && maxOperationSizeInBytes <= 0) {
-                e = addValidationError(MAX_BATCH_SIZE_IN_BYTES.getPreferredName() + " must be larger than 0", e);
+            if (maxBatchSize != null && maxBatchSize.compareTo(ByteSizeValue.ZERO) <= 0) {
+                e = addValidationError(MAX_BATCH_SIZE.getPreferredName() + " must be larger than 0", e);
             }
             if (maxConcurrentWriteBatches != null && maxConcurrentWriteBatches < 1) {
                 e = addValidationError(MAX_CONCURRENT_WRITE_BATCHES.getPreferredName() + " must be larger than 0", e);
@@ -210,7 +215,7 @@ public final class FollowIndexAction extends Action<AcknowledgedResponse> {
                     maxRetryDelay.getStringRep() + "]";
                 e = addValidationError(message, e);
             }
-            if (maxRetryDelay != null && maxRetryDelay.millis() > FollowIndexAction.MAX_RETRY_DELAY.millis()) {
+            if (maxRetryDelay != null && maxRetryDelay.millis() > ResumeFollowAction.MAX_RETRY_DELAY.millis()) {
                 String message = "[" + MAX_RETRY_DELAY_FIELD.getPreferredName() + "] must be less than [" + MAX_RETRY_DELAY +
                     "] but was [" + maxRetryDelay.getStringRep() + "]";
                 e = addValidationError(message, e);
@@ -226,7 +231,7 @@ public final class FollowIndexAction extends Action<AcknowledgedResponse> {
             followerIndex = in.readString();
             maxBatchOperationCount = in.readOptionalVInt();
             maxConcurrentReadBatches = in.readOptionalVInt();
-            maxOperationSizeInBytes = in.readOptionalLong();
+            maxBatchSize = in.readOptionalWriteable(ByteSizeValue::new);
             maxConcurrentWriteBatches = in.readOptionalVInt();
             maxWriteBufferSize = in.readOptionalVInt();
             maxRetryDelay = in.readOptionalTimeValue();
@@ -240,7 +245,7 @@ public final class FollowIndexAction extends Action<AcknowledgedResponse> {
             out.writeString(followerIndex);
             out.writeOptionalVInt(maxBatchOperationCount);
             out.writeOptionalVInt(maxConcurrentReadBatches);
-            out.writeOptionalLong(maxOperationSizeInBytes);
+            out.writeOptionalWriteable(maxBatchSize);
             out.writeOptionalVInt(maxConcurrentWriteBatches);
             out.writeOptionalVInt(maxWriteBufferSize);
             out.writeOptionalTimeValue(maxRetryDelay);
@@ -256,8 +261,8 @@ public final class FollowIndexAction extends Action<AcknowledgedResponse> {
             if (maxBatchOperationCount != null) {
                 builder.field(MAX_BATCH_OPERATION_COUNT.getPreferredName(), maxBatchOperationCount);
             }
-            if (maxOperationSizeInBytes != null) {
-                builder.field(MAX_BATCH_SIZE_IN_BYTES.getPreferredName(), maxOperationSizeInBytes);
+            if (maxBatchSize != null) {
+                builder.field(MAX_BATCH_SIZE.getPreferredName(), maxBatchSize.getStringRep());
             }
             if (maxWriteBufferSize != null) {
                 builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize);
@@ -285,29 +290,28 @@ public final class FollowIndexAction extends Action<AcknowledgedResponse> {
             if (o == null || getClass() != o.getClass()) return false;
             Request request = (Request) o;
             return Objects.equals(maxBatchOperationCount, request.maxBatchOperationCount) &&
-                    Objects.equals(maxConcurrentReadBatches, request.maxConcurrentReadBatches) &&
-                    Objects.equals(maxOperationSizeInBytes, request.maxOperationSizeInBytes) &&
-                    Objects.equals(maxConcurrentWriteBatches, request.maxConcurrentWriteBatches) &&
-                    Objects.equals(maxWriteBufferSize, request.maxWriteBufferSize) &&
-                    Objects.equals(maxRetryDelay, request.maxRetryDelay) &&
-                    Objects.equals(pollTimeout, request.pollTimeout) &&
-                    Objects.equals(leaderIndex, request.leaderIndex) &&
-                    Objects.equals(followerIndex, request.followerIndex);
+                Objects.equals(maxConcurrentReadBatches, request.maxConcurrentReadBatches) &&
+                Objects.equals(maxBatchSize, request.maxBatchSize) &&
+                Objects.equals(maxConcurrentWriteBatches, request.maxConcurrentWriteBatches) &&
+                Objects.equals(maxWriteBufferSize, request.maxWriteBufferSize) &&
+                Objects.equals(maxRetryDelay, request.maxRetryDelay) &&
+                Objects.equals(pollTimeout, request.pollTimeout) &&
+                Objects.equals(leaderIndex, request.leaderIndex) &&
+                Objects.equals(followerIndex, request.followerIndex);
         }

         @Override
         public int hashCode() {
             return Objects.hash(
-                    leaderIndex,
-                    followerIndex,
-                    maxBatchOperationCount,
-                    maxConcurrentReadBatches,
-                    maxOperationSizeInBytes,
-                    maxConcurrentWriteBatches,
-                    maxWriteBufferSize,
-                    maxRetryDelay,
-                    pollTimeout
-            );
+                leaderIndex,
+                followerIndex,
+                maxBatchOperationCount,
+                maxConcurrentReadBatches,
+                maxBatchSize,
+                maxConcurrentWriteBatches,
+                maxWriteBufferSize,
+                maxRetryDelay,
+                pollTimeout);
         }
     }
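The validate() changes above keep the accumulate-and-return idiom: addValidationError lazily creates the exception on the first failure and appends subsequent messages. Sketched in isolation, with hypothetical field values:

```java
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.common.unit.ByteSizeValue;

import static org.elasticsearch.action.ValidateActions.addValidationError;

final class ValidationSketch {
    static ActionRequestValidationException validate(Integer maxBatchOperationCount, ByteSizeValue maxBatchSize) {
        ActionRequestValidationException e = null;   // null means "valid so far"
        if (maxBatchOperationCount != null && maxBatchOperationCount < 1) {
            e = addValidationError("max_batch_operation_count must be larger than 0", e);
        }
        if (maxBatchSize != null && maxBatchSize.compareTo(ByteSizeValue.ZERO) <= 0) {
            e = addValidationError("max_batch_size must be larger than 0", e);
        }
        return e;                                    // caller treats non-null as failure
    }
}
```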
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java
new file mode 100644
index 00000000000..cf8c9ec2e61
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.core.ccr.action;
+
+import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.support.master.AcknowledgedRequest;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+
+import java.io.IOException;
+
+import static org.elasticsearch.action.ValidateActions.addValidationError;
+
+public class UnfollowAction extends Action<AcknowledgedResponse> {
+
+    public static final UnfollowAction INSTANCE = new UnfollowAction();
+    public static final String NAME = "cluster:admin/xpack/ccr/unfollow";
+
+    private UnfollowAction() {
+        super(NAME);
+    }
+
+    @Override
+    public AcknowledgedResponse newResponse() {
+        return new AcknowledgedResponse();
+    }
+
+    public static class Request extends AcknowledgedRequest<Request> {
+
+        private final String followerIndex;
+
+        public Request(String followerIndex) {
+            this.followerIndex = followerIndex;
+        }
+
+        public Request(StreamInput in) throws IOException {
+            super(in);
+            followerIndex = in.readString();
+        }
+
+        public String getFollowerIndex() {
+            return followerIndex;
+        }
+
+        @Override
+        public ActionRequestValidationException validate() {
+            ActionRequestValidationException e = null;
+            if (followerIndex == null) {
+                e = addValidationError("follower index is missing", e);
+            }
+            return e;
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeString(followerIndex);
+        }
+    }
+
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/client/CcrClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/client/CcrClient.java
index 3100dae9edf..1dab97599df 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/client/CcrClient.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/client/CcrClient.java
@@ -11,12 +11,15 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.ElasticsearchClient;
+import org.elasticsearch.xpack.core.ccr.action.AutoFollowStatsAction;
 import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction;
-import org.elasticsearch.xpack.core.ccr.action.CreateAndFollowIndexAction;
+import org.elasticsearch.xpack.core.ccr.action.PutFollowAction;
 import org.elasticsearch.xpack.core.ccr.action.DeleteAutoFollowPatternAction;
-import org.elasticsearch.xpack.core.ccr.action.FollowIndexAction;
+import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction;
+import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction;
 import org.elasticsearch.xpack.core.ccr.action.PutAutoFollowPatternAction;
-import org.elasticsearch.xpack.core.ccr.action.UnfollowIndexAction;
+import org.elasticsearch.xpack.core.ccr.action.PauseFollowAction;
+import org.elasticsearch.xpack.core.ccr.action.UnfollowAction;

 import java.util.Objects;

@@ -28,25 +31,25 @@ public class CcrClient {
         this.client = Objects.requireNonNull(client, "client");
     }

-    public void createAndFollow(
-            final CreateAndFollowIndexAction.Request request,
-            final ActionListener<CreateAndFollowIndexAction.Response> listener) {
-        client.execute(CreateAndFollowIndexAction.INSTANCE, request, listener);
+    public void putFollow(
+            final PutFollowAction.Request request,
+            final ActionListener<PutFollowAction.Response> listener) {
+        client.execute(PutFollowAction.INSTANCE, request, listener);
     }

-    public ActionFuture<CreateAndFollowIndexAction.Response> createAndFollow(final CreateAndFollowIndexAction.Request request) {
-        final PlainActionFuture<CreateAndFollowIndexAction.Response> listener = PlainActionFuture.newFuture();
-        client.execute(CreateAndFollowIndexAction.INSTANCE, request, listener);
+    public ActionFuture<PutFollowAction.Response> putFollow(final PutFollowAction.Request request) {
+        final PlainActionFuture<PutFollowAction.Response> listener = PlainActionFuture.newFuture();
+        client.execute(PutFollowAction.INSTANCE, request, listener);
         return listener;
     }

-    public void follow(final FollowIndexAction.Request request, final ActionListener<AcknowledgedResponse> listener) {
-        client.execute(FollowIndexAction.INSTANCE, request, listener);
+    public void resumeFollow(final ResumeFollowAction.Request request, final ActionListener<AcknowledgedResponse> listener) {
+        client.execute(ResumeFollowAction.INSTANCE, request, listener);
     }

-    public ActionFuture<AcknowledgedResponse> follow(final FollowIndexAction.Request request) {
+    public ActionFuture<AcknowledgedResponse> resumeFollow(final ResumeFollowAction.Request request) {
         final PlainActionFuture<AcknowledgedResponse> listener = PlainActionFuture.newFuture();
-        client.execute(FollowIndexAction.INSTANCE, request, listener);
+        client.execute(ResumeFollowAction.INSTANCE, request, listener);
         return listener;
     }

@@ -62,13 +65,34 @@ public class CcrClient {
         return listener;
     }

-    public void unfollow(final UnfollowIndexAction.Request request, final ActionListener<AcknowledgedResponse> listener) {
-        client.execute(UnfollowIndexAction.INSTANCE, request, listener);
+    public void autoFollowStats(final AutoFollowStatsAction.Request request,
+                                final ActionListener<AutoFollowStatsAction.Response> listener) {
+        client.execute(AutoFollowStatsAction.INSTANCE, request, listener);
     }

-    public ActionFuture<AcknowledgedResponse> unfollow(final UnfollowIndexAction.Request request) {
+    public ActionFuture<AutoFollowStatsAction.Response> autoFollowStats(final AutoFollowStatsAction.Request request) {
+        final PlainActionFuture<AutoFollowStatsAction.Response> listener = PlainActionFuture.newFuture();
+        autoFollowStats(request, listener);
+        return listener;
+    }
+
+    public void pauseFollow(final PauseFollowAction.Request request, final ActionListener<AcknowledgedResponse> listener) {
+        client.execute(PauseFollowAction.INSTANCE, request, listener);
+    }
+
+    public ActionFuture<AcknowledgedResponse> pauseFollow(final PauseFollowAction.Request request) {
         final PlainActionFuture<AcknowledgedResponse> listener = PlainActionFuture.newFuture();
-        client.execute(UnfollowIndexAction.INSTANCE, request, listener);
+        client.execute(PauseFollowAction.INSTANCE, request, listener);
+        return listener;
+    }
+
+    public void unfollow(final UnfollowAction.Request request, final ActionListener<AcknowledgedResponse> listener) {
+        client.execute(UnfollowAction.INSTANCE, request, listener);
+    }
+
+    public ActionFuture<AcknowledgedResponse> unfollow(final UnfollowAction.Request request) {
+        final PlainActionFuture<AcknowledgedResponse> listener = PlainActionFuture.newFuture();
+        client.execute(UnfollowAction.INSTANCE, request, listener);
         return listener;
     }

@@ -96,4 +120,16 @@ public class CcrClient {
         return listener;
     }

+    public void getAutoFollowPattern(
+            final GetAutoFollowPatternAction.Request request,
+            final ActionListener<GetAutoFollowPatternAction.Response> listener) {
+        client.execute(GetAutoFollowPatternAction.INSTANCE, request, listener);
+    }
+
+    public ActionFuture<GetAutoFollowPatternAction.Response> getAutoFollowPattern(final GetAutoFollowPatternAction.Request request) {
+        final PlainActionFuture<GetAutoFollowPatternAction.Response> listener = PlainActionFuture.newFuture();
+        client.execute(GetAutoFollowPatternAction.INSTANCE, request, listener);
+        return listener;
+    }
+
 }
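Every blocking variant added to CcrClient above follows the same shape: create a PlainActionFuture, pass it to the async execute, and hand the future back to the caller. A generic helper expressing that pattern; this helper is hypothetical and not part of the change:

```java
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.ElasticsearchClient;

final class SyncOverAsync {
    // The future serves double duty: it is the ActionListener handed to the
    // async path and the blocking handle returned to the caller.
    static <Req extends ActionRequest, Resp extends ActionResponse> ActionFuture<Resp> execute(
            ElasticsearchClient client, Action<Resp> action, Req request) {
        PlainActionFuture<Resp> future = PlainActionFuture.newFuture();
        client.execute(action, request, future);
        return future;
    }
}
```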
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java
index 56b7ec2b52f..9fbde4721cd 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java
@@ -16,7 +16,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.xpack.core.ml.job.config.Job;
-import org.elasticsearch.xpack.core.ml.job.persistence.JobStorageDeletionTask;
+import org.elasticsearch.xpack.core.ml.job.persistence.JobDeletionTask;
 import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;

 import java.io.IOException;
@@ -71,7 +71,7 @@ public class DeleteJobAction extends Action<AcknowledgedResponse> {

         @Override
         public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
-            return new JobStorageDeletionTask(id, type, action, "delete-job-" + jobId, parentTaskId, headers);
+            return new JobDeletionTask(id, type, action, "delete-job-" + jobId, parentTaskId, headers);
         }

         @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureAction.java
index d10fedfb589..78fcc4939ca 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureAction.java
@@ -16,6 +16,7 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.StatusToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.rest.RestStatus;
@@ -112,6 +113,7 @@ public class FindFileStructureAction extends Action<FindFileStructureAction.Response> {
         private List<String> columnNames;
@@ -151,6 +154,14 @@ public class FindFileStructureAction extends Action<FindFileStructureAction.Response> {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java
         public static final ParseField IGNORE_DOWNTIME = new ParseField("ignore_downtime");
         public static final ParseField TIMEOUT = new ParseField("timeout");

-        public static ObjectParser<JobParams, Void> PARSER = new ObjectParser<>(TASK_NAME, JobParams::new);
+        public static ObjectParser<JobParams, Void> PARSER = new ObjectParser<>(TASK_NAME, true, JobParams::new);

         static {
             PARSER.declareString(JobParams::setJobId, Job.ID);
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java
index 9c4a67ec61f..57a602b4cf0 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java
@@ -17,7 +17,7 @@ import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.joda.DateMathParser;
+import org.elasticsearch.common.time.DateMathParser;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -141,7 +141,7 @@ public class StartDatafeedAction extends Action<AcknowledgedResponse> {

     public static class DatafeedParams implements XPackPlugin.XPackPersistentTaskParams {

-        public static ObjectParser<DatafeedParams, Void> PARSER = new ObjectParser<>(TASK_NAME, DatafeedParams::new);
+        public static ObjectParser<DatafeedParams, Void> PARSER = new ObjectParser<>(TASK_NAME, true, DatafeedParams::new);

         static {
             PARSER.declareString((params, datafeedId) -> params.datafeedId = datafeedId, DatafeedConfig.ID);
@@ -153,7 +153,7 @@ public class StartDatafeedAction extends Action<AcknowledgedResponse> {
         }

         static long parseDateOrThrow(String date, ParseField paramName, LongSupplier now) {
-            DateMathParser dateMathParser = new DateMathParser(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER);
+            DateMathParser dateMathParser = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser();

             try {
                 return dateMathParser.parse(date, now);
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java
index 03b58732a37..9cad992327e 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java
@@ -35,6 +35,7 @@ import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils;

 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
@@ -549,7 +550,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements ToXContentObject {
             throw ExceptionsHelper.badRequestException(
                 Messages.getMessage(Messages.DATAFEED_CONFIG_CANNOT_USE_SCRIPT_FIELDS_WITH_AGGS));
         }
-        List<AggregationBuilder> aggregatorFactories = aggregations.getAggregatorFactories();
+        Collection<AggregationBuilder> aggregatorFactories = aggregations.getAggregatorFactories();
         if (aggregatorFactories.isEmpty()) {
             throw ExceptionsHelper.badRequestException(Messages.DATAFEED_AGGREGATIONS_REQUIRES_DATE_HISTOGRAM);
         }
@@ -560,7 +561,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements ToXContentObject {
         checkHistogramIntervalIsPositive(histogramAggregation);
     }

-    private static void checkNoMoreHistogramAggregations(List<AggregationBuilder> aggregations) {
+    private static void checkNoMoreHistogramAggregations(Collection<AggregationBuilder> aggregations) {
         for (AggregationBuilder agg : aggregations) {
             if (ExtractorUtils.isHistogram(agg)) {
                 throw ExceptionsHelper.badRequestException(Messages.DATAFEED_AGGREGATIONS_MAX_ONE_DATE_HISTOGRAM);
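The List to Collection widening above tracks the changed return type of the aggregation factory container; the practical consequence is that positional access disappears, which is why get(0) becomes iterator().next() in the ExtractorUtils hunk that follows. The core idiom, isolated:

```java
import java.util.Collection;

final class FirstElement {
    // Any Collection can supply its first element via an iterator;
    // only List offers index-based get(0).
    static <T> T first(Collection<T> items) {
        if (items.isEmpty()) {
            throw new IllegalArgumentException("expected at least one element");
        }
        return items.iterator().next();
    }
}
```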
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java
index b0794adae4a..6d9312654fa 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java
@@ -26,7 +26,7 @@ import org.joda.time.DateTimeZone;

 import java.io.IOException;
 import java.util.Arrays;
-import java.util.List;
+import java.util.Collection;
 import java.util.concurrent.TimeUnit;

 /**
@@ -83,7 +83,7 @@ public final class ExtractorUtils {
      * @param aggregations List of aggregations
      * @return A {@link HistogramAggregationBuilder} or a {@link DateHistogramAggregationBuilder}
      */
-    public static AggregationBuilder getHistogramAggregation(List<AggregationBuilder> aggregations) {
+    public static AggregationBuilder getHistogramAggregation(Collection<AggregationBuilder> aggregations) {
         if (aggregations.isEmpty()) {
             throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.DATAFEED_AGGREGATIONS_REQUIRES_DATE_HISTOGRAM));
         }
@@ -91,7 +91,7 @@ public final class ExtractorUtils {
             throw ExceptionsHelper.badRequestException(Messages.DATAFEED_AGGREGATIONS_REQUIRES_DATE_HISTOGRAM_NO_SIBLINGS);
         }

-        AggregationBuilder agg = aggregations.get(0);
+        AggregationBuilder agg = aggregations.iterator().next();
         if (isHistogram(agg)) {
             return agg;
         } else {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStats.java
index 8f624d000cc..b371ca739bb 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStats.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStats.java
@@ -123,16 +123,16 @@ public class FieldStats implements ToXContentObject, Writeable {
         builder.field(COUNT.getPreferredName(), count);
         builder.field(CARDINALITY.getPreferredName(), cardinality);
         if (minValue != null) {
-            builder.field(MIN_VALUE.getPreferredName(), minValue);
+            builder.field(MIN_VALUE.getPreferredName(), toIntegerIfInteger(minValue));
         }
         if (maxValue != null) {
-            builder.field(MAX_VALUE.getPreferredName(), maxValue);
+            builder.field(MAX_VALUE.getPreferredName(), toIntegerIfInteger(maxValue));
         }
         if (meanValue != null) {
-            builder.field(MEAN_VALUE.getPreferredName(), meanValue);
+            builder.field(MEAN_VALUE.getPreferredName(), toIntegerIfInteger(meanValue));
         }
         if (medianValue != null) {
-            builder.field(MEDIAN_VALUE.getPreferredName(), medianValue);
+            builder.field(MEDIAN_VALUE.getPreferredName(), toIntegerIfInteger(medianValue));
         }
         if (topHits.isEmpty() == false) {
             builder.field(TOP_HITS.getPreferredName(), topHits);
@@ -142,6 +142,15 @@ public class FieldStats implements ToXContentObject, Writeable {
         return builder;
     }

+    public static Number toIntegerIfInteger(double d) {
+
+        if (d >= Integer.MIN_VALUE && d <= Integer.MAX_VALUE && Double.compare(d, StrictMath.rint(d)) == 0) {
+            return (int) d;
+        }
+
+        return d;
+    }
+
     @Override
     public int hashCode() {
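The toIntegerIfInteger helper added above controls how FieldStats renders numbers: whole values inside int range serialize as JSON integers, everything else stays a double. Its expected behavior, as a small demo:

```java
import org.elasticsearch.xpack.core.ml.filestructurefinder.FieldStats;

final class ToIntegerIfIntegerDemo {
    public static void main(String[] args) {
        System.out.println(FieldStats.toIntegerIfInteger(3.0));  // 3      (whole, in int range)
        System.out.println(FieldStats.toIntegerIfInteger(3.5));  // 3.5    (fractional)
        System.out.println(FieldStats.toIntegerIfInteger(5e12)); // 5.0E12 (outside int range)
    }
}
```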
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java
index db5f29f3b1b..1ac9f081ebe 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java
@@ -99,7 +99,8 @@ public class FileStructure implements ToXContentObject, Writeable {
     public static final ParseField SHOULD_TRIM_FIELDS = new ParseField("should_trim_fields");
     public static final ParseField GROK_PATTERN = new ParseField("grok_pattern");
     public static final ParseField TIMESTAMP_FIELD = new ParseField("timestamp_field");
-    public static final ParseField TIMESTAMP_FORMATS = new ParseField("timestamp_formats");
+    public static final ParseField JODA_TIMESTAMP_FORMATS = new ParseField("joda_timestamp_formats");
+    public static final ParseField JAVA_TIMESTAMP_FORMATS = new ParseField("java_timestamp_formats");
     public static final ParseField NEED_CLIENT_TIMEZONE = new ParseField("need_client_timezone");
     public static final ParseField MAPPINGS = new ParseField("mappings");
     public static final ParseField FIELD_STATS = new ParseField("field_stats");
@@ -123,7 +124,8 @@ public class FileStructure implements ToXContentObject, Writeable {
         PARSER.declareBoolean(Builder::setShouldTrimFields, SHOULD_TRIM_FIELDS);
         PARSER.declareString(Builder::setGrokPattern, GROK_PATTERN);
         PARSER.declareString(Builder::setTimestampField, TIMESTAMP_FIELD);
-        PARSER.declareStringArray(Builder::setTimestampFormats, TIMESTAMP_FORMATS);
+        PARSER.declareStringArray(Builder::setJodaTimestampFormats, JODA_TIMESTAMP_FORMATS);
+        PARSER.declareStringArray(Builder::setJavaTimestampFormats, JAVA_TIMESTAMP_FORMATS);
         PARSER.declareBoolean(Builder::setNeedClientTimezone, NEED_CLIENT_TIMEZONE);
         PARSER.declareObject(Builder::setMappings, (p, c) -> new TreeMap<>(p.map()), MAPPINGS);
         PARSER.declareObject(Builder::setFieldStats, (p, c) -> {
@@ -150,7 +152,8 @@ public class FileStructure implements ToXContentObject, Writeable {
     private final Character quote;
     private final Boolean shouldTrimFields;
     private final String grokPattern;
-    private final List<String> timestampFormats;
+    private final List<String> jodaTimestampFormats;
+    private final List<String> javaTimestampFormats;
     private final String timestampField;
     private final boolean needClientTimezone;
     private final SortedMap<String, Object> mappings;
@@ -160,8 +163,9 @@ public class FileStructure implements ToXContentObject, Writeable {
     public FileStructure(int numLinesAnalyzed, int numMessagesAnalyzed, String sampleStart, String charset, Boolean hasByteOrderMarker,
                          Format format, String multilineStartPattern, String excludeLinesPattern, List<String> columnNames,
                          Boolean hasHeaderRow, Character delimiter, Character quote, Boolean shouldTrimFields, String grokPattern,
-                         String timestampField, List<String> timestampFormats, boolean needClientTimezone, Map<String, Object> mappings,
-                         Map<String, FieldStats> fieldStats, List<String> explanation) {
+                         String timestampField, List<String> jodaTimestampFormats, List<String> javaTimestampFormats,
+                         boolean needClientTimezone, Map<String, Object> mappings, Map<String, FieldStats> fieldStats,
+                         List<String> explanation) {

         this.numLinesAnalyzed = numLinesAnalyzed;
         this.numMessagesAnalyzed = numMessagesAnalyzed;
@@ -178,7 +182,10 @@ public class FileStructure implements ToXContentObject, Writeable {
         this.shouldTrimFields = shouldTrimFields;
         this.grokPattern = grokPattern;
         this.timestampField = timestampField;
-        this.timestampFormats = (timestampFormats == null) ? null : Collections.unmodifiableList(new ArrayList<>(timestampFormats));
+        this.jodaTimestampFormats =
+            (jodaTimestampFormats == null) ? null : Collections.unmodifiableList(new ArrayList<>(jodaTimestampFormats));
+        this.javaTimestampFormats =
+            (javaTimestampFormats == null) ? null : Collections.unmodifiableList(new ArrayList<>(javaTimestampFormats));
         this.needClientTimezone = needClientTimezone;
         this.mappings = Collections.unmodifiableSortedMap(new TreeMap<>(mappings));
         this.fieldStats = Collections.unmodifiableSortedMap(new TreeMap<>(fieldStats));
@@ -200,7 +207,8 @@ public class FileStructure implements ToXContentObject, Writeable {
         quote = in.readBoolean() ? (char) in.readVInt() : null;
         shouldTrimFields = in.readOptionalBoolean();
         grokPattern = in.readOptionalString();
-        timestampFormats = in.readBoolean() ? Collections.unmodifiableList(in.readList(StreamInput::readString)) : null;
+        jodaTimestampFormats = in.readBoolean() ? Collections.unmodifiableList(in.readList(StreamInput::readString)) : null;
+        javaTimestampFormats = in.readBoolean() ? Collections.unmodifiableList(in.readList(StreamInput::readString)) : null;
         timestampField = in.readOptionalString();
         needClientTimezone = in.readBoolean();
         mappings = Collections.unmodifiableSortedMap(new TreeMap<>(in.readMap()));
@@ -239,11 +247,17 @@ public class FileStructure implements ToXContentObject, Writeable {
         }
         out.writeOptionalBoolean(shouldTrimFields);
         out.writeOptionalString(grokPattern);
-        if (timestampFormats == null) {
+        if (jodaTimestampFormats == null) {
             out.writeBoolean(false);
         } else {
             out.writeBoolean(true);
-            out.writeCollection(timestampFormats, StreamOutput::writeString);
+            out.writeCollection(jodaTimestampFormats, StreamOutput::writeString);
+        }
+        if (javaTimestampFormats == null) {
+            out.writeBoolean(false);
+        } else {
+            out.writeBoolean(true);
+            out.writeCollection(javaTimestampFormats, StreamOutput::writeString);
         }
         out.writeOptionalString(timestampField);
         out.writeBoolean(needClientTimezone);
@@ -312,8 +326,12 @@ public class FileStructure implements ToXContentObject, Writeable {
         return timestampField;
     }

-    public List<String> getTimestampFormats() {
-        return timestampFormats;
+    public List<String> getJodaTimestampFormats() {
+        return jodaTimestampFormats;
+    }
+
+    public List<String> getJavaTimestampFormats() {
+        return javaTimestampFormats;
     }

     public boolean needClientTimezone() {
@@ -371,8 +389,11 @@ public class FileStructure implements ToXContentObject, Writeable {
         if (timestampField != null && timestampField.isEmpty() == false) {
             builder.field(TIMESTAMP_FIELD.getPreferredName(), timestampField);
         }
-        if (timestampFormats != null && timestampFormats.isEmpty() == false) {
-            builder.field(TIMESTAMP_FORMATS.getPreferredName(), timestampFormats);
+        if (jodaTimestampFormats != null && jodaTimestampFormats.isEmpty() == false) {
+            builder.field(JODA_TIMESTAMP_FORMATS.getPreferredName(), jodaTimestampFormats);
+        }
+        if (javaTimestampFormats != null && javaTimestampFormats.isEmpty() == false) {
+            builder.field(JAVA_TIMESTAMP_FORMATS.getPreferredName(), javaTimestampFormats);
         }
         builder.field(NEED_CLIENT_TIMEZONE.getPreferredName(), needClientTimezone);
         builder.field(MAPPINGS.getPreferredName(), mappings);
@@ -396,7 +417,7 @@ public class FileStructure implements ToXContentObject, Writeable {
         return Objects.hash(numLinesAnalyzed, numMessagesAnalyzed, sampleStart, charset, hasByteOrderMarker, format,
             multilineStartPattern, excludeLinesPattern, columnNames, hasHeaderRow, delimiter, quote, shouldTrimFields, grokPattern,
-            timestampField, timestampFormats, needClientTimezone, mappings, fieldStats, explanation);
+            timestampField, jodaTimestampFormats, javaTimestampFormats, needClientTimezone, mappings, fieldStats, explanation);
     }
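Each timestamp-format list above is serialized as a presence flag followed by the strings, which is what lets a null list survive the wire round trip. The write/read pair in isolation; the helper names are illustrative:

```java
import java.io.IOException;
import java.util.Collections;
import java.util.List;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

final class OptionalStringListWire {
    static void write(StreamOutput out, List<String> list) throws IOException {
        if (list == null) {
            out.writeBoolean(false);                              // absent
        } else {
            out.writeBoolean(true);                               // present
            out.writeCollection(list, StreamOutput::writeString); // then the strings
        }
    }

    static List<String> read(StreamInput in) throws IOException {
        return in.readBoolean() ? Collections.unmodifiableList(in.readList(StreamInput::readString)) : null;
    }
}
```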
@@ -413,7 +434,6 @@ public class FileStructure implements ToXContentObject, Writeable {
         FileStructure that = (FileStructure) other;
         return this.numLinesAnalyzed == that.numLinesAnalyzed &&
             this.numMessagesAnalyzed == that.numMessagesAnalyzed &&
-            this.needClientTimezone == that.needClientTimezone &&
             Objects.equals(this.sampleStart, that.sampleStart) &&
             Objects.equals(this.charset, that.charset) &&
             Objects.equals(this.hasByteOrderMarker, that.hasByteOrderMarker) &&
@@ -427,7 +447,9 @@ public class FileStructure implements ToXContentObject, Writeable {
             Objects.equals(this.shouldTrimFields, that.shouldTrimFields) &&
             Objects.equals(this.grokPattern, that.grokPattern) &&
             Objects.equals(this.timestampField, that.timestampField) &&
-            Objects.equals(this.timestampFormats, that.timestampFormats) &&
+            Objects.equals(this.jodaTimestampFormats, that.jodaTimestampFormats) &&
+            Objects.equals(this.javaTimestampFormats, that.javaTimestampFormats) &&
+            this.needClientTimezone == that.needClientTimezone &&
             Objects.equals(this.mappings, that.mappings) &&
             Objects.equals(this.fieldStats, that.fieldStats) &&
             Objects.equals(this.explanation, that.explanation);
@@ -450,7 +472,8 @@ public class FileStructure implements ToXContentObject, Writeable {
         private Boolean shouldTrimFields;
         private String grokPattern;
         private String timestampField;
-        private List<String> timestampFormats;
+        private List<String> jodaTimestampFormats;
+        private List<String> javaTimestampFormats;
         private boolean needClientTimezone;
         private Map<String, Object> mappings;
         private Map<String, FieldStats> fieldStats = Collections.emptyMap();
@@ -539,8 +562,13 @@ public class FileStructure implements ToXContentObject, Writeable {
             return this;
         }

-        public Builder setTimestampFormats(List<String> timestampFormats) {
-            this.timestampFormats = timestampFormats;
+        public Builder setJodaTimestampFormats(List<String> jodaTimestampFormats) {
+            this.jodaTimestampFormats = jodaTimestampFormats;
+            return this;
+        }
+
+        public Builder setJavaTimestampFormats(List<String> javaTimestampFormats) {
+            this.javaTimestampFormats = javaTimestampFormats;
             return this;
         }

@@ -652,11 +680,21 @@ public class FileStructure implements ToXContentObject, Writeable {
                 throw new IllegalStateException("enum value [" + format + "] missing from switch.");
             }

-            if ((timestampField == null) != (timestampFormats == null || timestampFormats.isEmpty())) {
-                throw new IllegalArgumentException("Timestamp field and timestamp formats must both be specified or neither be specified.");
+            boolean isTimestampFieldSpecified = (timestampField != null);
+            boolean isJodaTimestampFormatsSpecified = (jodaTimestampFormats != null && jodaTimestampFormats.isEmpty() == false);
+            boolean isJavaTimestampFormatsSpecified = (javaTimestampFormats != null && javaTimestampFormats.isEmpty() == false);
+
+            if (isTimestampFieldSpecified != isJodaTimestampFormatsSpecified) {
+                throw new IllegalArgumentException(
+                    "Timestamp field and Joda timestamp formats must both be specified or neither be specified.");
             }

-            if (needClientTimezone && timestampField == null) {
+            if (isTimestampFieldSpecified != isJavaTimestampFormatsSpecified) {
+                throw new IllegalArgumentException(
+                    "Timestamp field and Java timestamp formats must both be specified or neither be specified.");
+            }
+
+            if (needClientTimezone && isTimestampFieldSpecified == false) {
                 throw new IllegalArgumentException("Client timezone cannot be needed if there is no timestamp field.");
             }

@@ -670,7 +708,7 @@ public class FileStructure implements ToXContentObject, Writeable {
             return new FileStructure(numLinesAnalyzed, numMessagesAnalyzed, sampleStart, charset, hasByteOrderMarker, format,
                 multilineStartPattern, excludeLinesPattern, columnNames, hasHeaderRow, delimiter, quote, shouldTrimFields, grokPattern,
-                timestampField, timestampFormats, needClientTimezone, mappings, fieldStats, explanation);
+                timestampField, jodaTimestampFormats, javaTimestampFormats, needClientTimezone, mappings, fieldStats, explanation);
         }
     }
 }
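The reworked build() validation above enforces the both-or-neither pairing once per format flavor. The core check reduces to comparing two "specified" booleans, sketched here:

```java
import java.util.List;

final class TimestampPairingCheck {
    static void check(String timestampField, List<String> formats, String flavor) {
        boolean fieldSpecified = (timestampField != null);
        boolean formatsSpecified = (formats != null && formats.isEmpty() == false);
        if (fieldSpecified != formatsSpecified) {
            throw new IllegalArgumentException(
                "Timestamp field and " + flavor + " timestamp formats must both be specified or neither be specified.");
        }
    }
}
```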
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobDeletionTask.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.job.persistence; + +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; + +import java.util.Map; + +public class JobDeletionTask extends Task { + + public JobDeletionTask(long id, String type, String action, String description, TaskId parentTask, Map headers) { + super(id, type, action, description, parentTask, headers); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobStorageDeletionTask.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobStorageDeletionTask.java deleted file mode 100644 index 43cc372b6c7..00000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobStorageDeletionTask.java +++ /dev/null @@ -1,301 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.ml.job.persistence; - -import com.carrotsearch.hppc.cursors.ObjectObjectCursor; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; -import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; -import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; -import org.elasticsearch.action.bulk.BulkItemResponse; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.search.SearchAction; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.AliasMetaData; -import org.elasticsearch.common.CheckedConsumer; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.query.ConstantScoreQueryBuilder; -import org.elasticsearch.index.query.IdsQueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.index.reindex.BulkByScrollResponse; -import org.elasticsearch.index.reindex.DeleteByQueryAction; -import org.elasticsearch.index.reindex.DeleteByQueryRequest; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.xpack.core.ml.action.GetModelSnapshotsAction; -import org.elasticsearch.xpack.core.ml.action.util.PageParams; -import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerState; -import 
org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; -import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; -import org.elasticsearch.xpack.core.ml.utils.MlIndicesUtils; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.Consumer; - -import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; -import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; - -/* - Moving this class to plugin-core caused a *lot* of server side logic to be pulled in to plugin-core. This should be considered as needing - refactoring to move it back to core. See DeleteJobAction for its use. -*/ -public class JobStorageDeletionTask extends Task { - - private static final int MAX_SNAPSHOTS_TO_DELETE = 10000; - - private final Logger logger; - - public JobStorageDeletionTask(long id, String type, String action, String description, TaskId parentTask, Map headers) { - super(id, type, action, description, parentTask, headers); - this.logger = Loggers.getLogger(getClass()); - } - - public void delete(String jobId, Client client, ClusterState state, - CheckedConsumer finishedHandler, - Consumer failureHandler) { - - final String indexName = AnomalyDetectorsIndex.getPhysicalIndexFromState(state, jobId); - final String indexPattern = indexName + "-*"; - - final ActionListener completionHandler = ActionListener.wrap( - response -> finishedHandler.accept(response.isAcknowledged()), - failureHandler); - - // Step 7. If we did not drop the index and after DBQ state done, we delete the aliases - ActionListener dbqHandler = ActionListener.wrap( - bulkByScrollResponse -> { - if (bulkByScrollResponse == null) { // no action was taken by DBQ, assume Index was deleted - completionHandler.onResponse(new AcknowledgedResponse(true)); - } else { - if (bulkByScrollResponse.isTimedOut()) { - logger.warn("[{}] DeleteByQuery for indices [{}, {}] timed out.", jobId, indexName, indexPattern); - } - if (!bulkByScrollResponse.getBulkFailures().isEmpty()) { - logger.warn("[{}] {} failures and {} conflicts encountered while running DeleteByQuery on indices [{}, {}].", - jobId, bulkByScrollResponse.getBulkFailures().size(), bulkByScrollResponse.getVersionConflicts(), - indexName, indexPattern); - for (BulkItemResponse.Failure failure : bulkByScrollResponse.getBulkFailures()) { - logger.warn("DBQ failure: " + failure); - } - } - deleteAliases(jobId, client, completionHandler); - } - }, - failureHandler); - - // Step 6. If we did not delete the index, we run a delete by query - ActionListener deleteByQueryExecutor = ActionListener.wrap( - response -> { - if (response) { - logger.info("Running DBQ on [" + indexName + "," + indexPattern + "] for job [" + jobId + "]"); - DeleteByQueryRequest request = new DeleteByQueryRequest(indexName, indexPattern); - ConstantScoreQueryBuilder query = - new ConstantScoreQueryBuilder(new TermQueryBuilder(Job.ID.getPreferredName(), jobId)); - request.setQuery(query); - request.setIndicesOptions(MlIndicesUtils.addIgnoreUnavailable(IndicesOptions.lenientExpandOpen())); - request.setSlices(5); - request.setAbortOnVersionConflict(false); - request.setRefresh(true); - - executeAsyncWithOrigin(client, ML_ORIGIN, DeleteByQueryAction.INSTANCE, request, dbqHandler); - } else { // We did not execute DBQ, no need to delete aliases or check the response - dbqHandler.onResponse(null); - } - }, - failureHandler); - - // Step 5. 
If we have any hits, that means we are NOT the only job on this index, and should not delete it - // if we do not have any hits, we can drop the index and then skip the DBQ and alias deletion - ActionListener customIndexSearchHandler = ActionListener.wrap( - searchResponse -> { - if (searchResponse == null || searchResponse.getHits().totalHits > 0) { - deleteByQueryExecutor.onResponse(true); // We need to run DBQ and alias deletion - } else { - logger.info("Running DELETE Index on [" + indexName + "] for job [" + jobId + "]"); - DeleteIndexRequest request = new DeleteIndexRequest(indexName); - request.indicesOptions(IndicesOptions.lenientExpandOpen()); - // If we have deleted the index, then we don't need to delete the aliases or run the DBQ - executeAsyncWithOrigin( - client.threadPool().getThreadContext(), - ML_ORIGIN, - request, - ActionListener.wrap( - response -> deleteByQueryExecutor.onResponse(false), // skip DBQ && Alias - failureHandler), - client.admin().indices()::delete); - } - }, - failure -> { - if (failure.getClass() == IndexNotFoundException.class) { // assume the index is already deleted - deleteByQueryExecutor.onResponse(false); // skip DBQ && Alias - } else { - failureHandler.accept(failure); - } - } - ); - - // Step 4. Determine if we are on a shared index by looking at `.ml-anomalies-shared` or the custom index's aliases - ActionListener deleteCategorizerStateHandler = ActionListener.wrap( - response -> { - if (indexName.equals(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + - AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT)) { - customIndexSearchHandler.onResponse(null); //don't bother searching the index any further, we are on the default shared - } else { - SearchSourceBuilder source = new SearchSourceBuilder() - .size(1) - .query(QueryBuilders.boolQuery().filter( - QueryBuilders.boolQuery().mustNot(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobId)))); - - SearchRequest searchRequest = new SearchRequest(indexName); - searchRequest.source(source); - executeAsyncWithOrigin(client, ML_ORIGIN, SearchAction.INSTANCE, searchRequest, customIndexSearchHandler); - } - }, - failureHandler - ); - - // Step 3. Delete quantiles done, delete the categorizer state - ActionListener deleteQuantilesHandler = ActionListener.wrap( - response -> deleteCategorizerState(jobId, client, 1, deleteCategorizerStateHandler), - failureHandler); - - // Step 2. Delete state done, delete the quantiles - ActionListener deleteStateHandler = ActionListener.wrap( - bulkResponse -> deleteQuantiles(jobId, client, deleteQuantilesHandler), - failureHandler); - - // Step 1. 
Delete the model state - deleteModelState(jobId, client, deleteStateHandler); - } - - private void deleteQuantiles(String jobId, Client client, ActionListener finishedHandler) { - // The quantiles type and doc ID changed in v5.5 so delete both the old and new format - DeleteByQueryRequest request = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexName()); - // Just use ID here, not type, as trying to delete different types spams the logs with an exception stack trace - IdsQueryBuilder query = new IdsQueryBuilder().addIds(Quantiles.documentId(jobId), - // TODO: remove in 7.0 - Quantiles.v54DocumentId(jobId)); - request.setQuery(query); - request.setIndicesOptions(MlIndicesUtils.addIgnoreUnavailable(IndicesOptions.lenientExpandOpen())); - request.setAbortOnVersionConflict(false); - request.setRefresh(true); - - executeAsyncWithOrigin(client, ML_ORIGIN, DeleteByQueryAction.INSTANCE, request, ActionListener.wrap( - response -> finishedHandler.onResponse(true), - e -> { - // It's not a problem for us if the index wasn't found - it's equivalent to document not found - if (e instanceof IndexNotFoundException) { - finishedHandler.onResponse(true); - } else { - finishedHandler.onFailure(e); - } - })); - } - - private void deleteModelState(String jobId, Client client, ActionListener listener) { - GetModelSnapshotsAction.Request request = new GetModelSnapshotsAction.Request(jobId, null); - request.setPageParams(new PageParams(0, MAX_SNAPSHOTS_TO_DELETE)); - executeAsyncWithOrigin(client, ML_ORIGIN, GetModelSnapshotsAction.INSTANCE, request, ActionListener.wrap( - response -> { - List deleteCandidates = response.getPage().results(); - JobDataDeleter deleter = new JobDataDeleter(client, jobId); - deleter.deleteModelSnapshots(deleteCandidates, listener); - }, - listener::onFailure)); - } - - private void deleteCategorizerState(String jobId, Client client, int docNum, ActionListener finishedHandler) { - // The categorizer state type and doc ID changed in v5.5 so delete both the old and new format - DeleteByQueryRequest request = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexName()); - // Just use ID here, not type, as trying to delete different types spams the logs with an exception stack trace - IdsQueryBuilder query = new IdsQueryBuilder().addIds(CategorizerState.documentId(jobId, docNum), - // TODO: remove in 7.0 - CategorizerState.v54DocumentId(jobId, docNum)); - request.setQuery(query); - request.setIndicesOptions(MlIndicesUtils.addIgnoreUnavailable(IndicesOptions.lenientExpandOpen())); - request.setAbortOnVersionConflict(false); - request.setRefresh(true); - - executeAsyncWithOrigin(client, ML_ORIGIN, DeleteByQueryAction.INSTANCE, request, ActionListener.wrap( - response -> { - // If we successfully deleted a document try the next one; if not we're done - if (response.getDeleted() > 0) { - // There's an assumption here that there won't be very many categorizer - // state documents, so the recursion won't go more than, say, 5 levels deep - deleteCategorizerState(jobId, client, docNum + 1, finishedHandler); - return; - } - finishedHandler.onResponse(true); - }, - e -> { - // It's not a problem for us if the index wasn't found - it's equivalent to document not found - if (e instanceof IndexNotFoundException) { - finishedHandler.onResponse(true); - } else { - finishedHandler.onFailure(e); - } - })); - } - - private void deleteAliases(String jobId, Client client, ActionListener finishedHandler) { - final String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); 
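/*
 * [Editor's annotation, not part of the original patch] The delete() flow above wires its
 * steps together as ActionListeners declared in reverse order (Step 7 first, Step 1 last),
 * so each step can forward to the listener declared before it and the code reads bottom-up
 * while executing top-down. A minimal sketch of the same pattern, assuming only the
 * ActionListener API; runStepOneAsync() and the step bodies are hypothetical stand-ins:
 *
 *   import org.elasticsearch.action.ActionListener;
 *
 *   void deleteInSteps(ActionListener<Boolean> finished) {
 *       // Step 2: declared first, executed last; completes the caller's listener.
 *       ActionListener<Void> step2 = ActionListener.wrap(
 *           ignored -> finished.onResponse(true),
 *           finished::onFailure);
 *       // Step 1: declared last, executed first; on success it hands off to step2.
 *       runStepOneAsync(ActionListener.wrap(
 *           ignored -> step2.onResponse(null),
 *           finished::onFailure));
 *   }
 */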
- final String writeAliasName = AnomalyDetectorsIndex.resultsWriteAlias(jobId); - - // first find the concrete indices associated with the aliases - GetAliasesRequest aliasesRequest = new GetAliasesRequest().aliases(readAliasName, writeAliasName) - .indicesOptions(IndicesOptions.lenientExpandOpen()); - executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, aliasesRequest, - ActionListener.wrap( - getAliasesResponse -> { - // remove the aliases from the concrete indices found in the first step - IndicesAliasesRequest removeRequest = buildRemoveAliasesRequest(getAliasesResponse); - if (removeRequest == null) { - // don't error if the job's aliases have already been deleted - carry on and delete the - // rest of the job's data - finishedHandler.onResponse(new AcknowledgedResponse(true)); - return; - } - executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, removeRequest, - ActionListener.wrap( - finishedHandler::onResponse, - finishedHandler::onFailure), - client.admin().indices()::aliases); - }, - finishedHandler::onFailure), client.admin().indices()::getAliases); - } - - private IndicesAliasesRequest buildRemoveAliasesRequest(GetAliasesResponse getAliasesResponse) { - Set aliases = new HashSet<>(); - List indices = new ArrayList<>(); - for (ObjectObjectCursor> entry : getAliasesResponse.getAliases()) { - // The response includes _all_ indices, but only those associated with - // the aliases we asked about will have associated AliasMetaData - if (entry.value.isEmpty() == false) { - indices.add(entry.key); - entry.value.forEach(metadata -> aliases.add(metadata.getAlias())); - } - } - return aliases.isEmpty() ? null : new IndicesAliasesRequest().addAliasAction( - IndicesAliasesRequest.AliasActions.remove() - .aliases(aliases.toArray(new String[aliases.size()])) - .indices(indices.toArray(new String[indices.size()]))); - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfig.java index 27461c62b67..68e2dc50d9f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfig.java @@ -222,7 +222,7 @@ public class RollupJobConfig implements NamedWriteable, ToXContentObject { builder.endArray(); } if (timeout != null) { - builder.field(TIMEOUT, timeout); + builder.field(TIMEOUT, timeout.getStringRep()); } builder.field(PAGE_SIZE, pageSize); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatus.java index 0a2f046907c..d95efcef08b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatus.java @@ -163,6 +163,6 @@ public class RollupJobStatus implements Task.Status, PersistentTaskState { @Override public int hashCode() { - return Objects.hash(state, currentPosition, upgradedDocumentID); + return Objects.hash(state, currentPosition, upgradedDocumentID); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java index 7f4da3fbf1a..d4ccc22d32a 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java @@ -254,7 +254,6 @@ public class Cron implements ToXContentFragment { private transient boolean lastdayOfMonth = false; private transient boolean nearestWeekday = false; private transient int lastdayOffset = 0; - private transient boolean expressionParsed = false; public static final int MAX_YEAR = Calendar.getInstance(UTC, Locale.ROOT).get(Calendar.YEAR) + 100; @@ -802,7 +801,6 @@ public class Cron implements ToXContentFragment { //////////////////////////////////////////////////////////////////////////// private void buildExpression(String expression) { - expressionParsed = true; try { @@ -1214,32 +1212,6 @@ public class Cron implements ToXContentFragment { return buf.toString(); } - private static String expressionSetSummary(java.util.ArrayList list) { - - if (list.contains(NO_SPEC)) { - return "?"; - } - if (list.contains(ALL_SPEC)) { - return "*"; - } - - StringBuilder buf = new StringBuilder(); - - Iterator itr = list.iterator(); - boolean first = true; - while (itr.hasNext()) { - Integer iVal = itr.next(); - String val = iVal.toString(); - if (!first) { - buf.append(","); - } - buf.append(val); - first = false; - } - - return buf.toString(); - } - private static int skipWhiteSpace(int i, String s) { for (; i < s.length() && (s.charAt(i) == ' ' || s.charAt(i) == '\t'); i++) { ; @@ -1352,14 +1324,28 @@ public class Cron implements ToXContentFragment { int max = -1; if (stopAt < startAt) { switch (type) { - case SECOND : max = 60; break; - case MINUTE : max = 60; break; - case HOUR : max = 24; break; - case MONTH : max = 12; break; - case DAY_OF_WEEK : max = 7; break; - case DAY_OF_MONTH : max = 31; break; - case YEAR : throw new IllegalArgumentException("Start year must be less than stop year"); - default : throw new IllegalArgumentException("Unexpected type encountered"); + case SECOND: + max = 60; + break; + case MINUTE: + max = 60; + break; + case HOUR: + max = 24; + break; + case MONTH: + max = 12; + break; + case DAY_OF_WEEK: + max = 7; + break; + case DAY_OF_MONTH: + max = 31; + break; + case YEAR: + throw new IllegalArgumentException("Start year must be less than stop year"); + default: + throw new IllegalArgumentException("Unexpected type encountered"); } stopAt += max; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java index a481f880311..97f8eb5fa11 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java @@ -5,6 +5,9 @@ */ package org.elasticsearch.xpack.core.security; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.SearchRequest; @@ -12,7 +15,6 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.Client; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import 
org.elasticsearch.search.SearchHit; @@ -25,6 +27,7 @@ import java.util.function.Function; public final class ScrollHelper { + private static final Logger LOGGER = LogManager.getLogger(ScrollHelper.class); private ScrollHelper() {} /** @@ -35,13 +38,15 @@ public final class ScrollHelper { Function hitParser) { final List results = new ArrayList<>(); if (request.scroll() == null) { // we do scroll by default lets see if we can get rid of this at some point. - request.scroll(TimeValue.timeValueSeconds(10L)); + throw new IllegalArgumentException("request must have scroll set"); } final Consumer clearScroll = (response) -> { if (response != null && response.getScrollId() != null) { ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); clearScrollRequest.addScrollId(response.getScrollId()); - client.clearScroll(clearScrollRequest, ActionListener.wrap((r) -> {}, (e) -> {})); + client.clearScroll(clearScrollRequest, ActionListener.wrap((r) -> {}, e -> + LOGGER.warn(new ParameterizedMessage("clear scroll failed for scroll id [{}]", response.getScrollId()), e) + )); } }; // This function is MADNESS! But it works, don't think about it too hard... diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityContext.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityContext.java index 99788ac1de4..c737ab75d81 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityContext.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityContext.java @@ -6,8 +6,8 @@ package org.elasticsearch.xpack.core.security; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.Version; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext.StoredContext; @@ -23,8 +23,8 @@ import java.util.function.Consumer; * A lightweight utility that can find the current user and authentication information for the local thread. */ public class SecurityContext { + private final Logger logger = LogManager.getLogger(SecurityContext.class); - private final Logger logger; private final ThreadContext threadContext; private final UserSettings userSettings; private final String nodeName; @@ -35,9 +35,8 @@ public class SecurityContext { * and {@link UserSettings#getAuthentication()} will always return null. 
*/ public SecurityContext(Settings settings, ThreadContext threadContext) { - this.logger = Loggers.getLogger(getClass(), settings); this.threadContext = threadContext; - this.userSettings = new UserSettings(settings, threadContext); + this.userSettings = new UserSettings(threadContext); this.nodeName = Node.NODE_NAME_SETTING.get(settings); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/UserSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/UserSettings.java index 7f22f90351e..c7f22e8742e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/UserSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/UserSettings.java @@ -6,8 +6,7 @@ package org.elasticsearch.xpack.core.security; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.user.User; @@ -15,15 +14,14 @@ import org.elasticsearch.xpack.core.security.user.User; import java.io.IOException; public final class UserSettings { - private final Logger logger; + private final Logger logger = LogManager.getLogger(UserSettings.class); + private final ThreadContext threadContext; - UserSettings(Settings settings, ThreadContext threadContext) { - this.logger = Loggers.getLogger(getClass(), settings); + UserSettings(ThreadContext threadContext) { this.threadContext = threadContext; } - /** * Returns the current user information, or null if the current request has no authentication info. 
*/ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java index bc8869d5d83..5ccd8bec1a5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.security.authc; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.XPackLicenseState; @@ -25,7 +26,7 @@ import java.util.Map; */ public abstract class Realm implements Comparable { - protected final Logger logger; + protected final Logger logger = LogManager.getLogger(getClass()); protected final String type; public String getType() { @@ -37,7 +38,6 @@ public abstract class Realm implements Comparable { public Realm(String type, RealmConfig config) { this.type = type; this.config = config; - this.logger = config.logger(getClass()); } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmConfig.java index 865d0117b81..759f938491e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmConfig.java @@ -5,8 +5,6 @@ */ package org.elasticsearch.xpack.core.security.authc; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; @@ -59,10 +57,6 @@ public class RealmConfig { return globalSettings; } - public Logger logger(Class clazz) { - return Loggers.getLogger(clazz, globalSettings); - } - public Environment env() { return env; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java index e0dc36b4117..dbb359bb70f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.security.authz.accesscontrol; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.BooleanQuery; @@ -30,14 +31,12 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.LoggerMessageFormat; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import 
org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.engine.EngineException; import org.elasticsearch.index.query.BoolQueryBuilder; @@ -89,19 +88,18 @@ import static org.apache.lucene.search.BooleanClause.Occur.SHOULD; * instance. */ public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper { + private static final Logger logger = LogManager.getLogger(SecurityIndexSearcherWrapper.class); private final Function queryShardContextProvider; private final BitsetFilterCache bitsetFilterCache; private final XPackLicenseState licenseState; private final ThreadContext threadContext; - private final Logger logger; private final ScriptService scriptService; - public SecurityIndexSearcherWrapper(IndexSettings indexSettings, Function queryShardContextProvider, + public SecurityIndexSearcherWrapper(Function queryShardContextProvider, BitsetFilterCache bitsetFilterCache, ThreadContext threadContext, XPackLicenseState licenseState, ScriptService scriptService) { this.scriptService = scriptService; - this.logger = Loggers.getLogger(getClass(), indexSettings.getSettings()); this.queryShardContextProvider = queryShardContextProvider; this.bitsetFilterCache = bitsetFilterCache; this.threadContext = threadContext; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java index 144d2877463..53d6c328f5d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java @@ -23,7 +23,6 @@ import org.elasticsearch.xpack.core.security.support.Automatons; import java.io.IOException; import java.util.Arrays; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; @@ -44,7 +43,6 @@ public final class FieldPermissions implements Accountable { private static final long BASE_FIELD_PERM_DEF_BYTES = RamUsageEstimator.shallowSizeOf(new FieldPermissionsDefinition(null, null)); private static final long BASE_FIELD_GROUP_BYTES = RamUsageEstimator.shallowSizeOf(new FieldGrantExcludeGroup(null, null)); - private static final long BASE_HASHSET_SIZE = RamUsageEstimator.shallowSizeOfInstance(HashSet.class); private static final long BASE_HASHSET_ENTRY_SIZE; static { HashMap map = new HashMap<>(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilege.java index 6c52d3e75dc..686969da35e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilege.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenAction; import 
org.elasticsearch.xpack.core.security.action.token.RefreshTokenAction; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; import org.elasticsearch.xpack.core.security.support.Automatons; import java.util.Collections; @@ -42,7 +43,9 @@ public final class ClusterPrivilege extends Privilege { private static final Automaton MANAGE_IDX_TEMPLATE_AUTOMATON = patterns("indices:admin/template/*"); private static final Automaton MANAGE_INGEST_PIPELINE_AUTOMATON = patterns("cluster:admin/ingest/pipeline/*"); private static final Automaton MANAGE_ROLLUP_AUTOMATON = patterns("cluster:admin/xpack/rollup/*", "cluster:monitor/xpack/rollup/*"); - private static final Automaton MANAGE_CCR_AUTOMATON = patterns("cluster:admin/xpack/ccr/*", ClusterStateAction.NAME); + private static final Automaton MANAGE_CCR_AUTOMATON = + patterns("cluster:admin/xpack/ccr/*", ClusterStateAction.NAME, HasPrivilegesAction.NAME); + private static final Automaton READ_CCR_AUTOMATON = patterns(ClusterStateAction.NAME, HasPrivilegesAction.NAME); public static final ClusterPrivilege NONE = new ClusterPrivilege("none", Automatons.EMPTY); public static final ClusterPrivilege ALL = new ClusterPrivilege("all", ALL_CLUSTER_AUTOMATON); @@ -63,6 +66,7 @@ public final class ClusterPrivilege extends Privilege { public static final ClusterPrivilege MANAGE_SAML = new ClusterPrivilege("manage_saml", MANAGE_SAML_AUTOMATON); public static final ClusterPrivilege MANAGE_PIPELINE = new ClusterPrivilege("manage_pipeline", "cluster:admin/ingest/pipeline/*"); public static final ClusterPrivilege MANAGE_CCR = new ClusterPrivilege("manage_ccr", MANAGE_CCR_AUTOMATON); + public static final ClusterPrivilege READ_CCR = new ClusterPrivilege("read_ccr", READ_CCR_AUTOMATON); public static final Predicate ACTION_MATCHER = ClusterPrivilege.ALL.predicate(); @@ -84,6 +88,7 @@ public final class ClusterPrivilege extends Privilege { .put("manage_pipeline", MANAGE_PIPELINE) .put("manage_rollup", MANAGE_ROLLUP) .put("manage_ccr", MANAGE_CCR) + .put("read_ccr", READ_CCR) .immutableMap(); private static final ConcurrentHashMap, ClusterPrivilege> CACHE = new ConcurrentHashMap<>(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java index 779f2765f48..bc8c408731b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; import org.elasticsearch.xpack.core.security.support.Automatons; import java.util.Arrays; @@ -55,7 +56,7 @@ public final class IndexPrivilege extends Privilege { private static final Automaton VIEW_METADATA_AUTOMATON = patterns(GetAliasesAction.NAME, AliasesExistAction.NAME, GetIndexAction.NAME, IndicesExistsAction.NAME, GetFieldMappingsAction.NAME + "*", GetMappingsAction.NAME, ClusterSearchShardsAction.NAME, TypesExistsAction.NAME, ValidateQueryAction.NAME + "*", GetSettingsAction.NAME); - private static final Automaton CREATE_FOLLOW_INDEX_AUTOMATON = 
patterns("indices:admin/xpack/ccr/create_and_follow_index"); + private static final Automaton CREATE_FOLLOW_INDEX_AUTOMATON = patterns(PutFollowAction.NAME); public static final IndexPrivilege NONE = new IndexPrivilege("none", Automatons.EMPTY); public static final IndexPrivilege ALL = new IndexPrivilege("all", ALL_AUTOMATON); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustConfig.java index 201965b4188..37b3334cd0f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustConfig.java @@ -30,12 +30,10 @@ import java.util.Objects; public final class RestrictedTrustConfig extends TrustConfig { private static final String RESTRICTIONS_KEY_SUBJECT_NAME = "trust.subject_name"; - private final Settings settings; private final String groupConfigPath; private final TrustConfig delegate; - RestrictedTrustConfig(Settings settings, String groupConfigPath, TrustConfig delegate) { - this.settings = settings; + RestrictedTrustConfig(String groupConfigPath, TrustConfig delegate) { this.groupConfigPath = Objects.requireNonNull(groupConfigPath); this.delegate = Objects.requireNonNull(delegate); } @@ -45,7 +43,7 @@ public final class RestrictedTrustConfig extends TrustConfig { try { final X509ExtendedTrustManager delegateTrustManager = delegate.createTrustManager(environment); final CertificateTrustRestrictions trustGroupConfig = readTrustGroup(resolveGroupConfigPath(environment)); - return new RestrictedTrustManager(settings, delegateTrustManager, trustGroupConfig); + return new RestrictedTrustManager(delegateTrustManager, trustGroupConfig); } catch (IOException e) { throw new ElasticsearchException("failed to initialize TrustManager for {}", e, toString()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManager.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManager.java index 8a82694785a..f457ce8f752 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManager.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManager.java @@ -6,9 +6,8 @@ package org.elasticsearch.xpack.core.ssl; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; import javax.net.ssl.SSLEngine; import javax.net.ssl.X509ExtendedTrustManager; @@ -35,15 +34,14 @@ import java.util.stream.Collectors; * The underlying certificate validation is delegated to another TrustManager. 
*/ public final class RestrictedTrustManager extends X509ExtendedTrustManager { - + private static final Logger logger = LogManager.getLogger(RestrictedTrustManager.class); private static final String CN_OID = "2.5.4.3"; private static final int SAN_CODE_OTHERNAME = 0; - private final Logger logger; + private final X509ExtendedTrustManager delegate; private final CertificateTrustRestrictions trustRestrictions; - public RestrictedTrustManager(Settings settings, X509ExtendedTrustManager delegate, CertificateTrustRestrictions restrictions) { - this.logger = Loggers.getLogger(getClass(), settings); + public RestrictedTrustManager(X509ExtendedTrustManager delegate, CertificateTrustRestrictions restrictions) { this.delegate = delegate; this.trustRestrictions = restrictions; logger.debug("Configured with trust restrictions: [{}]", restrictions); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfiguration.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfiguration.java index 48dba65a3a6..9054d664eec 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfiguration.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfiguration.java @@ -206,7 +206,7 @@ public final class SSLConfiguration { private static TrustConfig createTrustConfig(Settings settings, KeyConfig keyConfig, SSLConfiguration global) { final TrustConfig trustConfig = createCertChainTrustConfig(settings, keyConfig, global); return SETTINGS_PARSER.trustRestrictionsPath.get(settings) - .map(path -> (TrustConfig) new RestrictedTrustConfig(settings, path, trustConfig)) + .map(path -> (TrustConfig) new RestrictedTrustConfig(path, trustConfig)) .orElse(trustConfig); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java index 07d438b243b..08513ce7412 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java @@ -24,6 +24,7 @@ import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; import javax.net.ssl.SSLParameters; +import javax.net.ssl.SSLSession; import javax.net.ssl.SSLSessionContext; import javax.net.ssl.SSLSocket; import javax.net.ssl.SSLSocketFactory; @@ -542,17 +543,6 @@ public class SSLService extends AbstractComponent { return context; } - /** - * Invalidates the sessions in the provided {@link SSLSessionContext} - */ - private void invalidateSessions(SSLSessionContext sslSessionContext) { - Enumeration sessionIds = sslSessionContext.getIds(); - while (sessionIds.hasMoreElements()) { - byte[] sessionId = sessionIds.nextElement(); - sslSessionContext.getSession(sessionId).invalidate(); - } - } - synchronized void reload() { invalidateSessions(context.getClientSessionContext()); invalidateSessions(context.getServerSessionContext()); @@ -592,6 +582,24 @@ public class SSLService extends AbstractComponent { } } + /** + * Invalidates the sessions in the provided {@link SSLSessionContext} + */ + static void invalidateSessions(SSLSessionContext sslSessionContext) { + Enumeration sessionIds = sslSessionContext.getIds(); + while (sessionIds.hasMoreElements()) { + byte[] sessionId = sessionIds.nextElement(); + SSLSession session = sslSessionContext.getSession(sessionId); + // a SSLSession could be null as there is no lock 
while iterating, the session cache + // could have evicted a value, the session could have timed out, or the session could + // have already been invalidated, which removes the value from the session cache in the + // Sun implementation + if (session != null) { + session.invalidate(); + } + } + } + /** * @return A map of Settings prefix to Settings object */ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatchSourceBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatchSourceBuilder.java index b82e9b64109..001a430ddb1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatchSourceBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatchSourceBuilder.java @@ -185,7 +185,6 @@ public class WatchSourceBuilder implements ToXContentObject { static class TransformedAction implements ToXContentObject { - private final String id; private final Action action; @Nullable private final TimeValue throttlePeriod; @Nullable private final Condition condition; @@ -193,7 +192,6 @@ public class WatchSourceBuilder implements ToXContentObject { TransformedAction(String id, Action action, @Nullable TimeValue throttlePeriod, @Nullable Condition condition, @Nullable Transform transform) { - this.id = id; this.throttlePeriod = throttlePeriod; this.condition = condition; this.transform = transform; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java index 097d136c629..991f9ba3323 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java @@ -9,8 +9,8 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -26,7 +26,7 @@ import java.util.concurrent.TimeUnit; public class WatcherDateTimeUtils { public static final FormatDateTimeFormatter dateTimeFormatter = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; - public static final DateMathParser dateMathParser = new DateMathParser(dateTimeFormatter); + public static final DateMathParser dateMathParser = dateTimeFormatter.toDateMathParser(); private WatcherDateTimeUtils() { } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/TransformRegistry.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/TransformRegistry.java index 4d729c10c7d..0365b310583 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/TransformRegistry.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/TransformRegistry.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.core.watcher.transform; import org.elasticsearch.ElasticsearchParseException; -import
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.watcher.transform.chain.ChainTransform; import org.elasticsearch.xpack.core.watcher.transform.chain.ChainTransformFactory; @@ -20,9 +19,9 @@ public class TransformRegistry { private final Map factories; - public TransformRegistry(Settings settings, Map factories) { + public TransformRegistry(Map factories) { Map map = new HashMap<>(factories); - map.put(ChainTransform.TYPE, new ChainTransformFactory(settings, this)); + map.put(ChainTransform.TYPE, new ChainTransformFactory(this)); this.factories = Collections.unmodifiableMap(map); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/chain/ChainTransformFactory.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/chain/ChainTransformFactory.java index 403f1d02909..97047abede4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/chain/ChainTransformFactory.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/chain/ChainTransformFactory.java @@ -5,8 +5,7 @@ */ package org.elasticsearch.xpack.core.watcher.transform.chain; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.watcher.transform.ExecutableTransform; import org.elasticsearch.xpack.core.watcher.transform.Transform; @@ -20,8 +19,8 @@ public final class ChainTransformFactory extends TransformFactory readers) { + List wrapped = new ArrayList<>(readers.size()); + for (LeafReader reader : readers) { + LeafReader wrap = wrap(reader); + assert wrap != null; + if (wrap.numDocs() != 0) { + wrapped.add(wrap); + } + } + return wrapped.toArray(new LeafReader[0]); + } + + @Override + public LeafReader wrap(LeafReader reader) { + return reader; + } + }); + } + + @Override + protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException { + return new DropFullDeletedSegmentsReader(in); + } + + @Override + public CacheHelper getReaderCacheHelper() { + return in.getReaderCacheHelper(); + } + } + } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/DatafeedParamsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/DatafeedParamsTests.java new file mode 100644 index 00000000000..24a6dbacfad --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/DatafeedParamsTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; + +import java.io.IOException; + +public class DatafeedParamsTests extends AbstractSerializingTestCase { + @Override + protected StartDatafeedAction.DatafeedParams doParseInstance(XContentParser parser) throws IOException { + return StartDatafeedAction.DatafeedParams.parseRequest(null, parser); + } + + public static StartDatafeedAction.DatafeedParams createDatafeedParams() { + StartDatafeedAction.DatafeedParams params = + new StartDatafeedAction.DatafeedParams(randomAlphaOfLength(10), randomNonNegativeLong()); + if (randomBoolean()) { + params.setEndTime(randomNonNegativeLong()); + } + if (randomBoolean()) { + params.setTimeout(TimeValue.timeValueMillis(randomNonNegativeLong())); + } + return params; + } + + @Override + protected StartDatafeedAction.DatafeedParams createTestInstance() { + return createDatafeedParams(); + } + + @Override + protected Writeable.Reader instanceReader() { + return StartDatafeedAction.DatafeedParams::new; + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/JobParamsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/JobParamsTests.java new file mode 100644 index 00000000000..740b01abf0d --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/JobParamsTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; + +import java.io.IOException; + +public class JobParamsTests extends AbstractSerializingTestCase { + + @Override + protected OpenJobAction.JobParams doParseInstance(XContentParser parser) throws IOException { + return OpenJobAction.JobParams.parseRequest(null, parser); + } + + public static OpenJobAction.JobParams createJobParams() { + OpenJobAction.JobParams params = new OpenJobAction.JobParams(randomAlphaOfLengthBetween(1, 20)); + if (randomBoolean()) { + params.setTimeout(TimeValue.timeValueMillis(randomNonNegativeLong())); + } + return params; + } + + @Override + protected OpenJobAction.JobParams createTestInstance() { + return createJobParams(); + } + + @Override + protected Writeable.Reader instanceReader() { + return OpenJobAction.JobParams::new; + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/OpenJobActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/OpenJobActionRequestTests.java index de85907a83e..da1ce5fdd0f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/OpenJobActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/OpenJobActionRequestTests.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractStreamableXContentTestCase; import org.elasticsearch.xpack.core.ml.action.OpenJobAction.Request; @@ -14,11 +13,7 @@ public class OpenJobActionRequestTests extends AbstractStreamableXContentTestCas @Override protected Request createTestInstance() { - OpenJobAction.JobParams params = new OpenJobAction.JobParams(randomAlphaOfLengthBetween(1, 20)); - if (randomBoolean()) { - params.setTimeout(TimeValue.timeValueMillis(randomNonNegativeLong())); - } - return new Request(params); + return new Request(JobParamsTests.createJobParams()); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventActionRequestTests.java index ce6a64be6c5..af94c180a1f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventActionRequestTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.AbstractStreamableTestCase; -import org.elasticsearch.xpack.core.ml.action.PostCalendarEventsAction; import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent; import org.elasticsearch.xpack.core.ml.calendars.ScheduledEventTests; @@ -63,7 +62,6 @@ public class PostCalendarEventActionRequestTests extends AbstractStreamableTestC public void testParseRequest_throwsIfCalendarIdsAreDifferent() throws IOException { PostCalendarEventsAction.Request sourceRequest = 
createTestInstance("foo"); - PostCalendarEventsAction.Request request = new PostCalendarEventsAction.Request("bar", sourceRequest.getScheduledEvents()); StringBuilder requestString = new StringBuilder(); requestString.append("{\"events\": ["); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedActionRequestTests.java index bae610c5e36..fe2bb5d6508 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedActionRequestTests.java @@ -6,10 +6,8 @@ package org.elasticsearch.xpack.core.ml.action; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractStreamableXContentTestCase; -import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction.DatafeedParams; import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction.Request; import static org.hamcrest.Matchers.equalTo; @@ -18,14 +16,7 @@ public class StartDatafeedActionRequestTests extends AbstractStreamableXContentT @Override protected Request createTestInstance() { - DatafeedParams params = new DatafeedParams(randomAlphaOfLength(10), randomNonNegativeLong()); - if (randomBoolean()) { - params.setEndTime(randomNonNegativeLong()); - } - if (randomBoolean()) { - params.setTimeout(TimeValue.timeValueMillis(randomNonNegativeLong())); - } - return new Request(params); + return new Request(DatafeedParamsTests.createDatafeedParams()); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStatsTests.java index 30f7c8f5576..ec46d25edd4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStatsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStatsTests.java @@ -32,8 +32,13 @@ public class FieldStatsTests extends AbstractSerializingTestCase { Double medianValue = null; boolean isMetric = randomBoolean(); if (isMetric) { - minValue = randomDouble(); - maxValue = randomDouble(); + if (randomBoolean()) { + minValue = randomDouble(); + maxValue = randomDouble(); + } else { + minValue = (double) randomInt(); + maxValue = (double) randomInt(); + } meanValue = randomDouble(); medianValue = randomDouble(); } @@ -42,7 +47,7 @@ public class FieldStatsTests extends AbstractSerializingTestCase { for (int i = 0; i < Math.min(10, cardinality); ++i) { Map topHit = new LinkedHashMap<>(); if (isMetric) { - topHit.put("value", randomDouble()); + topHit.put("value", randomBoolean() ? 
randomDouble() : (double) randomInt()); } else { topHit.put("value", randomAlphaOfLength(20)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java index ac6c647136b..d008b31f9a6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java @@ -63,7 +63,8 @@ public class FileStructureTests extends AbstractSerializingTestCase T compile(String name, String script, ScriptContext context, Map options) { MockCompiledScript compiledScript = new MockCompiledScript(name, options, script, p -> script); - if (context.instanceClazz.equals(ExecutableScript.class)) { - return context.factoryClazz.cast((ExecutableScript.Factory) compiledScript::createExecutableScript); - } else if (context.instanceClazz.equals(SearchScript.class)) { + if (context.instanceClazz.equals(SearchScript.class)) { return context.factoryClazz.cast((SearchScript.Factory) compiledScript::createSearchScript); } throw new IllegalArgumentException("mock painless does not know how to handle context [" + context.name + "]"); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/audit/logfile/CapturingLogger.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/audit/logfile/CapturingLogger.java index ede18c8241b..3ea15d48f2b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/audit/logfile/CapturingLogger.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/audit/logfile/CapturingLogger.java @@ -17,7 +17,6 @@ import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.config.LoggerConfig; import org.apache.logging.log4j.core.filter.RegexFilter; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.logging.Loggers; import java.util.ArrayList; @@ -50,7 +49,7 @@ public class CapturingLogger { // careful, don't "bury" this on the call stack, unless you know what you're doing final StackTraceElement caller = Thread.currentThread().getStackTrace()[2]; final String name = caller.getClassName() + "." + caller.getMethodName() + "." 
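/*
 * [Editor's annotation, not part of the original patch] The -/+ pair below is the same
 * mechanical migration applied throughout this commit: loggers previously obtained via
 * ESLoggerFactory or Loggers.getLogger(Class, Settings) move to plain Log4j 2
 * LogManager, which needs no Settings instance. A minimal before/after sketch (the
 * Example class name is hypothetical):
 *
 *   import org.apache.logging.log4j.LogManager;
 *   import org.apache.logging.log4j.Logger;
 *
 *   class Example {
 *       // before: this.logger = Loggers.getLogger(getClass(), settings);
 *       // after: resolved once at class-load time, no Settings required
 *       private static final Logger logger = LogManager.getLogger(Example.class);
 *   }
 */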
+ level.toString(); - final Logger logger = ESLoggerFactory.getLogger(name); + final Logger logger = LogManager.getLogger(name); Loggers.setLevel(logger, level); final MockAppender appender = new MockAppender(name, layout); appender.start(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java index ac6e0d0e151..90b0c6ee773 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java @@ -86,7 +86,7 @@ public class SecurityIndexSearcherWrapperIntegrationTests extends ESTestCase { }); XPackLicenseState licenseState = mock(XPackLicenseState.class); when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true); - SecurityIndexSearcherWrapper wrapper = new SecurityIndexSearcherWrapper(indexSettings, s -> queryShardContext, + SecurityIndexSearcherWrapper wrapper = new SecurityIndexSearcherWrapper(s -> queryShardContext, bitsetFilterCache, threadContext, licenseState, scriptService) { @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java index 207c9d22198..06838ac6ffa 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java @@ -152,7 +152,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase { public void testDefaultMetaFields() throws Exception { securityIndexSearcherWrapper = - new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, scriptService) { + new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService) { @Override protected IndicesAccessControl getIndicesAccessControl() { IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true, @@ -182,14 +182,14 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase { public void testWrapReaderWhenFeatureDisabled() throws Exception { when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(false); securityIndexSearcherWrapper = - new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, scriptService); + new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService); DirectoryReader reader = securityIndexSearcherWrapper.wrap(esIn); assertThat(reader, sameInstance(esIn)); } public void testWrapSearcherWhenFeatureDisabled() throws Exception { securityIndexSearcherWrapper = - new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, scriptService); + new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService); IndexSearcher indexSearcher = new IndexSearcher(esIn); IndexSearcher result = 
securityIndexSearcherWrapper.wrap(indexSearcher); assertThat(result, sameInstance(indexSearcher)); @@ -228,7 +228,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase { DirectoryReader directoryReader = DocumentSubsetReader.wrap(esIn, bitsetFilterCache, new MatchAllDocsQuery()); IndexSearcher indexSearcher = new IndexSearcher(directoryReader); securityIndexSearcherWrapper = - new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, scriptService); + new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService); IndexSearcher result = securityIndexSearcherWrapper.wrap(indexSearcher); assertThat(result, not(sameInstance(indexSearcher))); assertThat(result.getSimilarity(), sameInstance(indexSearcher.getSimilarity())); @@ -237,7 +237,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase { public void testIntersectScorerAndRoleBits() throws Exception { securityIndexSearcherWrapper = - new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, scriptService); + new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService); final Directory directory = newDirectory(); IndexWriter iw = new IndexWriter( directory, @@ -326,7 +326,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase { public void testFieldPermissionsWithFieldExceptions() throws Exception { securityIndexSearcherWrapper = - new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, null); + new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, null); String[] grantedFields = new String[]{}; String[] deniedFields; Set expected = new HashSet<>(META_FIELDS); @@ -427,7 +427,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase { User user = new User("_username", new String[]{"role1", "role2"}, "_full_name", "_email", Collections.singletonMap("key", "value"), true); securityIndexSearcherWrapper = - new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, scriptService) { + new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService) { @Override protected User getUser() { @@ -475,7 +475,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase { public void testSkipTemplating() throws Exception { securityIndexSearcherWrapper = - new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, scriptService); + new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService); XContentBuilder builder = jsonBuilder(); String querySource = Strings.toString(new TermQueryBuilder("field", "value").toXContent(builder, ToXContent.EMPTY_PARAMS)); String result = securityIndexSearcherWrapper.evaluateTemplate(querySource); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustConfigTests.java index 2e39ebe8452..4f7f28850d6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustConfigTests.java @@ -69,7 +69,7 @@ public class RestrictedTrustConfigTests extends ESTestCase { } }; - final RestrictedTrustConfig restrictedTrustConfig = new RestrictedTrustConfig(settings, groupConfigPath.toString(), delegate); + 
final RestrictedTrustConfig restrictedTrustConfig = new RestrictedTrustConfig(groupConfigPath.toString(), delegate); List filesToMonitor = restrictedTrustConfig.filesToMonitor(environment); List expectedPathList = new ArrayList<>(otherFiles); expectedPathList.add(groupConfigPath); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java index 24dc2d9847a..32f75f56da2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java @@ -6,8 +6,7 @@ package org.elasticsearch.xpack.core.ssl; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Description; import org.hamcrest.TypeSafeMatcher; @@ -50,7 +49,7 @@ public class RestrictedTrustManagerTests extends ESTestCase { @BeforeClass public static void ensureSupportedLocale() throws Exception { - Logger logger = Loggers.getLogger(RestrictedTrustManagerTests.class); + Logger logger = LogManager.getLogger(RestrictedTrustManagerTests.class); if (isUnusableLocale()) { // See: https://github.com/elastic/elasticsearch/issues/33081 logger.warn("Attempting to run RestrictedTrustManagerTests tests in an unusable locale in a FIPS JVM. Certificate expiration " + @@ -129,7 +128,7 @@ public class RestrictedTrustManagerTests extends ESTestCase { trustedNames.add("node" + node + ".cluster" + trustedCluster + ".elasticsearch"); } final CertificateTrustRestrictions restrictions = new CertificateTrustRestrictions(trustedNames); - final RestrictedTrustManager trustManager = new RestrictedTrustManager(Settings.EMPTY, baseTrustManager, restrictions); + final RestrictedTrustManager trustManager = new RestrictedTrustManager(baseTrustManager, restrictions); assertSingleClusterIsTrusted(trustedCluster, trustManager, trustedNames); } @@ -137,7 +136,7 @@ public class RestrictedTrustManagerTests extends ESTestCase { final int trustedCluster = randomIntBetween(1, numberOfClusters); final List trustedNames = Collections.singletonList("*.cluster" + trustedCluster + ".elasticsearch"); final CertificateTrustRestrictions restrictions = new CertificateTrustRestrictions(trustedNames); - final RestrictedTrustManager trustManager = new RestrictedTrustManager(Settings.EMPTY, baseTrustManager, restrictions); + final RestrictedTrustManager trustManager = new RestrictedTrustManager(baseTrustManager, restrictions); assertSingleClusterIsTrusted(trustedCluster, trustManager, trustedNames); } @@ -147,7 +146,7 @@ public class RestrictedTrustManagerTests extends ESTestCase { final CertificateTrustRestrictions restrictions = new CertificateTrustRestrictions( trustedNames ); - final RestrictedTrustManager trustManager = new RestrictedTrustManager(Settings.EMPTY, baseTrustManager, restrictions); + final RestrictedTrustManager trustManager = new RestrictedTrustManager(baseTrustManager, restrictions); for (int cluster = 1; cluster <= numberOfClusters; cluster++) { for (int node = 1; node <= numberOfNodes; node++) { if (node == trustedNode) { @@ -161,7 +160,7 @@ public class RestrictedTrustManagerTests extends ESTestCase { public void testThatDelegateTrustManagerIsRespected() throws Exception { final 
CertificateTrustRestrictions restrictions = new CertificateTrustRestrictions(Collections.singletonList("*.elasticsearch")); - final RestrictedTrustManager trustManager = new RestrictedTrustManager(Settings.EMPTY, baseTrustManager, restrictions); + final RestrictedTrustManager trustManager = new RestrictedTrustManager(baseTrustManager, restrictions); for (String cert : certificates.keySet()) { if (cert.endsWith("/ca")) { assertTrusted(trustManager, cert); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java index 9202207a14e..d8e0b693f70 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java @@ -477,9 +477,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase { try (InputStream is = Files.newInputStream(keyStorePath)) { keyStore.load(is, keyStorePass.toCharArray()); } - // TODO Revisit TLS1.2 pinning when TLS1.3 is fully supported - // https://github.com/elastic/elasticsearch/issues/32276 - final SSLContext sslContext = new SSLContextBuilder().useProtocol("TLSv1.2").loadKeyMaterial(keyStore, keyStorePass.toCharArray()) + final SSLContext sslContext = new SSLContextBuilder().loadKeyMaterial(keyStore, keyStorePass.toCharArray()) .build(); MockWebServer server = new MockWebServer(sslContext, false); server.enqueue(new MockResponse().setResponseCode(200).setBody("body")); @@ -493,9 +491,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase { keyStore.load(null, password.toCharArray()); keyStore.setKeyEntry("testnode_ec", PemUtils.readPrivateKey(keyPath, password::toCharArray), password.toCharArray(), CertParsingUtils.readCertificates(Collections.singletonList(certPath))); - // TODO Revisit TLS1.2 pinning when TLS1.3 is fully supported - // https://github.com/elastic/elasticsearch/issues/32276 - final SSLContext sslContext = new SSLContextBuilder().useProtocol("TLSv1.2").loadKeyMaterial(keyStore, password.toCharArray()) + final SSLContext sslContext = new SSLContextBuilder().loadKeyMaterial(keyStore, password.toCharArray()) .build(); MockWebServer server = new MockWebServer(sslContext, false); server.enqueue(new MockResponse().setResponseCode(200).setBody("body")); @@ -510,7 +506,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase { try (InputStream is = Files.newInputStream(trustStorePath)) { trustStore.load(is, trustStorePass.toCharArray()); } - final SSLContext sslContext = new SSLContextBuilder().useProtocol("TLSv1.2").loadTrustMaterial(trustStore, null).build(); + final SSLContext sslContext = new SSLContextBuilder().loadTrustMaterial(trustStore, null).build(); return HttpClients.custom().setSSLContext(sslContext).build(); } @@ -527,7 +523,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase { for (Certificate cert : CertParsingUtils.readCertificates(trustedCertificatePaths)) { trustStore.setCertificateEntry(cert.toString(), cert); } - final SSLContext sslContext = new SSLContextBuilder().useProtocol("TLSv1.2").loadTrustMaterial(trustStore, null).build(); + final SSLContext sslContext = new SSLContextBuilder().loadTrustMaterial(trustStore, null).build(); return HttpClients.custom().setSSLContext(sslContext).build(); } diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java index 048ad2e8e36..e0fee670d8d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java @@ -19,6 +19,7 @@ import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.CheckedRunnable; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -35,19 +36,29 @@ import javax.net.ssl.HostnameVerifier; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; import javax.net.ssl.SSLParameters; +import javax.net.ssl.SSLPeerUnverifiedException; +import javax.net.ssl.SSLSession; +import javax.net.ssl.SSLSessionContext; import javax.net.ssl.SSLSocket; import javax.net.ssl.SSLSocketFactory; import javax.net.ssl.X509ExtendedTrustManager; +import javax.security.cert.X509Certificate; import java.nio.file.Path; import java.security.AccessController; +import java.security.Principal; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; +import java.security.cert.Certificate; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; +import java.util.Enumeration; +import java.util.HashMap; import java.util.Iterator; import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.contains; @@ -654,6 +665,57 @@ public class SSLServiceTests extends ESTestCase { assertFalse(iterator.hasNext()); } + public void testSSLSessionInvalidationHandlesNullSessions() { + final int numEntries = randomIntBetween(1, 32); + final AtomicInteger invalidationCounter = new AtomicInteger(); + int numNull = 0; + final Map sessionMap = new HashMap<>(); + for (int i = 0; i < numEntries; i++) { + final byte[] id = randomByteArrayOfLength(2); + final SSLSession sslSession; + if (rarely()) { + sslSession = null; + numNull++; + } else { + sslSession = new MockSSLSession(id, invalidationCounter::incrementAndGet); + } + sessionMap.put(id, sslSession); + } + + SSLSessionContext sslSessionContext = new SSLSessionContext() { + @Override + public SSLSession getSession(byte[] sessionId) { + return sessionMap.get(sessionId); + } + + @Override + public Enumeration getIds() { + return Collections.enumeration(sessionMap.keySet()); + } + + @Override + public void setSessionTimeout(int seconds) throws IllegalArgumentException { + } + + @Override + public int getSessionTimeout() { + return 0; + } + + @Override + public void setSessionCacheSize(int size) throws IllegalArgumentException { + } + + @Override + public int getSessionCacheSize() { + return 0; + } + }; + + SSLService.invalidateSessions(sslSessionContext); + assertEquals(numEntries - numNull, invalidationCounter.get()); + } + @Network public void testThatSSLContextWithoutSettingsWorks() throws Exception { SSLService sslService = new SSLService(Settings.EMPTY, env); @@ -761,4 +823,120 @@ public class SSLServiceTests extends ESTestCase { } } + private static final class MockSSLSession 
implements SSLSession { + + private final byte[] id; + private final Runnable invalidation; + + private MockSSLSession(byte[] id, Runnable invalidation) { + this.id = id; + this.invalidation = invalidation; + } + + @Override + public byte[] getId() { + return id; + } + + @Override + public SSLSessionContext getSessionContext() { + return null; + } + + @Override + public long getCreationTime() { + return 0; + } + + @Override + public long getLastAccessedTime() { + return 0; + } + + @Override + public void invalidate() { + invalidation.run(); + } + + @Override + public boolean isValid() { + return false; + } + + @Override + public void putValue(String name, Object value) { + + } + + @Override + public Object getValue(String name) { + return null; + } + + @Override + public void removeValue(String name) { + + } + + @Override + public String[] getValueNames() { + return new String[0]; + } + + @Override + public Certificate[] getPeerCertificates() throws SSLPeerUnverifiedException { + return new Certificate[0]; + } + + @Override + public Certificate[] getLocalCertificates() { + return new Certificate[0]; + } + + @SuppressForbidden(reason = "need to reference deprecated class to implement JDK interface") + @Override + public X509Certificate[] getPeerCertificateChain() throws SSLPeerUnverifiedException { + return new X509Certificate[0]; + } + + @Override + public Principal getPeerPrincipal() throws SSLPeerUnverifiedException { + return null; + } + + @Override + public Principal getLocalPrincipal() { + return null; + } + + @Override + public String getCipherSuite() { + return null; + } + + @Override + public String getProtocol() { + return null; + } + + @Override + public String getPeerHost() { + return null; + } + + @Override + public int getPeerPort() { + return 0; + } + + @Override + public int getPacketBufferSize() { + return 0; + } + + @Override + public int getApplicationBufferSize() { + return 0; + } + } } diff --git a/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/test/GraphTests.java b/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/test/GraphTests.java index a58d8e8a8b0..611bf74fbf4 100644 --- a/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/test/GraphTests.java +++ b/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/test/GraphTests.java @@ -27,9 +27,9 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.core.XPackPlugin; -import org.elasticsearch.xpack.graph.Graph; import org.elasticsearch.xpack.core.graph.action.GraphExploreAction; import org.elasticsearch.xpack.core.graph.action.GraphExploreRequestBuilder; +import org.elasticsearch.xpack.graph.Graph; import java.util.Collection; import java.util.Collections; @@ -46,7 +46,7 @@ import static org.hamcrest.Matchers.greaterThan; public class GraphTests extends ESSingleNodeTestCase { - + static class DocTemplate { int numDocs; String[] people; @@ -61,20 +61,20 @@ public class GraphTests extends ESSingleNodeTestCase { this.people = people; } } - + static final DocTemplate[] socialNetTemplate = { new DocTemplate(10, "60s", "beatles", "john", "paul", "george", "ringo"), - new DocTemplate(2, "60s", "collaboration", "ravi", "george"), - new DocTemplate(3, "80s", "travelling wilburys", "roy", "george", "jeff"), - new DocTemplate(5, "80s", "travelling wilburys", "roy", "jeff", "bob"), - new DocTemplate(1, "70s", "collaboration", "roy", "elvis"), - new DocTemplate(10, 
"90s", "nirvana", "dave", "kurt"), - new DocTemplate(2, "00s", "collaboration", "dave", "paul"), - new DocTemplate(2, "80s", "collaboration", "stevie", "paul"), - new DocTemplate(2, "70s", "collaboration", "john", "yoko"), + new DocTemplate(2, "60s", "collaboration", "ravi", "george"), + new DocTemplate(3, "80s", "travelling wilburys", "roy", "george", "jeff"), + new DocTemplate(5, "80s", "travelling wilburys", "roy", "jeff", "bob"), + new DocTemplate(1, "70s", "collaboration", "roy", "elvis"), + new DocTemplate(10, "90s", "nirvana", "dave", "kurt"), + new DocTemplate(2, "00s", "collaboration", "dave", "paul"), + new DocTemplate(2, "80s", "collaboration", "stevie", "paul"), + new DocTemplate(2, "70s", "collaboration", "john", "yoko"), new DocTemplate(100, "70s", "fillerDoc", "other", "irrelevant", "duplicated", "spammy", "background") - }; + }; @Override public void setUp() throws Exception { @@ -112,7 +112,7 @@ public class GraphTests extends ESSingleNodeTestCase { assertEquals(1, shardSegments.getSegments().size()); } } - + assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), numDocs); } @@ -126,7 +126,7 @@ public class GraphTests extends ESSingleNodeTestCase { Hop hop1 = grb.createNextHop(QueryBuilders.termQuery("description", "beatles")); hop1.addVertexRequest("people").size(10).minDocCount(1); // members of beatles grb.createNextHop(null).addVertexRequest("people").size(100).minDocCount(1); // friends of members of beatles - + GraphExploreResponse response = grb.get(); checkVertexDepth(response, 0, "john", "paul", "george", "ringo"); @@ -135,8 +135,8 @@ public class GraphTests extends ESSingleNodeTestCase { checkVertexIsMoreImportant(response, "John's only collaboration is more relevant than George's with profligate Roy", "yoko", "roy"); assertNull("Elvis is a 3rd tier connection so should not be returned here", response.getVertex(Vertex.createId("people","elvis"))); } - - + + @Override protected Settings nodeSettings() { // Disable security otherwise authentication failures happen creating indices. 
@@ -155,7 +155,7 @@ public class GraphTests extends ESSingleNodeTestCase { Hop hop1 = grb.createNextHop(QueryBuilders.termQuery("description", "beatles")); hop1.addVertexRequest("people").size(10).minDocCount(1); // members of beatles //70s friends of beatles - grb.createNextHop(QueryBuilders.termQuery("decade", "70s")).addVertexRequest("people").size(100).minDocCount(1); + grb.createNextHop(QueryBuilders.termQuery("decade", "70s")).addVertexRequest("people").size(100).minDocCount(1); GraphExploreResponse response = grb.get(); @@ -163,37 +163,37 @@ public class GraphTests extends ESSingleNodeTestCase { checkVertexDepth(response, 1, "yoko"); assertNull("Roy collaborated with George in the 80s not the 70s", response.getVertex(Vertex.createId("people","roy"))); assertNull("Stevie collaborated with Paul in the 80s not the 70s", response.getVertex(Vertex.createId("people","stevie"))); - + } - - + + public void testLargeNumberTermsStartCrawl() { GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("test"); Hop hop1 = grb.createNextHop(null); - VertexRequest peopleNames = hop1.addVertexRequest("people").minDocCount(1); + VertexRequest peopleNames = hop1.addVertexRequest("people").minDocCount(1); peopleNames.addInclude("john", 1); - + for (int i = 0; i < BooleanQuery.getMaxClauseCount()+1; i++) { - peopleNames.addInclude("unknown"+i, 1); + peopleNames.addInclude("unknown"+i, 1); } - + grb.createNextHop(null).addVertexRequest("people").size(100).minDocCount(1); // friends of members of beatles - + GraphExploreResponse response = grb.get(); checkVertexDepth(response, 0, "john"); checkVertexDepth(response, 1, "yoko"); - } + } public void testTargetedQueryCrawlDepth2() { GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("test"); Hop hop1 = grb.createNextHop(QueryBuilders.termQuery("description", "beatles")); hop1.addVertexRequest("people").size(10).minDocCount(1); // members of beatles //00s friends of beatles - grb.createNextHop(QueryBuilders.termQuery("decade", "00s")).addVertexRequest("people").size(100).minDocCount(1); + grb.createNextHop(QueryBuilders.termQuery("decade", "00s")).addVertexRequest("people").size(100).minDocCount(1); //90s friends of friends of beatles - grb.createNextHop(QueryBuilders.termQuery("decade", "90s")).addVertexRequest("people").size(100).minDocCount(1); + grb.createNextHop(QueryBuilders.termQuery("decade", "90s")).addVertexRequest("people").size(100).minDocCount(1); GraphExploreResponse response = grb.get(); @@ -201,9 +201,9 @@ public class GraphTests extends ESSingleNodeTestCase { checkVertexDepth(response, 0, "john", "paul", "george", "ringo"); checkVertexDepth(response, 1, "dave"); checkVertexDepth(response, 2, "kurt"); - + } - + public void testPopularityQueryCrawl() { GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("test"); // Turning off the significance feature means we reward popularity @@ -211,7 +211,7 @@ public class GraphTests extends ESSingleNodeTestCase { Hop hop1 = grb.createNextHop(QueryBuilders.termQuery("description", "beatles")); hop1.addVertexRequest("people").size(10).minDocCount(1); // members of beatles grb.createNextHop(null).addVertexRequest("people").size(100).minDocCount(1); // friends of members of beatles - + GraphExploreResponse response = grb.get(); checkVertexDepth(response, 0, "john", "paul", "george", "ringo"); @@ -219,15 +219,15 @@ public class 
GraphTests extends ESSingleNodeTestCase { checkVertexIsMoreImportant(response, "Yoko has more collaborations than Stevie", "yoko", "stevie"); checkVertexIsMoreImportant(response, "Roy has more collaborations than Stevie", "roy", "stevie"); assertNull("Elvis is a 3rd tier connection so should not be returned here", response.getVertex(Vertex.createId("people","elvis"))); - } - + } + public void testTimedoutQueryCrawl() { GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("test"); grb.setTimeout(TimeValue.timeValueMillis(400)); Hop hop1 = grb.createNextHop(QueryBuilders.termQuery("description", "beatles")); hop1.addVertexRequest("people").size(10).minDocCount(1); // members of beatles //00s friends of beatles - grb.createNextHop(QueryBuilders.termQuery("decade", "00s")).addVertexRequest("people").size(100).minDocCount(1); + grb.createNextHop(QueryBuilders.termQuery("decade", "00s")).addVertexRequest("people").size(100).minDocCount(1); // A query that should cause a timeout ScriptQueryBuilder timeoutQuery = QueryBuilders.scriptQuery(new Script(ScriptType.INLINE, "mockscript", "graph_timeout", Collections.emptyMap())); @@ -237,13 +237,13 @@ public class GraphTests extends ESSingleNodeTestCase { assertTrue(response.isTimedOut()); checkVertexDepth(response, 0, "john", "paul", "george", "ringo"); - - // Most of the test runs we reach dave in the allotted time before we hit our + + // Most of the test runs we reach dave in the allotted time before we hit our // intended delay but sometimes this doesn't happen so I commented this line out. - - // checkVertexDepth(response, 1, "dave"); + + // checkVertexDepth(response, 1, "dave"); } - + public void testNonDiversifiedCrawl() { GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("test"); @@ -289,14 +289,14 @@ public class GraphTests extends ESSingleNodeTestCase { String message = expectedError.toString(); assertTrue(message.contains("Sample diversifying key must be a single valued-field")); } - + public void testMappedAndUnmappedQueryCrawl() { GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE) .setIndices("test", "idx_unmapped"); Hop hop1 = grb.createNextHop(QueryBuilders.termQuery("description", "beatles")); hop1.addVertexRequest("people").size(10).minDocCount(1); // members of beatles grb.createNextHop(null).addVertexRequest("people").size(100).minDocCount(1); // friends of members of beatles - + GraphExploreResponse response = grb.get(); checkVertexDepth(response, 0, "john", "paul", "george", "ringo"); @@ -304,13 +304,13 @@ public class GraphTests extends ESSingleNodeTestCase { checkVertexIsMoreImportant(response, "John's only collaboration is more relevant than one of Paul's many", "yoko", "stevie"); checkVertexIsMoreImportant(response, "John's only collaboration is more relevant than George's with profligate Roy", "yoko", "roy"); assertNull("Elvis is a 3rd tier connection so should not be returned here", response.getVertex(Vertex.createId("people","elvis"))); - } - + } + public void testUnmappedQueryCrawl() { GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("idx_unmapped"); Hop hop1 = grb.createNextHop(QueryBuilders.termQuery("description", "beatles")); hop1.addVertexRequest("people").size(10).minDocCount(1); - + GraphExploreResponse response = grb.get(); assertEquals(0, response.getConnections().size()); 
assertEquals(0, response.getVertices().size()); @@ -327,7 +327,7 @@ public class GraphTests extends ESSingleNodeTestCase { assertTrue(rte.getMessage().contains(GraphExploreRequest.NO_HOPS_ERROR_MESSAGE)); } - Hop hop = grb.createNextHop(null); + grb.createNextHop(null); try { grb.get(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index cd13b2c8bb6..5f09b896fd0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -168,7 +168,6 @@ import org.elasticsearch.xpack.ml.job.categorization.MlClassicTokenizerFactory; import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider; -import org.elasticsearch.xpack.ml.job.process.DataCountsReporter; import org.elasticsearch.xpack.ml.job.process.NativeController; import org.elasticsearch.xpack.ml.job.process.NativeControllerHolder; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectBuilder; @@ -292,8 +291,7 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu MAX_MACHINE_MEMORY_PERCENT, AutodetectBuilder.DONT_PERSIST_MODEL_STATE_SETTING, AutodetectBuilder.MAX_ANOMALY_RECORDS_SETTING, - DataCountsReporter.ACCEPTABLE_PERCENTAGE_DATE_PARSE_ERRORS_SETTING, - DataCountsReporter.ACCEPTABLE_PERCENTAGE_OUT_OF_ORDER_ERRORS_SETTING, + AutodetectBuilder.MAX_ANOMALY_RECORDS_SETTING_DYNAMIC, AutodetectProcessManager.MAX_RUNNING_JOBS_PER_NODE, AutodetectProcessManager.MAX_OPEN_JOBS_PER_NODE, AutodetectProcessManager.MIN_DISK_SPACE_OFF_HEAP)); @@ -379,7 +377,12 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu // This will only happen when path.home is not set, which is disallowed in production throw new ElasticsearchException("Failed to create native process controller for Machine Learning"); } - autodetectProcessFactory = new NativeAutodetectProcessFactory(environment, settings, nativeController, client); + autodetectProcessFactory = new NativeAutodetectProcessFactory( + environment, + settings, + nativeController, + client, + clusterService); normalizerProcessFactory = new NativeNormalizerProcessFactory(environment, settings, nativeController); } catch (IOException e) { // This also should not happen in production, as the MachineLearningFeatureSet should have diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java index a74d1498f10..1d285b91f2f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java @@ -5,27 +5,50 @@ */ package org.elasticsearch.xpack.ml.action; +import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; +import 
org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; +import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.query.ConstantScoreQueryBuilder; +import org.elasticsearch.index.query.IdsQueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.elasticsearch.index.reindex.DeleteByQueryAction; +import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -33,31 +56,51 @@ import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.DeleteJobAction; +import org.elasticsearch.xpack.core.ml.action.GetModelSnapshotsAction; import org.elasticsearch.xpack.core.ml.action.KillProcessAction; -import org.elasticsearch.xpack.core.ml.job.persistence.JobStorageDeletionTask; -import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.core.ml.action.util.PageParams; +import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.ml.job.messages.Messages; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; +import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerState; +import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; +import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; +import org.elasticsearch.xpack.ml.job.persistence.JobDataDeleter; +import 
org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider; +import org.elasticsearch.xpack.ml.notifications.Auditor; +import org.elasticsearch.xpack.ml.utils.MlIndicesUtils; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; import java.util.concurrent.TimeoutException; +import java.util.function.Consumer; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; public class TransportDeleteJobAction extends TransportMasterNodeAction { + private static final int MAX_SNAPSHOTS_TO_DELETE = 10000; + private final Client client; - private final JobManager jobManager; private final PersistentTasksService persistentTasksService; + private final Auditor auditor; + private final JobResultsProvider jobResultsProvider; @Inject public TransportDeleteJobAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, JobManager jobManager, - PersistentTasksService persistentTasksService, Client client) { + IndexNameExpressionResolver indexNameExpressionResolver, PersistentTasksService persistentTasksService, + Client client, Auditor auditor, JobResultsProvider jobResultsProvider) { super(settings, DeleteJobAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, DeleteJobAction.Request::new); this.client = client; - this.jobManager = jobManager; this.persistentTasksService = persistentTasksService; + this.auditor = auditor; + this.jobResultsProvider = jobResultsProvider; } @Override @@ -72,14 +115,14 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction listener) throws Exception { + ActionListener listener) { ActionListener markAsDeletingListener = ActionListener.wrap( response -> { if (request.isForce()) { - forceDeleteJob(request, (JobStorageDeletionTask) task, listener); + forceDeleteJob(request, listener); } else { - normalDeleteJob(request, (JobStorageDeletionTask) task, listener); + normalDeleteJob(request, listener); } }, e -> { @@ -95,7 +138,7 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction { if (request.isForce() && e2 instanceof TimeoutException) { - forceDeleteJob(request, (JobStorageDeletionTask) task, listener); + forceDeleteJob(request, listener); } else { listener.onFailure(e2); } @@ -110,8 +153,7 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction listener) throws Exception { + protected void masterOperation(DeleteJobAction.Request request, ClusterState state, ActionListener listener) { throw new UnsupportedOperationException("the Task parameter is required"); } @@ -120,13 +162,290 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction listener) { - jobManager.deleteJob(request, task, listener); + private void normalDeleteJob(DeleteJobAction.Request request, ActionListener listener) { + String jobId = request.getJobId(); + logger.debug("Deleting job '" + jobId + "'"); + + // Step 4. 
When the job has been removed from the cluster state, return a response + // ------- + CheckedConsumer apiResponseHandler = jobDeleted -> { + if (jobDeleted) { + logger.info("Job [" + jobId + "] deleted"); + auditor.info(jobId, Messages.getMessage(Messages.JOB_AUDIT_DELETED)); + listener.onResponse(new AcknowledgedResponse(true)); + } else { + listener.onResponse(new AcknowledgedResponse(false)); + } + }; + + // Step 3. When the physical storage has been deleted, remove from Cluster State + // ------- + CheckedConsumer deleteJobStateHandler = response -> clusterService.submitStateUpdateTask( + "delete-job-" + jobId, + new AckedClusterStateUpdateTask(request, ActionListener.wrap(apiResponseHandler, listener::onFailure)) { + + @Override + protected Boolean newResponse(boolean acknowledged) { + return acknowledged && response; + } + + @Override + public ClusterState execute(ClusterState currentState) { + MlMetadata currentMlMetadata = MlMetadata.getMlMetadata(currentState); + if (currentMlMetadata.getJobs().containsKey(jobId) == false) { + // We wouldn't have got here if the job never existed so + // the Job must have been deleted by another action. + // Don't error in this case + return currentState; + } + + MlMetadata.Builder builder = new MlMetadata.Builder(currentMlMetadata); + builder.deleteJob(jobId, currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE)); + return buildNewClusterState(currentState, builder); + } + }); + + + // Step 2. Remove the job from any calendars + CheckedConsumer removeFromCalendarsHandler = response -> jobResultsProvider.removeJobFromCalendars(jobId, + ActionListener.wrap(deleteJobStateHandler::accept, listener::onFailure )); + + + // Step 1. Delete the physical storage + deleteJobDocuments(jobId, removeFromCalendarsHandler, listener::onFailure); } - private void forceDeleteJob(DeleteJobAction.Request request, JobStorageDeletionTask task, - ActionListener listener) { + private void deleteJobDocuments(String jobId, CheckedConsumer finishedHandler, Consumer failureHandler) { + + final String indexName = AnomalyDetectorsIndex.getPhysicalIndexFromState(clusterService.state(), jobId); + final String indexPattern = indexName + "-*"; + + final ActionListener completionHandler = ActionListener.wrap( + response -> finishedHandler.accept(response.isAcknowledged()), + failureHandler); + + // Step 7. If we did not drop the index, delete the aliases once the DBQ has finished + ActionListener dbqHandler = ActionListener.wrap( + bulkByScrollResponse -> { + if (bulkByScrollResponse == null) { // no action was taken by DBQ, assume Index was deleted + completionHandler.onResponse(new AcknowledgedResponse(true)); + } else { + if (bulkByScrollResponse.isTimedOut()) { + logger.warn("[{}] DeleteByQuery for indices [{}, {}] timed out.", jobId, indexName, indexPattern); + } + if (!bulkByScrollResponse.getBulkFailures().isEmpty()) { + logger.warn("[{}] {} failures and {} conflicts encountered while running DeleteByQuery on indices [{}, {}].", + jobId, bulkByScrollResponse.getBulkFailures().size(), bulkByScrollResponse.getVersionConflicts(), + indexName, indexPattern); + for (BulkItemResponse.Failure failure : bulkByScrollResponse.getBulkFailures()) { + logger.warn("DBQ failure: " + failure); + } + } + deleteAliases(jobId, client, completionHandler); + } + }, + failureHandler); + + // Step 6. 
If we did not delete the index, we run a delete by query + ActionListener deleteByQueryExecutor = ActionListener.wrap( + response -> { + if (response) { + logger.info("Running DBQ on [" + indexName + "," + indexPattern + "] for job [" + jobId + "]"); + DeleteByQueryRequest request = new DeleteByQueryRequest(indexName, indexPattern); + ConstantScoreQueryBuilder query = + new ConstantScoreQueryBuilder(new TermQueryBuilder(Job.ID.getPreferredName(), jobId)); + request.setQuery(query); + request.setIndicesOptions(MlIndicesUtils.addIgnoreUnavailable(IndicesOptions.lenientExpandOpen())); + request.setSlices(5); + request.setAbortOnVersionConflict(false); + request.setRefresh(true); + + executeAsyncWithOrigin(client, ML_ORIGIN, DeleteByQueryAction.INSTANCE, request, dbqHandler); + } else { // We did not execute DBQ, no need to delete aliases or check the response + dbqHandler.onResponse(null); + } + }, + failureHandler); + + // Step 5. If we have any hits, that means we are NOT the only job on this index, and should not delete it + // if we do not have any hits, we can drop the index and then skip the DBQ and alias deletion + ActionListener customIndexSearchHandler = ActionListener.wrap( + searchResponse -> { + if (searchResponse == null || searchResponse.getHits().totalHits > 0) { + deleteByQueryExecutor.onResponse(true); // We need to run DBQ and alias deletion + } else { + logger.info("Running DELETE Index on [" + indexName + "] for job [" + jobId + "]"); + DeleteIndexRequest request = new DeleteIndexRequest(indexName); + request.indicesOptions(IndicesOptions.lenientExpandOpen()); + // If we have deleted the index, then we don't need to delete the aliases or run the DBQ + executeAsyncWithOrigin( + client.threadPool().getThreadContext(), + ML_ORIGIN, + request, + ActionListener.wrap( + response -> deleteByQueryExecutor.onResponse(false), // skip DBQ && Alias + failureHandler), + client.admin().indices()::delete); + } + }, + failure -> { + if (failure.getClass() == IndexNotFoundException.class) { // assume the index is already deleted + deleteByQueryExecutor.onResponse(false); // skip DBQ && Alias + } else { + failureHandler.accept(failure); + } + } + ); + + // Step 4. Determine if we are on a shared index by looking at `.ml-anomalies-shared` or the custom index's aliases + ActionListener deleteCategorizerStateHandler = ActionListener.wrap( + response -> { + if (indexName.equals(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT)) { + //don't bother searching the index any further, we are on the default shared + customIndexSearchHandler.onResponse(null); + } else { + SearchSourceBuilder source = new SearchSourceBuilder() + .size(1) + .query(QueryBuilders.boolQuery().filter( + QueryBuilders.boolQuery().mustNot(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobId)))); + + SearchRequest searchRequest = new SearchRequest(indexName); + searchRequest.source(source); + executeAsyncWithOrigin(client, ML_ORIGIN, SearchAction.INSTANCE, searchRequest, customIndexSearchHandler); + } + }, + failureHandler + ); + + // Step 3. Delete quantiles done, delete the categorizer state + ActionListener deleteQuantilesHandler = ActionListener.wrap( + response -> deleteCategorizerState(jobId, client, 1, deleteCategorizerStateHandler), + failureHandler); + + // Step 2. 
Delete state done, delete the quantiles + ActionListener deleteStateHandler = ActionListener.wrap( + bulkResponse -> deleteQuantiles(jobId, client, deleteQuantilesHandler), + failureHandler); + + // Step 1. Delete the model state + deleteModelState(jobId, client, deleteStateHandler); + } + + private void deleteQuantiles(String jobId, Client client, ActionListener finishedHandler) { + // The quantiles type and doc ID changed in v5.5 so delete both the old and new format + DeleteByQueryRequest request = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexName()); + // Just use ID here, not type, as trying to delete different types spams the logs with an exception stack trace + IdsQueryBuilder query = new IdsQueryBuilder().addIds(Quantiles.documentId(jobId), + // TODO: remove in 7.0 + Quantiles.v54DocumentId(jobId)); + request.setQuery(query); + request.setIndicesOptions(MlIndicesUtils.addIgnoreUnavailable(IndicesOptions.lenientExpandOpen())); + request.setAbortOnVersionConflict(false); + request.setRefresh(true); + + executeAsyncWithOrigin(client, ML_ORIGIN, DeleteByQueryAction.INSTANCE, request, ActionListener.wrap( + response -> finishedHandler.onResponse(true), + e -> { + // It's not a problem for us if the index wasn't found - it's equivalent to document not found + if (e instanceof IndexNotFoundException) { + finishedHandler.onResponse(true); + } else { + finishedHandler.onFailure(e); + } + })); + } + + private void deleteModelState(String jobId, Client client, ActionListener listener) { + GetModelSnapshotsAction.Request request = new GetModelSnapshotsAction.Request(jobId, null); + request.setPageParams(new PageParams(0, MAX_SNAPSHOTS_TO_DELETE)); + executeAsyncWithOrigin(client, ML_ORIGIN, GetModelSnapshotsAction.INSTANCE, request, ActionListener.wrap( + response -> { + List deleteCandidates = response.getPage().results(); + JobDataDeleter deleter = new JobDataDeleter(client, jobId); + deleter.deleteModelSnapshots(deleteCandidates, listener); + }, + listener::onFailure)); + } + + private void deleteCategorizerState(String jobId, Client client, int docNum, ActionListener finishedHandler) { + // The categorizer state type and doc ID changed in v5.5 so delete both the old and new format + DeleteByQueryRequest request = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexName()); + // Just use ID here, not type, as trying to delete different types spams the logs with an exception stack trace + IdsQueryBuilder query = new IdsQueryBuilder().addIds(CategorizerState.documentId(jobId, docNum), + // TODO: remove in 7.0 + CategorizerState.v54DocumentId(jobId, docNum)); + request.setQuery(query); + request.setIndicesOptions(MlIndicesUtils.addIgnoreUnavailable(IndicesOptions.lenientExpandOpen())); + request.setAbortOnVersionConflict(false); + request.setRefresh(true); + + executeAsyncWithOrigin(client, ML_ORIGIN, DeleteByQueryAction.INSTANCE, request, ActionListener.wrap( + response -> { + // If we successfully deleted a document try the next one; if not we're done + if (response.getDeleted() > 0) { + // There's an assumption here that there won't be very many categorizer + // state documents, so the recursion won't go more than, say, 5 levels deep + deleteCategorizerState(jobId, client, docNum + 1, finishedHandler); + return; + } + finishedHandler.onResponse(true); + }, + e -> { + // It's not a problem for us if the index wasn't found - it's equivalent to document not found + if (e instanceof IndexNotFoundException) { + finishedHandler.onResponse(true); + } else { + 
finishedHandler.onFailure(e); + } + })); + } + + private void deleteAliases(String jobId, Client client, ActionListener finishedHandler) { + final String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); + final String writeAliasName = AnomalyDetectorsIndex.resultsWriteAlias(jobId); + + // first find the concrete indices associated with the aliases + GetAliasesRequest aliasesRequest = new GetAliasesRequest().aliases(readAliasName, writeAliasName) + .indicesOptions(IndicesOptions.lenientExpandOpen()); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, aliasesRequest, + ActionListener.wrap( + getAliasesResponse -> { + // remove the aliases from the concrete indices found in the first step + IndicesAliasesRequest removeRequest = buildRemoveAliasesRequest(getAliasesResponse); + if (removeRequest == null) { + // don't error if the job's aliases have already been deleted - carry on and delete the + // rest of the job's data + finishedHandler.onResponse(new AcknowledgedResponse(true)); + return; + } + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, removeRequest, + ActionListener.wrap( + finishedHandler::onResponse, + finishedHandler::onFailure), + client.admin().indices()::aliases); + }, + finishedHandler::onFailure), client.admin().indices()::getAliases); + } + + private IndicesAliasesRequest buildRemoveAliasesRequest(GetAliasesResponse getAliasesResponse) { + Set aliases = new HashSet<>(); + List indices = new ArrayList<>(); + for (ObjectObjectCursor> entry : getAliasesResponse.getAliases()) { + // The response includes _all_ indices, but only those associated with + // the aliases we asked about will have associated AliasMetaData + if (entry.value.isEmpty() == false) { + indices.add(entry.key); + entry.value.forEach(metadata -> aliases.add(metadata.getAlias())); + } + } + return aliases.isEmpty() ? 
null : new IndicesAliasesRequest().addAliasAction( + IndicesAliasesRequest.AliasActions.remove() + .aliases(aliases.toArray(new String[aliases.size()])) + .indices(indices.toArray(new String[indices.size()]))); + } + + private void forceDeleteJob(DeleteJobAction.Request request, ActionListener listener) { final ClusterState state = clusterService.state(); final String jobId = request.getJobId(); @@ -135,13 +454,13 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction removeTaskListener = new ActionListener() { @Override public void onResponse(Boolean response) { - jobManager.deleteJob(request, task, listener); + normalDeleteJob(request, listener); } @Override public void onFailure(Exception e) { if (e instanceof ResourceNotFoundException) { - jobManager.deleteJob(request, task, listener); + normalDeleteJob(request, listener); } else { listener.onFailure(e); } @@ -151,9 +470,7 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction killJobListener = ActionListener.wrap( - response -> { - removePersistentTask(request.getJobId(), state, removeTaskListener); - }, + response -> removePersistentTask(request.getJobId(), state, removeTaskListener), e -> { if (e instanceof ElasticsearchStatusException) { // Killing the process marks the task as completed so it @@ -197,7 +514,7 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction listener, boolean force) { + private void markJobAsDeleting(String jobId, ActionListener listener, boolean force) { clusterService.submitStateUpdateTask("mark-job-as-deleted", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { @@ -220,7 +537,7 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction listener) { + private void waitForDeletingJob(String jobId, TimeValue timeout, ActionListener listener) { ClusterStateObserver stateObserver = new ClusterStateObserver(clusterService, timeout, logger, threadPool.getThreadContext()); ClusterState clusterState = stateObserver.setAndGetObservedState(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java index b880bf6fa0c..6d0721b03d9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java @@ -20,7 +20,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.DeleteModelSnapshotAction; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.messages.Messages; -import org.elasticsearch.xpack.core.ml.job.persistence.JobDataDeleter; +import org.elasticsearch.xpack.ml.job.persistence.JobDataDeleter; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFindFileStructureAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFindFileStructureAction.java index ec37a2b7481..0906af9a80d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFindFileStructureAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFindFileStructureAction.java @@ -48,10 +48,10 @@ public class TransportFindFileStructureAction private FindFileStructureAction.Response buildFileStructureResponse(FindFileStructureAction.Request request) throws Exception { - FileStructureFinderManager structureFinderManager = new FileStructureFinderManager(); + FileStructureFinderManager structureFinderManager = new FileStructureFinderManager(threadPool.scheduler()); FileStructureFinder fileStructureFinder = structureFinderManager.findFileStructure(request.getLinesToSample(), - request.getSample().streamInput(), new FileStructureOverrides(request)); + request.getSample().streamInput(), new FileStructureOverrides(request), request.getTimeout()); return new FindFileStructureAction.Response(fileStructureFinder.getStructure()); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java index 83a4c12b819..55fbdfa0f55 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java @@ -33,7 +33,7 @@ import org.elasticsearch.xpack.core.ml.action.GetFiltersAction; import org.elasticsearch.xpack.core.ml.action.util.PageParams; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; -import org.elasticsearch.xpack.core.ml.utils.MlIndicesUtils; +import org.elasticsearch.xpack.ml.utils.MlIndicesUtils; import java.io.IOException; import java.io.InputStream; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java index 4a17a2654c6..1ff5bdecf69 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java @@ -29,19 +29,19 @@ import org.elasticsearch.xpack.core.ml.action.GetOverallBucketsAction; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; -import org.elasticsearch.xpack.ml.job.persistence.BucketsQueryBuilder; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.OverallBucket; import org.elasticsearch.xpack.core.ml.job.results.Result; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.Intervals; -import org.elasticsearch.xpack.core.ml.utils.MlIndicesUtils; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.persistence.BucketsQueryBuilder; import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsAggregator; import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsCollector; import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsProcessor; import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsProvider; +import org.elasticsearch.xpack.ml.utils.MlIndicesUtils; import 
java.util.HashSet; import java.util.List; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java index 07b9dade4d8..6d5b8bdb0db 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java @@ -25,7 +25,7 @@ import org.elasticsearch.xpack.core.ml.action.RevertModelSnapshotAction; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.messages.Messages; -import org.elasticsearch.xpack.core.ml.job.persistence.JobDataDeleter; +import org.elasticsearch.xpack.ml.job.persistence.JobDataDeleter; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.job.JobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinder.java index a103560480d..8cdbd030eb5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinder.java @@ -41,10 +41,11 @@ public class DelimitedFileStructureFinder implements FileStructureFinder { static DelimitedFileStructureFinder makeDelimitedFileStructureFinder(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker, CsvPreference csvPreference, - boolean trimFields, FileStructureOverrides overrides) + boolean trimFields, FileStructureOverrides overrides, + TimeoutChecker timeoutChecker) throws IOException { - Tuple>, List> parsed = readRows(sample, csvPreference); + Tuple>, List> parsed = readRows(sample, csvPreference, timeoutChecker); List> rows = parsed.v1(); List lineNumbers = parsed.v2(); @@ -106,7 +107,8 @@ public class DelimitedFileStructureFinder implements FileStructureFinder { structureBuilder.setShouldTrimFields(true); } - Tuple timeField = FileStructureUtils.guessTimestampField(explanation, sampleRecords, overrides); + Tuple timeField = FileStructureUtils.guessTimestampField(explanation, sampleRecords, overrides, + timeoutChecker); if (timeField != null) { String timeLineRegex = null; StringBuilder builder = new StringBuilder("^"); @@ -141,13 +143,14 @@ public class DelimitedFileStructureFinder implements FileStructureFinder { } structureBuilder.setTimestampField(timeField.v1()) - .setTimestampFormats(timeField.v2().dateFormats) + .setJodaTimestampFormats(timeField.v2().jodaTimestampFormats) + .setJavaTimestampFormats(timeField.v2().javaTimestampFormats) .setNeedClientTimezone(timeField.v2().hasTimezoneDependentParsing()) .setMultilineStartPattern(timeLineRegex); } Tuple, SortedMap> mappingsAndFieldStats = - FileStructureUtils.guessMappingsAndCalculateFieldStats(explanation, sampleRecords); + FileStructureUtils.guessMappingsAndCalculateFieldStats(explanation, sampleRecords, timeoutChecker); SortedMap mappings = mappingsAndFieldStats.v1(); if (timeField != null) { @@ -182,7 +185,8 @@ public class DelimitedFileStructureFinder implements 
FileStructureFinder { return structure; } - static Tuple>, List> readRows(String sample, CsvPreference csvPreference) throws IOException { + static Tuple>, List> readRows(String sample, CsvPreference csvPreference, TimeoutChecker timeoutChecker) + throws IOException { int fieldsInFirstRow = -1; @@ -203,6 +207,7 @@ public class DelimitedFileStructureFinder implements FileStructureFinder { } } rows.add(row); + timeoutChecker.check("delimited record parsing"); lineNumbers.add(csvReader.getLineNumber()); } } catch (SuperCsvException e) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderFactory.java index 62e5eff517e..982a6ff7035 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderFactory.java @@ -62,8 +62,8 @@ public class DelimitedFileStructureFinderFactory implements FileStructureFinderF @Override public FileStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker, - FileStructureOverrides overrides) throws IOException { + FileStructureOverrides overrides, TimeoutChecker timeoutChecker) throws IOException { return DelimitedFileStructureFinder.makeDelimitedFileStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, - csvPreference, trimFields, overrides); + csvPreference, trimFields, overrides, timeoutChecker); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FieldStatsCalculator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FieldStatsCalculator.java index 130a37dbc19..40dff9116d7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FieldStatsCalculator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FieldStatsCalculator.java @@ -15,6 +15,7 @@ import java.util.List; import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; +import java.util.function.Function; import java.util.stream.Collectors; /** @@ -152,18 +153,20 @@ public class FieldStatsCalculator { List> findNumericTopHits(int numTopHits) { assert countsByNumericValue != null; - return findTopHits(numTopHits, countsByNumericValue, Comparator.comparing(Map.Entry::getKey)); + return findTopHits(numTopHits, countsByNumericValue, Comparator.comparing(Map.Entry::getKey), + FieldStats::toIntegerIfInteger); } List> findStringTopHits(int numTopHits) { - return findTopHits(numTopHits, countsByStringValue, Comparator.comparing(Map.Entry::getKey)); + return findTopHits(numTopHits, countsByStringValue, Comparator.comparing(Map.Entry::getKey), s -> s); } /** * Order by descending count, with a secondary sort to ensure reproducibility of results. 
*/ private static List> findTopHits(int numTopHits, Map countsByValue, - Comparator> secondarySort) { + Comparator> secondarySort, + Function outputMapper) { List> sortedByCount = countsByValue.entrySet().stream() .sorted(Comparator.comparing(Map.Entry::getValue, Comparator.reverseOrder()).thenComparing(secondarySort)) @@ -174,7 +177,7 @@ public class FieldStatsCalculator { for (Map.Entry entry : sortedByCount) { Map topHit = new LinkedHashMap<>(3); - topHit.put("value", entry.getKey()); + topHit.put("value", outputMapper.apply(entry.getKey())); topHit.put("count", entry.getValue()); topHits.add(topHit); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderFactory.java index bff4b2115b0..8790b8f5268 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderFactory.java @@ -39,9 +39,10 @@ public interface FileStructureFinderFactory { * @param hasByteOrderMarker Did the sample have a byte order marker? null means "not relevant". * @param overrides Stores structure decisions that have been made by the end user, and should * take precedence over anything the {@link FileStructureFinder} may decide. + * @param timeoutChecker Will abort the operation if its timeout is exceeded. * @return A {@link FileStructureFinder} object suitable for determining the structure of the supplied sample. * @throws Exception if something goes wrong during creation. */ FileStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker, - FileStructureOverrides overrides) throws Exception; + FileStructureOverrides overrides, TimeoutChecker timeoutChecker) throws Exception; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java index 7949998d16e..a508735af07 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java @@ -7,7 +7,9 @@ package org.elasticsearch.xpack.ml.filestructurefinder; import com.ibm.icu.text.CharsetDetector; import com.ibm.icu.text.CharsetMatch; +import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.unit.TimeValue; import java.io.BufferedInputStream; import java.io.BufferedReader; @@ -23,15 +25,17 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Locale; +import java.util.Objects; import java.util.Optional; import java.util.Set; +import java.util.concurrent.ScheduledExecutorService; import java.util.stream.Collectors; /** * Runs the high-level steps needed to create ingest configs for the specified file. In order: * 1. Determine the most likely character set (UTF-8, UTF-16LE, ISO-8859-2, etc.) * 2. Load a sample of the file, consisting of the first 1000 lines of the file - * 3. Determine the most likely file structure - one of ND-JSON, XML, CSV, TSV or semi-structured text + * 3. 
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java
index 7949998d16e..a508735af07 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java
@@ -7,7 +7,9 @@ package org.elasticsearch.xpack.ml.filestructurefinder;
 
 import com.ibm.icu.text.CharsetDetector;
 import com.ibm.icu.text.CharsetMatch;
+import org.elasticsearch.ElasticsearchTimeoutException;
 import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.unit.TimeValue;
 
 import java.io.BufferedInputStream;
 import java.io.BufferedReader;
@@ -23,15 +25,17 @@
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.Set;
+import java.util.concurrent.ScheduledExecutorService;
 import java.util.stream.Collectors;
 
 /**
  * Runs the high-level steps needed to create ingest configs for the specified file. In order:
  * 1. Determine the most likely character set (UTF-8, UTF-16LE, ISO-8859-2, etc.)
  * 2. Load a sample of the file, consisting of the first 1000 lines of the file
- * 3. Determine the most likely file structure - one of ND-JSON, XML, CSV, TSV or semi-structured text
+ * 3. Determine the most likely file structure - one of ND-JSON, XML, delimited or semi-structured text
  * 4. Create an appropriate structure object and delegate writing configs to it
  */
 public final class FileStructureFinderManager {
@@ -81,8 +85,18 @@ public final class FileStructureFinderManager {
 
     private static final int BUFFER_SIZE = 8192;
 
+    private final ScheduledExecutorService scheduler;
+
+    /**
+     * Create the file structure manager.
+     * @param scheduler Used for checking timeouts.
+     */
+    public FileStructureFinderManager(ScheduledExecutorService scheduler) {
+        this.scheduler = Objects.requireNonNull(scheduler);
+    }
+
     public FileStructureFinder findFileStructure(Integer idealSampleLineCount, InputStream fromFile) throws Exception {
-        return findFileStructure(idealSampleLineCount, fromFile, FileStructureOverrides.EMPTY_OVERRIDES);
+        return findFileStructure(idealSampleLineCount, fromFile, FileStructureOverrides.EMPTY_OVERRIDES, null);
     }
 
     /**
@@ -95,42 +109,49 @@ public final class FileStructureFinderManager {
      * @param overrides Aspects of the file structure that are known in advance. These take precedence over
      *                  values determined by structure analysis. An exception will be thrown if the file structure
      *                  is incompatible with an overridden value.
+     * @param timeout The maximum time the analysis is permitted to take. If it takes longer than this an
+     *                {@link ElasticsearchTimeoutException} may be thrown (although not necessarily immediately
+     *                after the timeout is exceeded).
      * @return A {@link FileStructureFinder} object from which the structure and messages can be queried.
      * @throws Exception A variety of problems could occur at various stages of the structure finding process.
      */
-    public FileStructureFinder findFileStructure(Integer idealSampleLineCount, InputStream fromFile, FileStructureOverrides overrides)
+    public FileStructureFinder findFileStructure(Integer idealSampleLineCount, InputStream fromFile, FileStructureOverrides overrides,
+                                                 TimeValue timeout)
         throws Exception {
         return findFileStructure(new ArrayList<>(), (idealSampleLineCount == null) ? DEFAULT_IDEAL_SAMPLE_LINE_COUNT : idealSampleLineCount,
-            fromFile, overrides);
+            fromFile, overrides, timeout);
     }
 
     public FileStructureFinder findFileStructure(List<String> explanation, int idealSampleLineCount, InputStream fromFile)
         throws Exception {
-        return findFileStructure(new ArrayList<>(), idealSampleLineCount, fromFile, FileStructureOverrides.EMPTY_OVERRIDES);
+        return findFileStructure(explanation, idealSampleLineCount, fromFile, FileStructureOverrides.EMPTY_OVERRIDES, null);
     }
 
     public FileStructureFinder findFileStructure(List<String> explanation, int idealSampleLineCount, InputStream fromFile,
-                                                 FileStructureOverrides overrides) throws Exception {
+                                                 FileStructureOverrides overrides, TimeValue timeout) throws Exception {
 
-        String charsetName = overrides.getCharset();
-        Reader sampleReader;
-        if (charsetName != null) {
-            // Creating the reader will throw if the specified character set does not exist
-            sampleReader = new InputStreamReader(fromFile, charsetName);
-            explanation.add("Using specified character encoding [" + charsetName + "]");
-        } else {
-            CharsetMatch charsetMatch = findCharset(explanation, fromFile);
-            charsetName = charsetMatch.getName();
-            sampleReader = charsetMatch.getReader();
+        try (TimeoutChecker timeoutChecker = new TimeoutChecker("structure analysis", timeout, scheduler)) {
+
+            String charsetName = overrides.getCharset();
+            Reader sampleReader;
+            if (charsetName != null) {
+                // Creating the reader will throw if the specified character set does not exist
+                sampleReader = new InputStreamReader(fromFile, charsetName);
+                explanation.add("Using specified character encoding [" + charsetName + "]");
+            } else {
+                CharsetMatch charsetMatch = findCharset(explanation, fromFile, timeoutChecker);
+                charsetName = charsetMatch.getName();
+                sampleReader = charsetMatch.getReader();
+            }
+
+            Tuple<String, Boolean> sampleInfo = sampleFile(sampleReader, charsetName, MIN_SAMPLE_LINE_COUNT,
+                Math.max(MIN_SAMPLE_LINE_COUNT, idealSampleLineCount), timeoutChecker);
+
+            return makeBestStructureFinder(explanation, sampleInfo.v1(), charsetName, sampleInfo.v2(), overrides, timeoutChecker);
         }
-
-        Tuple<String, Boolean> sampleInfo = sampleFile(sampleReader, charsetName, MIN_SAMPLE_LINE_COUNT,
-            Math.max(MIN_SAMPLE_LINE_COUNT, idealSampleLineCount));
-
-        return makeBestStructureFinder(explanation, sampleInfo.v1(), charsetName, sampleInfo.v2(), overrides);
     }
 
-    CharsetMatch findCharset(List<String> explanation, InputStream inputStream) throws Exception {
+    CharsetMatch findCharset(List<String> explanation, InputStream inputStream, TimeoutChecker timeoutChecker) throws Exception {
 
         // We need an input stream that supports mark and reset, so wrap the argument
         // in a BufferedInputStream if it doesn't already support this feature
@@ -141,6 +162,7 @@ public final class FileStructureFinderManager {
         // This is from ICU4J
         CharsetDetector charsetDetector = new CharsetDetector().setText(inputStream);
         CharsetMatch[] charsetMatches = charsetDetector.detectAll();
+        timeoutChecker.check("character set detection");
 
         // Determine some extra characteristics of the input to compensate for some deficiencies of ICU4J
         boolean pureAscii = true;
@@ -164,6 +186,7 @@ public final class FileStructureFinderManager {
             remainingLength -= bytesRead;
         } while (containsZeroBytes == false && remainingLength > 0);
         inputStream.reset();
+        timeoutChecker.check("character set detection");
 
         if (pureAscii) {
             // If the input is pure ASCII then many single byte character sets will match.
We want to favour @@ -220,7 +243,7 @@ public final class FileStructureFinderManager { } FileStructureFinder makeBestStructureFinder(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker, - FileStructureOverrides overrides) throws Exception { + FileStructureOverrides overrides, TimeoutChecker timeoutChecker) throws Exception { Character delimiter = overrides.getDelimiter(); Character quote = overrides.getQuote(); @@ -250,8 +273,9 @@ public final class FileStructureFinderManager { } for (FileStructureFinderFactory factory : factories) { + timeoutChecker.check("high level format detection"); if (factory.canCreateFromSample(explanation, sample)) { - return factory.createFromSample(explanation, sample, charsetName, hasByteOrderMarker, overrides); + return factory.createFromSample(explanation, sample, charsetName, hasByteOrderMarker, overrides, timeoutChecker); } } @@ -259,7 +283,8 @@ public final class FileStructureFinderManager { ((overrides.getFormat() == null) ? "any known formats" : "the specified format [" + overrides.getFormat() + "]")); } - private Tuple sampleFile(Reader reader, String charsetName, int minLines, int maxLines) throws IOException { + private Tuple sampleFile(Reader reader, String charsetName, int minLines, int maxLines, TimeoutChecker timeoutChecker) + throws IOException { int lineCount = 0; BufferedReader bufferedReader = new BufferedReader(reader); @@ -283,6 +308,7 @@ public final class FileStructureFinderManager { String line; while ((line = bufferedReader.readLine()) != null && ++lineCount <= maxLines) { sample.append(line).append('\n'); + timeoutChecker.check("sample line splitting"); } if (lineCount < minLines) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtils.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtils.java index 66ecee5b311..796587a9c58 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtils.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtils.java @@ -54,11 +54,12 @@ public final class FileStructureUtils { * @param overrides Aspects of the file structure that are known in advance. These take precedence over * values determined by structure analysis. An exception will be thrown if the file structure * is incompatible with an overridden value. + * @param timeoutChecker Will abort the operation if its timeout is exceeded. * @return A tuple of (field name, timestamp format) if one can be found, or null if * there is no consistent timestamp. */ static Tuple guessTimestampField(List explanation, List> sampleRecords, - FileStructureOverrides overrides) { + FileStructureOverrides overrides, TimeoutChecker timeoutChecker) { if (sampleRecords.isEmpty()) { return null; } @@ -80,6 +81,8 @@ public final class FileStructureUtils { break; } + timeoutChecker.check("timestamp field determination"); + TimestampMatch match = TimestampFormatFinder.findFirstFullMatch(fieldValue.toString(), overrides.getTimestampFormat()); if (match == null || match.candidateIndex != candidate.v2().candidateIndex) { if (overrides.getTimestampFormat() != null) { @@ -143,11 +146,14 @@ public final class FileStructureUtils { /** * Given the sampled records, guess appropriate Elasticsearch mappings. + * @param explanation List of reasons for making decisions. May contain items when passed and new reasons + * can be appended by this method. 
     * @param sampleRecords The sampled records.
+     * @param timeoutChecker Will abort the operation if its timeout is exceeded.
      * @return A map of field name to mapping settings.
      */
-    static Tuple<SortedMap<String, Object>, SortedMap<String, FieldStats>>
-        guessMappingsAndCalculateFieldStats(List<String> explanation, List<Map<String, Object>> sampleRecords) {
+    static Tuple<SortedMap<String, Object>, SortedMap<String, FieldStats>> guessMappingsAndCalculateFieldStats(
+        List<String> explanation, List<Map<String, Object>> sampleRecords, TimeoutChecker timeoutChecker) {
 
         SortedMap<String, Object> mappings = new TreeMap<>();
         SortedMap<String, FieldStats> fieldStats = new TreeMap<>();
@@ -163,7 +169,7 @@ public final class FileStructureUtils {
             ).collect(Collectors.toList());
 
             Tuple<Map<String, String>, FieldStats> mappingAndFieldStats =
-                guessMappingAndCalculateFieldStats(explanation, fieldName, fieldValues);
+                guessMappingAndCalculateFieldStats(explanation, fieldName, fieldValues, timeoutChecker);
             if (mappingAndFieldStats != null) {
                 if (mappingAndFieldStats.v1() != null) {
                     mappings.put(fieldName, mappingAndFieldStats.v1());
@@ -178,7 +184,8 @@ public final class FileStructureUtils {
     }
 
     static Tuple<Map<String, String>, FieldStats> guessMappingAndCalculateFieldStats(List<String> explanation,
-                                                                                     String fieldName, List<Object> fieldValues) {
+                                                                                     String fieldName, List<Object> fieldValues,
+                                                                                     TimeoutChecker timeoutChecker) {
         if (fieldValues == null || fieldValues.isEmpty()) {
             // We can get here if all the records that contained a given field had a null value for it.
             // In this case it's best not to make any statement about what the mapping type should be.
@@ -196,11 +203,13 @@ public final class FileStructureUtils {
         if (fieldValues.stream().anyMatch(value -> value instanceof List || value instanceof Object[])) {
             // Elasticsearch fields can be either arrays or single values, but array values must all have the same type
             return guessMappingAndCalculateFieldStats(explanation, fieldName,
-                fieldValues.stream().flatMap(FileStructureUtils::flatten).collect(Collectors.toList()));
+                fieldValues.stream().flatMap(FileStructureUtils::flatten).collect(Collectors.toList()), timeoutChecker);
         }
 
         Collection<String> fieldValuesAsStrings = fieldValues.stream().map(Object::toString).collect(Collectors.toList());
-        return new Tuple<>(guessScalarMapping(explanation, fieldName, fieldValuesAsStrings), calculateFieldStats(fieldValuesAsStrings));
+        Map<String, String> mapping = guessScalarMapping(explanation, fieldName, fieldValuesAsStrings);
+        timeoutChecker.check("mapping determination");
+        return new Tuple<>(mapping, calculateFieldStats(fieldValuesAsStrings, timeoutChecker));
     }
 
     private static Stream<Object> flatten(Object value) {
@@ -240,7 +249,7 @@ public final class FileStructureUtils {
         Iterator<String> iter = fieldValues.iterator();
         TimestampMatch timestampMatch = TimestampFormatFinder.findFirstFullMatch(iter.next());
         while (timestampMatch != null && iter.hasNext()) {
-            // To be mapped as type date all the values must match the same date format - it is
+            // To be mapped as type date all the values must match the same timestamp format - it is
            // not acceptable for all values to be dates, but with different formats
             if (timestampMatch.equals(TimestampFormatFinder.findFirstFullMatch(iter.next(), timestampMatch.candidateIndex)) == false) {
                 timestampMatch = null;
@@ -278,12 +287,14 @@ public final class FileStructureUtils {
     /**
      * Calculate stats for a set of field values.
      * @param fieldValues Values of the field for which field stats are to be calculated.
+     * @param timeoutChecker Will abort the operation if its timeout is exceeded.
      * @return The stats calculated from the field values.
     */
-    static FieldStats calculateFieldStats(Collection<String> fieldValues) {
+    static FieldStats calculateFieldStats(Collection<String> fieldValues, TimeoutChecker timeoutChecker) {
 
         FieldStatsCalculator calculator = new FieldStatsCalculator();
         calculator.accept(fieldValues);
+        timeoutChecker.check("field stats calculation");
         return calculator.calculate(NUM_TOP_HITS);
     }
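
The flatten step above lets a field hold a scalar in one record and an array in another: every value is reduced to a stream of scalars before the type is guessed, since Elasticsearch array fields must be homogeneous in type. The private flatten helper's body is not shown in this diff; the sketch below is an assumed equivalent:

-------------------------------------
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class FlattenSketch {

    // Flatten nested lists/arrays into a stream of scalar values, mirroring the
    // rule that a field's array and scalar values are type-guessed together.
    static Stream<Object> flatten(Object value) {
        if (value instanceof List) {
            return ((List<?>) value).stream().flatMap(FlattenSketch::flatten);
        } else if (value instanceof Object[]) {
            return Arrays.stream((Object[]) value).flatMap(FlattenSketch::flatten);
        }
        return Stream.of(value);
    }

    public static void main(String[] args) {
        List<Object> fieldValues = Arrays.asList("a", Arrays.asList("b", "c"), new Object[] { "d" });
        List<Object> scalars = fieldValues.stream().flatMap(FlattenSketch::flatten).collect(Collectors.toList());
        System.out.println(scalars); // [a, b, c, d]
    }
}
-------------------------------------
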
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreator.java
index 54be5079c9d..4c6549ad393 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreator.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreator.java
@@ -123,6 +123,7 @@ public final class GrokPatternCreator {
     private final Map<String, FieldStats> fieldStats;
     private final Map<String, Integer> fieldNameCountStore = new HashMap<>();
     private final StringBuilder overallGrokPatternBuilder = new StringBuilder();
+    private final TimeoutChecker timeoutChecker;
 
     /**
      *
@@ -130,14 +131,16 @@ public final class GrokPatternCreator {
      *                    can be appended by the methods of this class.
      * @param sampleMessages Sample messages that any Grok pattern found must match.
      * @param mappings Will be updated with mappings appropriate for the returned pattern, if non-null.
+     * @param timeoutChecker Will abort the operation if its timeout is exceeded.
      * @param fieldStats Will be updated with field stats for the fields in the returned pattern, if non-null.
      */
    public GrokPatternCreator(List<String> explanation, Collection<String> sampleMessages, Map<String, Object> mappings,
-                              Map<String, FieldStats> fieldStats) {
+                              Map<String, FieldStats> fieldStats, TimeoutChecker timeoutChecker) {
        this.explanation = explanation;
        this.sampleMessages = Collections.unmodifiableCollection(sampleMessages);
        this.mappings = mappings;
        this.fieldStats = fieldStats;
+        this.timeoutChecker = timeoutChecker;
    }
 
     /**
@@ -150,8 +153,8 @@ public final class GrokPatternCreator {
 
         for (FullMatchGrokPatternCandidate candidate : FULL_MATCH_GROK_PATTERNS) {
             if (timestampField == null || timestampField.equals(candidate.getTimeField())) {
-                if (candidate.matchesAll(sampleMessages)) {
-                    return candidate.processMatch(explanation, sampleMessages, mappings, fieldStats);
+                if (candidate.matchesAll(sampleMessages, timeoutChecker)) {
+                    return candidate.processMatch(explanation, sampleMessages, mappings, fieldStats, timeoutChecker);
                 }
             }
         }
@@ -169,8 +172,8 @@ public final class GrokPatternCreator {
     public void validateFullLineGrokPattern(String grokPattern, String timestampField) {
 
         FullMatchGrokPatternCandidate candidate = FullMatchGrokPatternCandidate.fromGrokPattern(grokPattern, timestampField);
-        if (candidate.matchesAll(sampleMessages)) {
-            candidate.processMatch(explanation, sampleMessages, mappings, fieldStats);
+        if (candidate.matchesAll(sampleMessages, timeoutChecker)) {
+            candidate.processMatch(explanation, sampleMessages, mappings, fieldStats, timeoutChecker);
         } else {
             throw new IllegalArgumentException("Supplied Grok pattern [" + grokPattern + "] does not match sample messages");
         }
@@ -213,7 +216,7 @@ public final class GrokPatternCreator {
                 Collection<String> prefaces = new ArrayList<>();
                 Collection<String> epilogues = new ArrayList<>();
                 String patternBuilderContent =
-                    chosenPattern.processCaptures(fieldNameCountStore, snippets, prefaces, epilogues, mappings, fieldStats);
+                    chosenPattern.processCaptures(fieldNameCountStore, snippets, prefaces, epilogues, mappings, fieldStats, timeoutChecker);
appendBestGrokMatchForStrings(false, prefaces, ignoreKeyValueCandidateLeft, ignoreValueOnlyCandidatesLeft); overallGrokPatternBuilder.append(patternBuilderContent); appendBestGrokMatchForStrings(isLast, epilogues, ignoreKeyValueCandidateRight, ignoreValueOnlyCandidatesRight); @@ -407,7 +410,8 @@ public final class GrokPatternCreator { * @return The string that needs to be incorporated into the overall Grok pattern for the line. */ String processCaptures(Map fieldNameCountStore, Collection snippets, Collection prefaces, - Collection epilogues, Map mappings, Map fieldStats); + Collection epilogues, Map mappings, Map fieldStats, + TimeoutChecker timeoutChecker); } /** @@ -464,7 +468,8 @@ public final class GrokPatternCreator { */ @Override public String processCaptures(Map fieldNameCountStore, Collection snippets, Collection prefaces, - Collection epilogues, Map mappings, Map fieldStats) { + Collection epilogues, Map mappings, Map fieldStats, + TimeoutChecker timeoutChecker) { Collection values = new ArrayList<>(); for (String snippet : snippets) { Map captures = grok.captures(snippet); @@ -475,6 +480,7 @@ public final class GrokPatternCreator { prefaces.add(captures.getOrDefault(PREFACE, "").toString()); values.add(captures.getOrDefault(VALUE, "").toString()); epilogues.add(captures.getOrDefault(EPILOGUE, "").toString()); + timeoutChecker.check("full message Grok pattern field extraction"); } String adjustedFieldName = buildFieldName(fieldNameCountStore, fieldName); if (mappings != null) { @@ -485,11 +491,12 @@ public final class GrokPatternCreator { if (timestampMatch != null) { fullMappingType = timestampMatch.getEsDateMappingTypeWithFormat(); } + timeoutChecker.check("mapping determination"); } mappings.put(adjustedFieldName, fullMappingType); } if (fieldStats != null) { - fieldStats.put(adjustedFieldName, FileStructureUtils.calculateFieldStats(values)); + fieldStats.put(adjustedFieldName, FileStructureUtils.calculateFieldStats(values, timeoutChecker)); } return "%{" + grokPatternName + ":" + adjustedFieldName + "}"; } @@ -535,7 +542,8 @@ public final class GrokPatternCreator { @Override public String processCaptures(Map fieldNameCountStore, Collection snippets, Collection prefaces, - Collection epilogues, Map mappings, Map fieldStats) { + Collection epilogues, Map mappings, Map fieldStats, + TimeoutChecker timeoutChecker) { if (fieldName == null) { throw new IllegalStateException("Cannot process KV matches until a field name has been determined"); } @@ -551,13 +559,15 @@ public final class GrokPatternCreator { prefaces.add(captures.getOrDefault(PREFACE, "").toString()); values.add(captures.getOrDefault(VALUE, "").toString()); epilogues.add(captures.getOrDefault(EPILOGUE, "").toString()); + timeoutChecker.check("full message Grok pattern field extraction"); } String adjustedFieldName = buildFieldName(fieldNameCountStore, fieldName); if (mappings != null) { mappings.put(adjustedFieldName, FileStructureUtils.guessScalarMapping(explanation, adjustedFieldName, values)); + timeoutChecker.check("mapping determination"); } if (fieldStats != null) { - fieldStats.put(adjustedFieldName, FileStructureUtils.calculateFieldStats(values)); + fieldStats.put(adjustedFieldName, FileStructureUtils.calculateFieldStats(values, timeoutChecker)); } return "\\b" + fieldName + "=%{USER:" + adjustedFieldName + "}"; } @@ -574,8 +584,9 @@ public final class GrokPatternCreator { @Override public String processCaptures(Map fieldNameCountStore, Collection snippets, Collection prefaces, - Collection epilogues, Map 
mappings, Map fieldStats) { - return super.processCaptures(fieldNameCountStore, snippets, prefaces, epilogues, null, fieldStats); + Collection epilogues, Map mappings, Map fieldStats, + TimeoutChecker timeoutChecker) { + return super.processCaptures(fieldNameCountStore, snippets, prefaces, epilogues, null, fieldStats, timeoutChecker); } } @@ -606,16 +617,22 @@ public final class GrokPatternCreator { return timeField; } - public boolean matchesAll(Collection sampleMessages) { - return sampleMessages.stream().allMatch(grok::match); + public boolean matchesAll(Collection sampleMessages, TimeoutChecker timeoutChecker) { + for (String sampleMessage : sampleMessages) { + if (grok.match(sampleMessage) == false) { + return false; + } + timeoutChecker.check("full message Grok pattern matching"); + } + return true; } /** * This must only be called if {@link #matchesAll} returns true. * @return A tuple of (time field name, Grok string). */ - public Tuple processMatch(List explanation, Collection sampleMessages, - Map mappings, Map fieldStats) { + public Tuple processMatch(List explanation, Collection sampleMessages, Map mappings, + Map fieldStats, TimeoutChecker timeoutChecker) { explanation.add("A full message Grok pattern [" + grokPattern.substring(2, grokPattern.length() - 1) + "] looks appropriate"); @@ -641,6 +658,7 @@ public final class GrokPatternCreator { } }); } + timeoutChecker.check("full message Grok pattern field extraction"); } for (Map.Entry> valuesForField : valuesPerField.entrySet()) { @@ -650,10 +668,11 @@ public final class GrokPatternCreator { if (fieldName.equals(timeField) == false) { mappings.put(fieldName, FileStructureUtils.guessScalarMapping(explanation, fieldName, valuesForField.getValue())); + timeoutChecker.check("mapping determination"); } } if (fieldStats != null) { - fieldStats.put(fieldName, FileStructureUtils.calculateFieldStats(valuesForField.getValue())); + fieldStats.put(fieldName, FileStructureUtils.calculateFieldStats(valuesForField.getValue(), timeoutChecker)); } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinder.java index b20658f872b..7263474505f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinder.java @@ -33,8 +33,8 @@ public class JsonFileStructureFinder implements FileStructureFinder { private final FileStructure structure; static JsonFileStructureFinder makeJsonFileStructureFinder(List explanation, String sample, String charsetName, - Boolean hasByteOrderMarker, FileStructureOverrides overrides) - throws IOException { + Boolean hasByteOrderMarker, FileStructureOverrides overrides, + TimeoutChecker timeoutChecker) throws IOException { List> sampleRecords = new ArrayList<>(); @@ -43,6 +43,7 @@ public class JsonFileStructureFinder implements FileStructureFinder { XContentParser parser = jsonXContent.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, sampleMessage); sampleRecords.add(parser.mapOrdered()); + timeoutChecker.check("JSON parsing"); } FileStructure.Builder structureBuilder = new FileStructure.Builder(FileStructure.Format.JSON) @@ -52,15 +53,17 @@ public class JsonFileStructureFinder implements FileStructureFinder { .setNumLinesAnalyzed(sampleMessages.size()) 
.setNumMessagesAnalyzed(sampleRecords.size()); - Tuple timeField = FileStructureUtils.guessTimestampField(explanation, sampleRecords, overrides); + Tuple timeField = + FileStructureUtils.guessTimestampField(explanation, sampleRecords, overrides, timeoutChecker); if (timeField != null) { structureBuilder.setTimestampField(timeField.v1()) - .setTimestampFormats(timeField.v2().dateFormats) + .setJodaTimestampFormats(timeField.v2().jodaTimestampFormats) + .setJavaTimestampFormats(timeField.v2().javaTimestampFormats) .setNeedClientTimezone(timeField.v2().hasTimezoneDependentParsing()); } Tuple, SortedMap> mappingsAndFieldStats = - FileStructureUtils.guessMappingsAndCalculateFieldStats(explanation, sampleRecords); + FileStructureUtils.guessMappingsAndCalculateFieldStats(explanation, sampleRecords, timeoutChecker); SortedMap mappings = mappingsAndFieldStats.v1(); if (timeField != null) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderFactory.java index cfeaa222679..e49f597a83c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderFactory.java @@ -68,8 +68,9 @@ public class JsonFileStructureFinderFactory implements FileStructureFinderFactor @Override public FileStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker, - FileStructureOverrides overrides) throws IOException { - return JsonFileStructureFinder.makeJsonFileStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, overrides); + FileStructureOverrides overrides, TimeoutChecker timeoutChecker) throws IOException { + return JsonFileStructureFinder.makeJsonFileStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, overrides, + timeoutChecker); } private static class ContextPrintingStringReader extends StringReader { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java index e6e445a3ff6..2d3072dda39 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java @@ -28,10 +28,11 @@ public class TextLogFileStructureFinder implements FileStructureFinder { private final FileStructure structure; static TextLogFileStructureFinder makeTextLogFileStructureFinder(List explanation, String sample, String charsetName, - Boolean hasByteOrderMarker, FileStructureOverrides overrides) { + Boolean hasByteOrderMarker, FileStructureOverrides overrides, + TimeoutChecker timeoutChecker) { String[] sampleLines = sample.split("\n"); - Tuple> bestTimestamp = mostLikelyTimestamp(sampleLines, overrides); + Tuple> bestTimestamp = mostLikelyTimestamp(sampleLines, overrides, timeoutChecker); if (bestTimestamp == null) { // Is it appropriate to treat a file that is neither structured nor has // a regular pattern of timestamps as a log file? Probably not... 
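
The check calls sprinkled through these finders are deliberately cheap: a scheduled task flips a volatile flag when the timeout elapses, and each check is just a flag read plus a conditional throw. A standalone sketch of that polling pattern; the class name, timeout, workload and message text are illustrative, not the production TimeoutChecker:

-------------------------------------
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

public class PollingTimeoutSketch {

    public static void main(String[] args) {
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        AtomicBoolean timeoutExceeded = new AtomicBoolean(false);
        // Stand-in for TimeoutChecker's scheduled task: flip a flag after 100ms.
        scheduler.schedule(() -> timeoutExceeded.set(true), 100, TimeUnit.MILLISECONDS);
        try {
            for (int line = 0; line < 10_000_000; line++) {
                doPerLineWork(line);
                // The moral equivalent of timeoutChecker.check("sample line splitting"):
                // one cheap read per iteration, an exception once the flag flips.
                if (timeoutExceeded.get()) {
                    throw new RuntimeException("Aborting sample analysis at line [" + line
                        + "] as it has taken longer than the timeout");
                }
            }
        } finally {
            scheduler.shutdown();
        }
    }

    private static void doPerLineWork(int line) {
        Math.sqrt(line); // placeholder for real per-line parsing work
    }
}
-------------------------------------
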
@@ -68,6 +69,7 @@ public class TextLogFileStructureFinder implements FileStructureFinder { ++linesInMessage; } } + timeoutChecker.check("multi-line message determination"); if (sampleMessages.size() < 2) { preamble.append(sampleLine).append('\n'); } @@ -88,7 +90,7 @@ public class TextLogFileStructureFinder implements FileStructureFinder { SortedMap fieldStats = new TreeMap<>(); - GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, sampleMessages, mappings, fieldStats); + GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, sampleMessages, mappings, fieldStats, timeoutChecker); // We can't parse directly into @timestamp using Grok, so parse to some other time field, which the date filter will then remove String interimTimestampField = overrides.getTimestampField(); String grokPattern = overrides.getGrokPattern(); @@ -98,7 +100,8 @@ public class TextLogFileStructureFinder implements FileStructureFinder { } grokPatternCreator.validateFullLineGrokPattern(grokPattern, interimTimestampField); } else { - Tuple timestampFieldAndFullMatchGrokPattern = grokPatternCreator.findFullLineGrokPattern(interimTimestampField); + Tuple timestampFieldAndFullMatchGrokPattern = + grokPatternCreator.findFullLineGrokPattern(interimTimestampField); if (timestampFieldAndFullMatchGrokPattern != null) { interimTimestampField = timestampFieldAndFullMatchGrokPattern.v1(); grokPattern = timestampFieldAndFullMatchGrokPattern.v2(); @@ -112,7 +115,8 @@ public class TextLogFileStructureFinder implements FileStructureFinder { FileStructure structure = structureBuilder .setTimestampField(interimTimestampField) - .setTimestampFormats(bestTimestamp.v1().dateFormats) + .setJodaTimestampFormats(bestTimestamp.v1().jodaTimestampFormats) + .setJavaTimestampFormats(bestTimestamp.v1().javaTimestampFormats) .setNeedClientTimezone(bestTimestamp.v1().hasTimezoneDependentParsing()) .setGrokPattern(grokPattern) .setMappings(mappings) @@ -138,7 +142,8 @@ public class TextLogFileStructureFinder implements FileStructureFinder { return structure; } - static Tuple> mostLikelyTimestamp(String[] sampleLines, FileStructureOverrides overrides) { + static Tuple> mostLikelyTimestamp(String[] sampleLines, FileStructureOverrides overrides, + TimeoutChecker timeoutChecker) { Map>> timestampMatches = new LinkedHashMap<>(); @@ -147,8 +152,8 @@ public class TextLogFileStructureFinder implements FileStructureFinder { for (String sampleLine : sampleLines) { TimestampMatch match = TimestampFormatFinder.findFirstMatch(sampleLine, overrides.getTimestampFormat()); if (match != null) { - TimestampMatch pureMatch = new TimestampMatch(match.candidateIndex, "", match.dateFormats, match.simplePattern, - match.grokPatternName, ""); + TimestampMatch pureMatch = new TimestampMatch(match.candidateIndex, "", match.jodaTimestampFormats, + match.javaTimestampFormats, match.simplePattern, match.grokPatternName, ""); timestampMatches.compute(pureMatch, (k, v) -> { if (v == null) { return new Tuple<>(weightForMatch(match.preface), new HashSet<>(Collections.singletonList(match.preface))); @@ -159,6 +164,7 @@ public class TextLogFileStructureFinder implements FileStructureFinder { }); differenceBetweenTwoHighestWeights = findDifferenceBetweenTwoHighestWeights(timestampMatches.values()); } + timeoutChecker.check("timestamp format determination"); // The highest possible weight is 1, so if the difference between the two highest weights // is less than the number of lines remaining then the leader cannot possibly be overtaken if 
(differenceBetweenTwoHighestWeights > --remainingLines) {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderFactory.java
index b92b705aaff..5931fea5f1a 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderFactory.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderFactory.java
@@ -41,8 +41,8 @@ public class TextLogFileStructureFinderFac
 
     @Override
     public FileStructureFinder createFromSample(List<String> explanation, String sample, String charsetName, Boolean hasByteOrderMarker,
-                                                FileStructureOverrides overrides) {
+                                                FileStructureOverrides overrides, TimeoutChecker timeoutChecker) {
         return TextLogFileStructureFinder.makeTextLogFileStructureFinder(explanation, sample, charsetName, hasByteOrderMarker,
-            overrides);
+            overrides, timeoutChecker);
     }
 }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutChecker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutChecker.java
new file mode 100644
index 00000000000..30c01882729
--- /dev/null
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutChecker.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ml.filestructurefinder;
+
+import org.elasticsearch.ElasticsearchTimeoutException;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.concurrent.FutureUtils;
+import org.elasticsearch.grok.Grok;
+
+import java.io.Closeable;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * This class can be used to keep track of when a long running operation started and
+ * to check whether it has run for longer than permitted.
+ *
+ * An object should be constructed at the beginning of the operation and then the
+ * {@link #check} method called periodically during the processing of the operation.
+ *
+ * This class does not use the {@link Thread#interrupt} mechanism because some other
+ * methods already convert interruptions to other types of exceptions (for example
+ * {@link Grok#captures}) and this would lead to non-uniform exception types and
+ * misleading error messages in the event that the interrupt was handled by one of
+ * these methods. The code in the long running operation would still have to
+ * periodically call {@link Thread#interrupted}, so it is not much more of an
+ * inconvenience to have to periodically call this class's {@link #check} method.
+ */
+public class TimeoutChecker implements Closeable {
+
+    private final String operation;
+    private final ScheduledFuture<?> future;
+    private final TimeValue timeout;
+    private volatile boolean timeoutExceeded;
+
+    /**
+     * The constructor should be called at the start of the operation whose duration
+     * is to be checked, as the timeout is measured relative to time of construction.
+     * @param operation A description of the operation whose duration is to be checked.
+ * @param timeout The timeout period. If null then there is no timeout. + * @param scheduler Used to schedule the timer. This may be null + * in the case where {@code timeout} is also null. + */ + public TimeoutChecker(String operation, TimeValue timeout, ScheduledExecutorService scheduler) { + this.operation = operation; + this.timeout = timeout; + this.future = (timeout != null) ? scheduler.schedule(this::setTimeoutExceeded, timeout.nanos(), TimeUnit.NANOSECONDS) : null; + } + + /** + * Stops the timer if running. + */ + @Override + public void close() { + FutureUtils.cancel(future); + } + + /** + * Check whether the operation has been running longer than the permitted time. + * @param where Which stage of the operation is currently in progress? + * @throws ElasticsearchTimeoutException If the operation is found to have taken longer than the permitted time. + */ + public void check(String where) { + + if (timeoutExceeded) { + throw new ElasticsearchTimeoutException("Aborting " + operation + " during [" + where + + "] as it has taken longer than the timeout of [" + timeout + "]"); + } + } + + private void setTimeoutExceeded() { + timeoutExceeded = true; + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java index 363b1352a54..7ed95f656b4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java @@ -12,7 +12,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.regex.Matcher; @@ -30,8 +29,12 @@ public final class TimestampFormatFinder { private static final String PREFACE = "preface"; private static final String EPILOGUE = "epilogue"; - private static final Pattern FRACTIONAL_SECOND_INTERPRETER = Pattern.compile("([:.,])(\\d{3,9})"); + private static final String FRACTIONAL_SECOND_SEPARATORS = ":.,"; + private static final Pattern FRACTIONAL_SECOND_INTERPRETER = Pattern.compile("([" + FRACTIONAL_SECOND_SEPARATORS + "])(\\d{3,9})"); private static final char DEFAULT_FRACTIONAL_SECOND_SEPARATOR = ','; + private static final Pattern FRACTIONAL_SECOND_TIMESTAMP_FORMAT_PATTERN = + Pattern.compile("([" + FRACTIONAL_SECOND_SEPARATORS + "]S{3,9})"); + private static final String DEFAULT_FRACTIONAL_SECOND_FORMAT = DEFAULT_FRACTIONAL_SECOND_SEPARATOR + "SSS"; /** * The timestamp patterns are complex and it can be slow to prove they do not @@ -48,7 +51,9 @@ public final class TimestampFormatFinder { // The end of some number (likely year or day) followed by a space then HH:mm Pattern.compile("\\d \\d{2}:\\d{2}\\b"), // HH:mm:ss surrounded by spaces - Pattern.compile(" \\d{2}:\\d{2}:\\d{2} ") + Pattern.compile(" \\d{2}:\\d{2}:\\d{2} "), + // Literal 'T' surrounded by numbers + Pattern.compile("\\dT\\d") ); /** @@ -59,81 +64,107 @@ public final class TimestampFormatFinder { // The TOMCAT_DATESTAMP format has to come before ISO8601 because it's basically ISO8601 but // with a space before the timezone, and because the timezone is optional in ISO8601 it will // be recognised as that with the timezone missed off if ISO8601 is checked first - new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss,SSS Z", 
"\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", + new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss,SSS Z", "yyyy-MM-dd HH:mm:ss,SSS XX", + "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "\\b20\\d{2}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)[:.,][0-9]{3,9} (?:Z|[+-]%{HOUR}%{MINUTE})\\b", "TOMCAT_DATESTAMP", Arrays.asList(0, 1)), // The Elasticsearch ISO8601 parser requires a literal T between the date and time, so // longhand formats are needed if there's a space instead - new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss,SSSZ", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", + new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss,SSSZ", "yyyy-MM-dd HH:mm:ss,SSSXX", + "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)[:.,][0-9]{3,9}(?:Z|[+-]%{HOUR}%{MINUTE})\\b", "TIMESTAMP_ISO8601", Arrays.asList(0, 1)), - new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss,SSSZZ", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", + new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss,SSSZZ", "yyyy-MM-dd HH:mm:ss,SSSXXX", + "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)[:.,][0-9]{3,9}[+-]%{HOUR}:%{MINUTE}\\b", "TIMESTAMP_ISO8601", Arrays.asList(0, 1)), - new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss,SSS", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", + new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss,SSS", "yyyy-MM-dd HH:mm:ss,SSS", + "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)[:.,][0-9]{3,9}\\b", "TIMESTAMP_ISO8601", Arrays.asList(0, 1)), - new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ssZ", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", + new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ssZ", "yyyy-MM-dd HH:mm:ssXX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)(?:Z|[+-]%{HOUR}%{MINUTE})\\b", "TIMESTAMP_ISO8601", Arrays.asList(0, 1)), - new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ssZZ", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", + new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ssZZ", "yyyy-MM-dd HH:mm:ssXXX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)[+-]%{HOUR}:%{MINUTE}\\b", "TIMESTAMP_ISO8601", Arrays.asList(0, 1)), - new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", + new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss", "yyyy-MM-dd HH:mm:ss", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)\\b", "TIMESTAMP_ISO8601", Arrays.asList(0, 1)), - new CandidateTimestampFormat("ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "\\b%{TIMESTAMP_ISO8601}\\b", - "TIMESTAMP_ISO8601"), - new CandidateTimestampFormat("EEE MMM dd YYYY HH:mm:ss zzz", + // When using Java time the Elasticsearch ISO8601 parser for fractional time requires that the fractional + // separator match the current JVM locale, which is too restrictive for arbitrary log file parsing + new CandidateTimestampFormat("ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSSXX", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", + "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY}T%{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)[:.,][0-9]{3,9}(?:Z|[+-]%{HOUR}%{MINUTE})\\b", + "TIMESTAMP_ISO8601", 
Collections.singletonList(3)), + new CandidateTimestampFormat("ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSSXXX", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", + "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY}T%{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)[:.,][0-9]{3,9}[+-]%{HOUR}:%{MINUTE}\\b", + "TIMESTAMP_ISO8601", Collections.singletonList(3)), + new CandidateTimestampFormat("ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSS", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", + "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY}T%{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)[:.,][0-9]{3,9}\\b", "TIMESTAMP_ISO8601", + Collections.singletonList(3)), + new CandidateTimestampFormat("ISO8601", "ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "\\b%{TIMESTAMP_ISO8601}\\b", + "TIMESTAMP_ISO8601", Collections.singletonList(3)), + new CandidateTimestampFormat("EEE MMM dd YYYY HH:mm:ss zzz", "EEE MMM dd yyyy HH:mm:ss zzz", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{4} \\d{2}:\\d{2}:\\d{2} ", "\\b%{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60) %{TZ}\\b", "DATESTAMP_RFC822", Arrays.asList(1, 2)), - new CandidateTimestampFormat("EEE MMM dd YYYY HH:mm zzz", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{4} \\d{2}:\\d{2} ", + new CandidateTimestampFormat("EEE MMM dd YYYY HH:mm zzz", "EEE MMM dd yyyy HH:mm zzz", + "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{4} \\d{2}:\\d{2} ", "\\b%{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{HOUR}:%{MINUTE} %{TZ}\\b", "DATESTAMP_RFC822", Collections.singletonList(1)), - new CandidateTimestampFormat("EEE, dd MMM YYYY HH:mm:ss ZZ", + new CandidateTimestampFormat("EEE, dd MMM YYYY HH:mm:ss ZZ", "EEE, dd MMM yyyy HH:mm:ss XXX", "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2}:\\d{2} ", "\\b%{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60) (?:Z|[+-]%{HOUR}:%{MINUTE})\\b", "DATESTAMP_RFC2822", Arrays.asList(1, 2)), - new CandidateTimestampFormat("EEE, dd MMM YYYY HH:mm:ss Z", + new CandidateTimestampFormat("EEE, dd MMM YYYY HH:mm:ss Z", "EEE, dd MMM yyyy HH:mm:ss XX", "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2}:\\d{2} ", "\\b%{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60) (?:Z|[+-]%{HOUR}%{MINUTE})\\b", "DATESTAMP_RFC2822", Arrays.asList(1, 2)), - new CandidateTimestampFormat("EEE, dd MMM YYYY HH:mm ZZ", "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2} ", + new CandidateTimestampFormat("EEE, dd MMM YYYY HH:mm ZZ", "EEE, dd MMM yyyy HH:mm XXX", + "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2} ", "\\b%{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{HOUR}:%{MINUTE} (?:Z|[+-]%{HOUR}:%{MINUTE})\\b", "DATESTAMP_RFC2822", Collections.singletonList(1)), - new CandidateTimestampFormat("EEE, dd MMM YYYY HH:mm Z", "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2} ", + new CandidateTimestampFormat("EEE, dd MMM YYYY HH:mm Z", "EEE, dd MMM yyyy HH:mm XX", + "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2} ", "\\b%{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{HOUR}:%{MINUTE} (?:Z|[+-]%{HOUR}%{MINUTE})\\b", "DATESTAMP_RFC2822", Collections.singletonList(1)), - new CandidateTimestampFormat("EEE MMM dd HH:mm:ss zzz YYYY", + new CandidateTimestampFormat("EEE MMM dd HH:mm:ss zzz YYYY", "EEE MMM dd HH:mm:ss zzz yyyy", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{2}:\\d{2}:\\d{2} [A-Z]{3,4} \\d{4}\\b", "\\b%{DAY} %{MONTH} %{MONTHDAY} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60) %{TZ} %{YEAR}\\b", "DATESTAMP_OTHER", Arrays.asList(1, 2)), - new CandidateTimestampFormat("EEE MMM dd HH:mm zzz YYYY", + new 
CandidateTimestampFormat("EEE MMM dd HH:mm zzz YYYY", "EEE MMM dd HH:mm zzz yyyy", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{2}:\\d{2} [A-Z]{3,4} \\d{4}\\b", "\\b%{DAY} %{MONTH} %{MONTHDAY} %{HOUR}:%{MINUTE} %{TZ} %{YEAR}\\b", "DATESTAMP_OTHER", Collections.singletonList(1)), - new CandidateTimestampFormat("YYYYMMddHHmmss", "\\b\\d{14}\\b", + new CandidateTimestampFormat("YYYYMMddHHmmss", "yyyyMMddHHmmss", "\\b\\d{14}\\b", "\\b20\\d{2}%{MONTHNUM2}(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01]))(?:2[0123]|[01][0-9])%{MINUTE}(?:[0-5][0-9]|60)\\b", "DATESTAMP_EVENTLOG"), - new CandidateTimestampFormat("EEE MMM dd HH:mm:ss YYYY", + new CandidateTimestampFormat("EEE MMM dd HH:mm:ss YYYY", "EEE MMM dd HH:mm:ss yyyy", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{2}:\\d{2}:\\d{2} \\d{4}\\b", "\\b%{DAY} %{MONTH} %{MONTHDAY} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60) %{YEAR}\\b", "HTTPDERROR_DATE", Arrays.asList(1, 2)), new CandidateTimestampFormat(Arrays.asList("MMM dd HH:mm:ss,SSS", "MMM d HH:mm:ss,SSS"), + Arrays.asList("MMM dd HH:mm:ss,SSS", "MMM d HH:mm:ss,SSS"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "%{MONTH} +%{MONTHDAY} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60)[:.,][0-9]{3,9}\\b", "SYSLOGTIMESTAMP", Collections.singletonList(1)), new CandidateTimestampFormat(Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), + Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{2}:\\d{2}:\\d{2}\\b", "%{MONTH} +%{MONTHDAY} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60)\\b", "SYSLOGTIMESTAMP", Collections.singletonList(1)), - new CandidateTimestampFormat("dd/MMM/YYYY:HH:mm:ss Z", "\\b\\d{2}/[A-Z]\\S{2}/\\d{4}:\\d{2}:\\d{2}:\\d{2} ", + new CandidateTimestampFormat("dd/MMM/YYYY:HH:mm:ss Z", "dd/MMM/yyyy:HH:mm:ss XX", + "\\b\\d{2}/[A-Z]\\S{2}/\\d{4}:\\d{2}:\\d{2}:\\d{2} ", "\\b%{MONTHDAY}/%{MONTH}/%{YEAR}:%{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60) [+-]?%{HOUR}%{MINUTE}\\b", "HTTPDATE"), - new CandidateTimestampFormat("MMM dd, YYYY K:mm:ss a", "\\b[A-Z]\\S{2,8} \\d{1,2}, \\d{4} \\d{1,2}:\\d{2}:\\d{2} [AP]M\\b", + new CandidateTimestampFormat("MMM dd, YYYY h:mm:ss a", "MMM dd, yyyy h:mm:ss a", + "\\b[A-Z]\\S{2,8} \\d{1,2}, \\d{4} \\d{1,2}:\\d{2}:\\d{2} [AP]M\\b", "%{MONTH} %{MONTHDAY}, 20\\d{2} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60) (?:AM|PM)\\b", "CATALINA_DATESTAMP"), new CandidateTimestampFormat(Arrays.asList("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss"), + Arrays.asList("MMM dd yyyy HH:mm:ss", "MMM d yyyy HH:mm:ss"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{4} \\d{2}:\\d{2}:\\d{2}\\b", "%{MONTH} +%{MONTHDAY} %{YEAR} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60)\\b", "CISCOTIMESTAMP", Collections.singletonList(1)), - new CandidateTimestampFormat("UNIX_MS", "\\b\\d{13}\\b", "\\b\\d{13}\\b", "POSINT"), - new CandidateTimestampFormat("UNIX", "\\b\\d{10}\\.\\d{3,9}\\b", "\\b\\d{10}\\.(?:\\d{3}){1,3}\\b", "NUMBER"), - new CandidateTimestampFormat("UNIX", "\\b\\d{10}\\b", "\\b\\d{10}\\b", "POSINT"), - new CandidateTimestampFormat("TAI64N", "\\b[0-9A-Fa-f]{24}\\b", "\\b[0-9A-Fa-f]{24}\\b", "BASE16NUM") + new CandidateTimestampFormat("UNIX_MS", "UNIX_MS", "\\b\\d{13}\\b", "\\b\\d{13}\\b", "POSINT"), + new CandidateTimestampFormat("UNIX", "UNIX", "\\b\\d{10}\\.\\d{3,9}\\b", "\\b\\d{10}\\.(?:\\d{3}){1,3}\\b", "NUMBER"), + new CandidateTimestampFormat("UNIX", "UNIX", "\\b\\d{10}\\b", "\\b\\d{10}\\b", "POSINT"), + new CandidateTimestampFormat("TAI64N", "TAI64N", "\\b[0-9A-Fa-f]{24}\\b", "\\b[0-9A-Fa-f]{24}\\b", "BASE16NUM") ); private TimestampFormatFinder() { @@ -151,7 +182,7 @@ public final class 
TimestampFormatFinder { /** * Find the first timestamp format that matches part of the supplied value. * @param text The value that the returned timestamp format must exist within. - * @param requiredFormat A date format that any returned match must support. + * @param requiredFormat A timestamp format that any returned match must support. * @return The timestamp format, or null if none matches. */ public static TimestampMatch findFirstMatch(String text, String requiredFormat) { @@ -174,14 +205,19 @@ public final class TimestampFormatFinder { * excluding a specified number of candidate formats. * @param text The value that the returned timestamp format must exist within. * @param ignoreCandidates The number of candidate formats to exclude from the search. - * @param requiredFormat A date format that any returned match must support. + * @param requiredFormat A timestamp format that any returned match must support. * @return The timestamp format, or null if none matches. */ public static TimestampMatch findFirstMatch(String text, int ignoreCandidates, String requiredFormat) { + if (ignoreCandidates >= ORDERED_CANDIDATE_FORMATS.size()) { + return null; + } Boolean[] quickRuleoutMatches = new Boolean[QUICK_RULE_OUT_PATTERNS.size()]; int index = ignoreCandidates; + String adjustedRequiredFormat = adjustRequiredFormat(requiredFormat); for (CandidateTimestampFormat candidate : ORDERED_CANDIDATE_FORMATS.subList(ignoreCandidates, ORDERED_CANDIDATE_FORMATS.size())) { - if (requiredFormat == null || candidate.dateFormats.contains(requiredFormat)) { + if (adjustedRequiredFormat == null || candidate.jodaTimestampFormats.contains(adjustedRequiredFormat) || + candidate.javaTimestampFormats.contains(adjustedRequiredFormat)) { boolean quicklyRuledOut = false; for (Integer quickRuleOutIndex : candidate.quickRuleOutIndices) { if (quickRuleoutMatches[quickRuleOutIndex] == null) { @@ -219,7 +255,7 @@ public final class TimestampFormatFinder { /** * Find the best timestamp format for matching an entire field value. * @param text The value that the returned timestamp format must match in its entirety. - * @param requiredFormat A date format that any returned match must support. + * @param requiredFormat A timestamp format that any returned match must support. * @return The timestamp format, or null if none matches. */ public static TimestampMatch findFirstFullMatch(String text, String requiredFormat) { @@ -242,13 +278,18 @@ public final class TimestampFormatFinder { * excluding a specified number of candidate formats. * @param text The value that the returned timestamp format must match in its entirety. * @param ignoreCandidates The number of candidate formats to exclude from the search. - * @param requiredFormat A date format that any returned match must support. + * @param requiredFormat A timestamp format that any returned match must support. * @return The timestamp format, or null if none matches. 
*/ public static TimestampMatch findFirstFullMatch(String text, int ignoreCandidates, String requiredFormat) { + if (ignoreCandidates >= ORDERED_CANDIDATE_FORMATS.size()) { + return null; + } int index = ignoreCandidates; + String adjustedRequiredFormat = adjustRequiredFormat(requiredFormat); for (CandidateTimestampFormat candidate : ORDERED_CANDIDATE_FORMATS.subList(ignoreCandidates, ORDERED_CANDIDATE_FORMATS.size())) { - if (requiredFormat == null || candidate.dateFormats.contains(requiredFormat)) { + if (adjustedRequiredFormat == null || candidate.jodaTimestampFormats.contains(adjustedRequiredFormat) || + candidate.javaTimestampFormats.contains(adjustedRequiredFormat)) { Map captures = candidate.strictFullMatchGrok.captures(text); if (captures != null) { return makeTimestampMatch(candidate, index, "", text, ""); @@ -259,16 +300,32 @@ public final class TimestampFormatFinder { return null; } + /** + * If a required timestamp format contains a fractional seconds component, adjust it to the + * fractional seconds format that's in the candidate timestamp formats, i.e. ",SSS". So, for + * example, "YYYY-MM-dd HH:mm:ss.SSSSSSSSS Z" would get adjusted to "YYYY-MM-dd HH:mm:ss,SSS Z". + */ + static String adjustRequiredFormat(String requiredFormat) { + + return (requiredFormat == null) ? null : + FRACTIONAL_SECOND_TIMESTAMP_FORMAT_PATTERN.matcher(requiredFormat).replaceFirst(DEFAULT_FRACTIONAL_SECOND_FORMAT); + } + private static TimestampMatch makeTimestampMatch(CandidateTimestampFormat chosenTimestampFormat, int chosenIndex, String preface, String matchedDate, String epilogue) { Tuple fractionalSecondsInterpretation = interpretFractionalSeconds(matchedDate); - List dateFormats = chosenTimestampFormat.dateFormats; + List jodaTimestampFormats = chosenTimestampFormat.jodaTimestampFormats; + List javaTimestampFormats = chosenTimestampFormat.javaTimestampFormats; Pattern simplePattern = chosenTimestampFormat.simplePattern; char separator = fractionalSecondsInterpretation.v1(); if (separator != DEFAULT_FRACTIONAL_SECOND_SEPARATOR) { - dateFormats = dateFormats.stream().map(dateFormat -> dateFormat.replace(DEFAULT_FRACTIONAL_SECOND_SEPARATOR, separator)) + jodaTimestampFormats = jodaTimestampFormats.stream() + .map(jodaTimestampFormat -> jodaTimestampFormat.replace(DEFAULT_FRACTIONAL_SECOND_SEPARATOR, separator)) .collect(Collectors.toList()); - if (dateFormats.stream().noneMatch(dateFormat -> dateFormat.startsWith("UNIX"))) { + javaTimestampFormats = javaTimestampFormats.stream() + .map(javaTimestampFormat -> javaTimestampFormat.replace(DEFAULT_FRACTIONAL_SECOND_SEPARATOR, separator)) + .collect(Collectors.toList()); + if (jodaTimestampFormats.stream().noneMatch(jodaTimestampFormat -> jodaTimestampFormat.startsWith("UNIX"))) { String patternStr = simplePattern.pattern(); int separatorPos = patternStr.lastIndexOf(DEFAULT_FRACTIONAL_SECOND_SEPARATOR); if (separatorPos >= 0) { @@ -281,11 +338,15 @@ public final class TimestampFormatFinder { int numberOfDigitsInFractionalComponent = fractionalSecondsInterpretation.v2(); if (numberOfDigitsInFractionalComponent > 3) { String fractionalSecondsFormat = "SSSSSSSSS".substring(0, numberOfDigitsInFractionalComponent); - dateFormats = dateFormats.stream().map(dateFormat -> dateFormat.replace("SSS", fractionalSecondsFormat)) + jodaTimestampFormats = jodaTimestampFormats.stream() + .map(jodaTimestampFormat -> jodaTimestampFormat.replace("SSS", fractionalSecondsFormat)) + .collect(Collectors.toList()); + javaTimestampFormats = javaTimestampFormats.stream() + 
.map(javaTimestampFormat -> javaTimestampFormat.replace("SSS", fractionalSecondsFormat)) .collect(Collectors.toList()); } - return new TimestampMatch(chosenIndex, preface, dateFormats, simplePattern, chosenTimestampFormat.standardGrokPatternName, - epilogue); + return new TimestampMatch(chosenIndex, preface, jodaTimestampFormats, javaTimestampFormats, simplePattern, + chosenTimestampFormat.standardGrokPatternName, epilogue); } /** @@ -323,7 +384,12 @@ public final class TimestampFormatFinder { /** * Time format specifier(s) that will work with Logstash and Ingest pipeline date parsers. */ - public final List dateFormats; + public final List jodaTimestampFormats; + + /** + * Time format specifier(s) that will work with Logstash and Ingest pipeline date parsers. + */ + public final List javaTimestampFormats; /** * A simple regex that will work in many languages to detect whether the timestamp format @@ -341,25 +407,25 @@ public final class TimestampFormatFinder { */ public final String epilogue; - TimestampMatch(int candidateIndex, String preface, String dateFormat, String simpleRegex, String grokPatternName, String epilogue) { - this(candidateIndex, preface, Collections.singletonList(dateFormat), simpleRegex, grokPatternName, epilogue); + TimestampMatch(int candidateIndex, String preface, String jodaTimestampFormat, String javaTimestampFormat, String simpleRegex, + String grokPatternName, String epilogue) { + this(candidateIndex, preface, Collections.singletonList(jodaTimestampFormat), Collections.singletonList(javaTimestampFormat), + simpleRegex, grokPatternName, epilogue); } - TimestampMatch(int candidateIndex, String preface, String dateFormat, String simpleRegex, String grokPatternName, String epilogue, - boolean hasFractionalComponentSmallerThanMillisecond) { - this(candidateIndex, preface, Collections.singletonList(dateFormat), simpleRegex, grokPatternName, epilogue); + TimestampMatch(int candidateIndex, String preface, List jodaTimestampFormats, List javaTimestampFormats, + String simpleRegex, String grokPatternName, String epilogue) { + this(candidateIndex, preface, jodaTimestampFormats, javaTimestampFormats, Pattern.compile(simpleRegex), grokPatternName, + epilogue); } - TimestampMatch(int candidateIndex, String preface, List dateFormats, String simpleRegex, String grokPatternName, - String epilogue) { - this(candidateIndex, preface, dateFormats, Pattern.compile(simpleRegex), grokPatternName, epilogue); - } - - TimestampMatch(int candidateIndex, String preface, List dateFormats, Pattern simplePattern, String grokPatternName, + TimestampMatch(int candidateIndex, String preface, List jodaTimestampFormats, List javaTimestampFormats, + Pattern simplePattern, String grokPatternName, String epilogue) { this.candidateIndex = candidateIndex; this.preface = preface; - this.dateFormats = dateFormats; + this.jodaTimestampFormats = Collections.unmodifiableList(jodaTimestampFormats); + this.javaTimestampFormats = Collections.unmodifiableList(javaTimestampFormats); this.simplePattern = simplePattern; this.grokPatternName = grokPatternName; this.epilogue = epilogue; @@ -370,8 +436,8 @@ public final class TimestampFormatFinder { * I.e., does the textual representation NOT define the timezone? 
*/ public boolean hasTimezoneDependentParsing() { - return dateFormats.stream() - .anyMatch(dateFormat -> dateFormat.contains("HH") && dateFormat.toLowerCase(Locale.ROOT).indexOf('z') == -1); + return javaTimestampFormats.stream().anyMatch(javaTimestampFormat -> + javaTimestampFormat.indexOf('X') == -1 && javaTimestampFormat.indexOf('z') == -1 && javaTimestampFormat.contains("mm")); } /** @@ -380,13 +446,13 @@ public final class TimestampFormatFinder { * and possibly also a "format" setting. */ public Map getEsDateMappingTypeWithFormat() { - if (dateFormats.contains("TAI64N")) { - // There's no format for TAI64N in the date formats used in mappings + if (jodaTimestampFormats.contains("TAI64N")) { + // There's no format for TAI64N in the timestamp formats used in mappings return Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "keyword"); } Map mapping = new LinkedHashMap<>(); mapping.put(FileStructureUtils.MAPPING_TYPE_SETTING, "date"); - String formats = dateFormats.stream().flatMap(format -> { + String formats = jodaTimestampFormats.stream().flatMap(format -> { switch (format) { case "ISO8601": return Stream.empty(); @@ -406,7 +472,8 @@ public final class TimestampFormatFinder { @Override public int hashCode() { - return Objects.hash(candidateIndex, preface, dateFormats, simplePattern.pattern(), grokPatternName, epilogue); + return Objects.hash(candidateIndex, preface, jodaTimestampFormats, javaTimestampFormats, simplePattern.pattern(), + grokPatternName, epilogue); } @Override @@ -421,7 +488,8 @@ public final class TimestampFormatFinder { TimestampMatch that = (TimestampMatch) other; return this.candidateIndex == that.candidateIndex && Objects.equals(this.preface, that.preface) && - Objects.equals(this.dateFormats, that.dateFormats) && + Objects.equals(this.jodaTimestampFormats, that.jodaTimestampFormats) && + Objects.equals(this.javaTimestampFormats, that.javaTimestampFormats) && Objects.equals(this.simplePattern.pattern(), that.simplePattern.pattern()) && Objects.equals(this.grokPatternName, that.grokPatternName) && Objects.equals(this.epilogue, that.epilogue); @@ -430,7 +498,8 @@ public final class TimestampFormatFinder { @Override public String toString() { return "index = " + candidateIndex + (preface.isEmpty() ? "" : ", preface = '" + preface + "'") + - ", date formats = " + dateFormats.stream().collect(Collectors.joining("', '", "[ '", "' ]")) + + ", Joda timestamp formats = " + jodaTimestampFormats.stream().collect(Collectors.joining("', '", "[ '", "' ]")) + + ", Java timestamp formats = " + javaTimestampFormats.stream().collect(Collectors.joining("', '", "[ '", "' ]")) + ", simple pattern = '" + simplePattern.pattern() + "', grok pattern = '" + grokPatternName + "'" + (epilogue.isEmpty() ? 
"" : ", epilogue = '" + epilogue + "'"); } @@ -438,29 +507,36 @@ public final class TimestampFormatFinder { static final class CandidateTimestampFormat { - final List dateFormats; + final List jodaTimestampFormats; + final List javaTimestampFormats; final Pattern simplePattern; final Grok strictSearchGrok; final Grok strictFullMatchGrok; final String standardGrokPatternName; final List quickRuleOutIndices; - CandidateTimestampFormat(String dateFormat, String simpleRegex, String strictGrokPattern, String standardGrokPatternName) { - this(Collections.singletonList(dateFormat), simpleRegex, strictGrokPattern, standardGrokPatternName); + CandidateTimestampFormat(String jodaTimestampFormat, String javaTimestampFormat, String simpleRegex, String strictGrokPattern, + String standardGrokPatternName) { + this(Collections.singletonList(jodaTimestampFormat), Collections.singletonList(javaTimestampFormat), simpleRegex, + strictGrokPattern, standardGrokPatternName); } - CandidateTimestampFormat(String dateFormat, String simpleRegex, String strictGrokPattern, String standardGrokPatternName, - List quickRuleOutIndices) { - this(Collections.singletonList(dateFormat), simpleRegex, strictGrokPattern, standardGrokPatternName, quickRuleOutIndices); + CandidateTimestampFormat(String jodaTimestampFormat, String javaTimestampFormat, String simpleRegex, String strictGrokPattern, + String standardGrokPatternName, List quickRuleOutIndices) { + this(Collections.singletonList(jodaTimestampFormat), Collections.singletonList(javaTimestampFormat), simpleRegex, + strictGrokPattern, standardGrokPatternName, quickRuleOutIndices); } - CandidateTimestampFormat(List dateFormats, String simpleRegex, String strictGrokPattern, String standardGrokPatternName) { - this(dateFormats, simpleRegex, strictGrokPattern, standardGrokPatternName, Collections.emptyList()); + CandidateTimestampFormat(List jodaTimestampFormats, List javaTimestampFormats, String simpleRegex, + String strictGrokPattern, String standardGrokPatternName) { + this(jodaTimestampFormats, javaTimestampFormats, simpleRegex, strictGrokPattern, standardGrokPatternName, + Collections.emptyList()); } - CandidateTimestampFormat(List dateFormats, String simpleRegex, String strictGrokPattern, String standardGrokPatternName, - List quickRuleOutIndices) { - this.dateFormats = dateFormats; + CandidateTimestampFormat(List jodaTimestampFormats, List javaTimestampFormats, String simpleRegex, + String strictGrokPattern, String standardGrokPatternName, List quickRuleOutIndices) { + this.jodaTimestampFormats = jodaTimestampFormats; + this.javaTimestampFormats = javaTimestampFormats; this.simplePattern = Pattern.compile(simpleRegex, Pattern.MULTILINE); // The (?m) here has the Ruby meaning, which is equivalent to (?s) in Java this.strictSearchGrok = new Grok(Grok.getBuiltinPatterns(), "(?m)%{DATA:" + PREFACE + "}" + strictGrokPattern + diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinder.java index d5e3fba34c9..1022d6d0ec0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinder.java @@ -38,7 +38,8 @@ public class XmlFileStructureFinder implements FileStructureFinder { private final FileStructure structure; static XmlFileStructureFinder 
makeXmlFileStructureFinder(List explanation, String sample, String charsetName, - Boolean hasByteOrderMarker, FileStructureOverrides overrides) + Boolean hasByteOrderMarker, FileStructureOverrides overrides, + TimeoutChecker timeoutChecker) throws IOException, ParserConfigurationException, SAXException { String messagePrefix; @@ -66,6 +67,7 @@ public class XmlFileStructureFinder implements FileStructureFinder { sampleRecords.add(docToMap(docBuilder.parse(is))); sampleMessages.add(sampleDoc); linesConsumed += numNewlinesIn(sampleDoc); + timeoutChecker.check("XML parsing"); } catch (SAXException e) { // Tolerate an incomplete last record as long as we have one complete record if (sampleRecords.isEmpty() || i < sampleDocEnds.length - 1) { @@ -90,15 +92,17 @@ public class XmlFileStructureFinder implements FileStructureFinder { .setNumMessagesAnalyzed(sampleRecords.size()) .setMultilineStartPattern("^\\s*<" + topLevelTag); - Tuple timeField = FileStructureUtils.guessTimestampField(explanation, sampleRecords, overrides); + Tuple timeField = + FileStructureUtils.guessTimestampField(explanation, sampleRecords, overrides, timeoutChecker); if (timeField != null) { structureBuilder.setTimestampField(timeField.v1()) - .setTimestampFormats(timeField.v2().dateFormats) + .setJodaTimestampFormats(timeField.v2().jodaTimestampFormats) + .setJavaTimestampFormats(timeField.v2().javaTimestampFormats) .setNeedClientTimezone(timeField.v2().hasTimezoneDependentParsing()); } Tuple, SortedMap> mappingsAndFieldStats = - FileStructureUtils.guessMappingsAndCalculateFieldStats(explanation, sampleRecords); + FileStructureUtils.guessMappingsAndCalculateFieldStats(explanation, sampleRecords, timeoutChecker); if (mappingsAndFieldStats.v2() != null) { structureBuilder.setFieldStats(mappingsAndFieldStats.v2()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderFactory.java index 3079f53931d..9f52e666a33 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderFactory.java @@ -122,8 +122,9 @@ public class XmlFileStructureFinderFactory implements FileStructureFinderFactory @Override public FileStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker, - FileStructureOverrides overrides) + FileStructureOverrides overrides, TimeoutChecker timeoutChecker) throws IOException, ParserConfigurationException, SAXException { - return XmlFileStructureFinder.makeXmlFileStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, overrides); + return XmlFileStructureFinder.makeXmlFileStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, overrides, + timeoutChecker); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index d361bb21112..f84c23db5ef 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -9,7 +9,6 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.index.IndexResponse; import 
org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterState; @@ -34,7 +33,6 @@ import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.MlTasks; -import org.elasticsearch.xpack.core.ml.action.DeleteJobAction; import org.elasticsearch.xpack.core.ml.action.PutJobAction; import org.elasticsearch.xpack.core.ml.action.RevertModelSnapshotAction; import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; @@ -47,7 +45,6 @@ import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.job.messages.Messages; -import org.elasticsearch.xpack.core.ml.job.persistence.JobStorageDeletionTask; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -489,64 +486,6 @@ public class JobManager extends AbstractComponent { } } - public void deleteJob(DeleteJobAction.Request request, JobStorageDeletionTask task, - ActionListener actionListener) { - - String jobId = request.getJobId(); - logger.debug("Deleting job '" + jobId + "'"); - - // Step 4. When the job has been removed from the cluster state, return a response - // ------- - CheckedConsumer apiResponseHandler = jobDeleted -> { - if (jobDeleted) { - logger.info("Job [" + jobId + "] deleted"); - auditor.info(jobId, Messages.getMessage(Messages.JOB_AUDIT_DELETED)); - actionListener.onResponse(new AcknowledgedResponse(true)); - } else { - actionListener.onResponse(new AcknowledgedResponse(false)); - } - }; - - // Step 3. When the physical storage has been deleted, remove from Cluster State - // ------- - CheckedConsumer deleteJobStateHandler = response -> clusterService.submitStateUpdateTask("delete-job-" + jobId, - new AckedClusterStateUpdateTask(request, ActionListener.wrap(apiResponseHandler, actionListener::onFailure)) { - - @Override - protected Boolean newResponse(boolean acknowledged) { - return acknowledged && response; - } - - @Override - public ClusterState execute(ClusterState currentState) { - MlMetadata currentMlMetadata = MlMetadata.getMlMetadata(currentState); - if (currentMlMetadata.getJobs().containsKey(jobId) == false) { - // We wouldn't have got here if the job never existed so - // the Job must have been deleted by another action. - // Don't error in this case - return currentState; - } - - MlMetadata.Builder builder = new MlMetadata.Builder(currentMlMetadata); - builder.deleteJob(jobId, currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE)); - return buildNewClusterState(currentState, builder); - } - }); - - - // Step 2. Remove the job from any calendars - CheckedConsumer removeFromCalendarsHandler = response -> { - jobResultsProvider.removeJobFromCalendars(jobId, ActionListener.wrap(deleteJobStateHandler::accept, - actionListener::onFailure )); - }; - - - // Step 1. 
Delete the physical storage - - // This task manages the physical deletion of the job state and results - task.delete(jobId, client, clusterService.state(), removeFromCalendarsHandler, actionListener::onFailure); - } - public void revertSnapshot(RevertModelSnapshotAction.Request request, ActionListener actionListener, ModelSnapshot modelSnapshot) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIterator.java index d50a7c3f8c2..e8655548592 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIterator.java @@ -16,7 +16,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; -import org.elasticsearch.xpack.core.ml.utils.MlIndicesUtils; +import org.elasticsearch.xpack.ml.utils.MlIndicesUtils; import java.util.ArrayDeque; import java.util.Collections; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobDataDeleter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java similarity index 97% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobDataDeleter.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java index cc86ce17bb9..1e0825d14f9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobDataDeleter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.core.ml.job.persistence; +package org.elasticsearch.xpack.ml.job.persistence; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -21,6 +21,8 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelState; import org.elasticsearch.xpack.core.ml.job.results.Result; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java index 5f0043b8645..233a2b4078a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; -import org.elasticsearch.xpack.core.ml.job.persistence.JobDataDeleter; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index 09a0a25cc4d..9338d24dd68 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -64,8 +64,8 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; -import org.elasticsearch.search.aggregations.metrics.Stats; import org.elasticsearch.search.aggregations.metrics.ExtendedStats; +import org.elasticsearch.search.aggregations.metrics.Stats; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortBuilders; @@ -99,11 +99,11 @@ import org.elasticsearch.xpack.core.ml.stats.CountAccumulator; import org.elasticsearch.xpack.core.ml.stats.ForecastStats; import org.elasticsearch.xpack.core.ml.stats.StatsAccumulator; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.core.ml.utils.MlIndicesUtils; import org.elasticsearch.xpack.core.security.support.Exceptions; import org.elasticsearch.xpack.ml.job.categorization.GrokPatternCreator; import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery; import 
org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams; +import org.elasticsearch.xpack.ml.utils.MlIndicesUtils; import java.io.IOException; import java.io.InputStream; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporter.java index d906ccf2f7a..0d2b1bb345a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporter.java @@ -7,10 +7,7 @@ package org.elasticsearch.xpack.ml.job.process; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; @@ -38,21 +35,6 @@ import java.util.function.Function; * function returns {@code true} the usage is logged. */ public class DataCountsReporter extends AbstractComponent { - /** - * The max percentage of date parse errors allowed before - * an exception is thrown. - */ - public static final Setting ACCEPTABLE_PERCENTAGE_DATE_PARSE_ERRORS_SETTING = Setting.intSetting("max.percent.date.errors", 25, - Property.NodeScope); - - /** - * The max percentage of out of order records allowed before - * an exception is thrown. - */ - public static final Setting ACCEPTABLE_PERCENTAGE_OUT_OF_ORDER_ERRORS_SETTING = Setting - .intSetting("max.percent.outoforder.errors", 25, Property.NodeScope); - - private static final TimeValue PERSIST_INTERVAL = TimeValue.timeValueMillis(10_000L); private final Job job; private final JobDataCountsPersister dataCountsPersister; @@ -66,9 +48,6 @@ public class DataCountsReporter extends AbstractComponent { private long logEvery = 1; private long logCount = 0; - private final int acceptablePercentDateParseErrors; - private final int acceptablePercentOutOfOrderErrors; - private Function reportingBoundaryFunction; private DataStreamDiagnostics diagnostics; @@ -84,9 +63,6 @@ public class DataCountsReporter extends AbstractComponent { incrementalRecordStats = new DataCounts(job.getId()); diagnostics = new DataStreamDiagnostics(job, counts); - acceptablePercentDateParseErrors = ACCEPTABLE_PERCENTAGE_DATE_PARSE_ERRORS_SETTING.get(settings); - acceptablePercentOutOfOrderErrors = ACCEPTABLE_PERCENTAGE_OUT_OF_ORDER_ERRORS_SETTING.get(settings); - reportingBoundaryFunction = this::reportEvery10000Records; } @@ -242,14 +218,6 @@ public class DataCountsReporter extends AbstractComponent { return totalRecordStats.getInputFieldCount(); } - public int getAcceptablePercentDateParseErrors() { - return acceptablePercentDateParseErrors; - } - - public int getAcceptablePercentOutOfOrderErrors() { - return acceptablePercentOutOfOrderErrors; - } - public void setAnalysedFieldsPerRecord(long value) { analyzedFieldsPerRecord = value; } @@ -352,4 +320,5 @@ public class DataCountsReporter extends AbstractComponent { diagnostics.resetCounts(); } + } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java index 0094eba97ce..4942200606d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java @@ -83,8 +83,16 @@ public class AutodetectBuilder { /** * The maximum number of anomaly records that will be written each bucket */ + @Deprecated public static final Setting MAX_ANOMALY_RECORDS_SETTING = Setting.intSetting("max.anomaly.records", DEFAULT_MAX_NUM_RECORDS, - Setting.Property.NodeScope); + Setting.Property.NodeScope, Setting.Property.Deprecated); + // Though this setting is dynamic, it is only set when a new job is opened. So, already running jobs will not get the updated value. + public static final Setting MAX_ANOMALY_RECORDS_SETTING_DYNAMIC = Setting.intSetting( + "xpack.ml.max_anomaly_records", + MAX_ANOMALY_RECORDS_SETTING, + 1, + Setting.Property.NodeScope, + Setting.Property.Dynamic); /** * Config setting storing the flag that disables model persistence @@ -244,9 +252,8 @@ public class AutodetectBuilder { return command; } - static String maxAnomalyRecordsArg(Settings settings) { - return "--maxAnomalyRecords=" + MAX_ANOMALY_RECORDS_SETTING.get(settings); + return "--maxAnomalyRecords=" + MAX_ANOMALY_RECORDS_SETTING_DYNAMIC.get(settings); } private static String getTimeFieldOrDefault(Job job) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java index fa05c2e63ee..ec6b67da1dc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java @@ -493,8 +493,10 @@ public class AutodetectProcessManager extends AbstractComponent { Job job = jobManager.getJobOrThrowIfUnknown(jobId); // A TP with no queue, so that we fail immediately if there are no threads available ExecutorService autoDetectExecutorService = threadPool.executor(MachineLearning.AUTODETECT_THREAD_POOL_NAME); - DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, job, autodetectParams.dataCounts(), - jobDataCountsPersister); + DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, + job, + autodetectParams.dataCounts(), + jobDataCountsPersister); ScoresUpdater scoresUpdater = new ScoresUpdater(job, jobResultsProvider, new JobRenormalizedResultsPersister(job.getId(), settings, client), normalizerFactory); ExecutorService renormalizerExecutorService = threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java index 01ad0bec85a..06055476f76 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.job.process.autodetect; import org.apache.logging.log4j.Logger; import
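One consequence of the settings pair added above is worth spelling out: the new dynamic key falls back to the deprecated one, so existing node configurations keep working while emitting a deprecation warning. A hedged sketch of the lookup behaviour (this assumes both settings are registered with the node and cluster settings, which happens outside this hunk):

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectBuilder;

    class MaxAnomalyRecordsSketch {
        static void demo() {
            // Only the old key is set: the dynamic setting falls back to it.
            Settings oldStyle = Settings.builder().put("max.anomaly.records", 250).build();
            assert AutodetectBuilder.MAX_ANOMALY_RECORDS_SETTING_DYNAMIC.get(oldStyle) == 250;

            // The new key wins when both are present.
            Settings newStyle = Settings.builder()
                .put("max.anomaly.records", 250)
                .put("xpack.ml.max_anomaly_records", 600)
                .build();
            assert AutodetectBuilder.MAX_ANOMALY_RECORDS_SETTING_DYNAMIC.get(newStyle) == 600;
        }
    }

Because the value is only copied into the process settings when a job is opened (see the NativeAutodetectProcessFactory hunk that follows), a runtime update affects newly opened jobs but not ones that are already running.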
org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; @@ -40,12 +41,15 @@ public class NativeAutodetectProcessFactory implements AutodetectProcessFactory private final Environment env; private final Settings settings; private final NativeController nativeController; + private final ClusterService clusterService; - public NativeAutodetectProcessFactory(Environment env, Settings settings, NativeController nativeController, Client client) { + public NativeAutodetectProcessFactory(Environment env, Settings settings, NativeController nativeController, Client client, + ClusterService clusterService) { this.env = Objects.requireNonNull(env); this.settings = Objects.requireNonNull(settings); this.nativeController = Objects.requireNonNull(nativeController); this.client = client; + this.clusterService = clusterService; } @Override @@ -85,8 +89,15 @@ public class NativeAutodetectProcessFactory implements AutodetectProcessFactory private void createNativeProcess(Job job, AutodetectParams autodetectParams, ProcessPipes processPipes, List filesToDelete) { try { + + Settings updatedSettings = Settings.builder() + .put(settings) + .put(AutodetectBuilder.MAX_ANOMALY_RECORDS_SETTING_DYNAMIC.getKey(), + clusterService.getClusterSettings().get(AutodetectBuilder.MAX_ANOMALY_RECORDS_SETTING_DYNAMIC)) + .build(); + AutodetectBuilder autodetectBuilder = new AutodetectBuilder(job, filesToDelete, LOGGER, env, - settings, nativeController, processPipes) + updatedSettings, nativeController, processPipes) .referencedFilters(autodetectParams.filters()) .scheduledEvents(autodetectParams.scheduledEvents()); @@ -95,7 +106,6 @@ public class NativeAutodetectProcessFactory implements AutodetectProcessFactory if (autodetectParams.quantiles() != null) { autodetectBuilder.quantiles(autodetectParams.quantiles()); } - autodetectBuilder.build(); processPipes.connectStreams(PROCESS_STARTUP_TIMEOUT); } catch (IOException e) { @@ -104,5 +114,6 @@ public class NativeAutodetectProcessFactory implements AutodetectProcessFactory throw ExceptionsHelper.serverError(msg, e); } } + } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/Normalizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/Normalizer.java index 22e7d3ba995..74eb01987c5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/Normalizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/Normalizer.java @@ -190,23 +190,34 @@ public class Normalizer { * Encapsulate the logic for deciding whether a change to a normalized score * is "big". *

<p>
- * Current logic is that a big change is a change of at least 1 or more than
- * than 50% of the higher of the two values.
+ * Current logic is that a change is considered big if any of the following criteria are met:
+ * <ul>
+ * <li>the change would result in a change of colour in the UI
+ * (e.g. severity would be changed from WARNING to MINOR)</li>
+ * <li>the change is at least 1.5</li>
+ * <li>the change in values is greater than 67% of the higher of the two values.</li>
+ * </ul>
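Since the three criteria interact, a worked example may help; the new implementation follows just below, and these calls refer to it:

    isBigUpdate(24.9, 25.1);  // true:  the values fall in different 25-point bands, so the UI colour changes
    isBigUpdate(60.0, 62.0);  // true:  the absolute change of 2.0 is at least 1.5
    isBigUpdate(2.0, 0.6);    // true:  0.6 is below 2.0 * 0.33, a drop of more than 67%
    isBigUpdate(90.0, 89.0);  // false: same band, change below 1.5, and within the 67% ratio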
+ * These values have been chosen through a process of experimentation; in particular, it was desired to reduce + * the number of updates written to the results index due to renormalization events for performance reasons + * while not changing the normalized scores greatly. * * @param oldVal The old value of the normalized score * @param newVal The new value of the normalized score * @return true if the update is considered "big" */ private static boolean isBigUpdate(double oldVal, double newVal) { - if (Math.abs(oldVal - newVal) >= 1.0) { + if ((int) (oldVal / 25.0) != (int) (newVal / 25.0)) { + return true; + } + if (Math.abs(oldVal - newVal) >= 1.5) { return true; } if (oldVal > newVal) { - if (oldVal * 0.5 > newVal) { + if (oldVal * 0.33 > newVal) { return true; } } else { - if (newVal * 0.5 > oldVal) { + if (newVal * 0.33 > oldVal) { return true; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java index 316a4b56e4a..0528c30e059 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.rest; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -20,9 +21,12 @@ import org.elasticsearch.xpack.ml.filestructurefinder.FileStructureFinderManager import java.io.IOException; import java.util.Collections; import java.util.Set; +import java.util.concurrent.TimeUnit; public class RestFindFileStructureAction extends BaseRestHandler { + private static final TimeValue DEFAULT_TIMEOUT = new TimeValue(25, TimeUnit.SECONDS); + public RestFindFileStructureAction(Settings settings, RestController controller) { super(settings); controller.registerHandler(RestRequest.Method.POST, MachineLearning.BASE_PATH + "find_file_structure", this); @@ -39,6 +43,8 @@ public class RestFindFileStructureAction extends BaseRestHandler { FindFileStructureAction.Request request = new FindFileStructureAction.Request(); request.setLinesToSample(restRequest.paramAsInt(FindFileStructureAction.Request.LINES_TO_SAMPLE.getPreferredName(), FileStructureFinderManager.DEFAULT_IDEAL_SAMPLE_LINE_COUNT)); + request.setTimeout(TimeValue.parseTimeValue(restRequest.param(FindFileStructureAction.Request.TIMEOUT.getPreferredName()), + DEFAULT_TIMEOUT, FindFileStructureAction.Request.TIMEOUT.getPreferredName())); request.setCharset(restRequest.param(FindFileStructureAction.Request.CHARSET.getPreferredName())); request.setFormat(restRequest.param(FindFileStructureAction.Request.FORMAT.getPreferredName())); request.setColumnNames(restRequest.paramAsStringArray(FindFileStructureAction.Request.COLUMN_NAMES.getPreferredName(), null)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java index ceeb55442e5..5b4895f6bbb 100644 ---
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java @@ -12,11 +12,11 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.GetModelSnapshotsAction; import org.elasticsearch.xpack.core.ml.action.GetModelSnapshotsAction.Request; import org.elasticsearch.xpack.core.ml.action.util.PageParams; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -30,7 +30,6 @@ public class RestGetModelSnapshotsAction extends BaseRestHandler { private final String DEFAULT_SORT = null; private final String DEFAULT_START = null; private final String DEFAULT_END = null; - private final String DEFAULT_DESCRIPTION = null; private final boolean DEFAULT_DESC_ORDER = true; public RestGetModelSnapshotsAction(Settings settings, RestController controller) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndicesUtils.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/MlIndicesUtils.java similarity index 93% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndicesUtils.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/MlIndicesUtils.java index c916b6664d2..63557c3bf23 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndicesUtils.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/MlIndicesUtils.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.core.ml.utils; +package org.elasticsearch.xpack.ml.utils; import org.elasticsearch.action.support.IndicesOptions; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java index ff6a5451268..6a2f25f6a30 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java @@ -347,7 +347,7 @@ public class MachineLearningFeatureSetTests extends ESTestCase { roles.add(DiscoveryNode.Role.DATA); roles.add(DiscoveryNode.Role.MASTER); roles.add(DiscoveryNode.Role.INGEST); - nodesBuilder.add(new DiscoveryNode(randomAlphaOfLength(i+1), + nodesBuilder.add(new DiscoveryNode("ml-feature-set-given-ml-node-" + i, new TransportAddress(TransportAddress.META_ADDRESS, 9100 + i), attrs, roles, @@ -359,7 +359,7 @@ public class MachineLearningFeatureSetTests extends ESTestCase { roles.add(DiscoveryNode.Role.DATA); roles.add(DiscoveryNode.Role.MASTER); roles.add(DiscoveryNode.Role.INGEST); - nodesBuilder.add(new DiscoveryNode(randomAlphaOfLength(i+1), + nodesBuilder.add(new DiscoveryNode("ml-feature-set-given-non-ml-node-" + i, new TransportAddress(TransportAddress.META_ADDRESS, 9300 + i), attrs, roles, diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java index 903ab4af115..93bdc125890 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.Term; import org.junit.Before; import java.io.BufferedReader; @@ -33,7 +34,6 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.Term; import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.createHistogramBucket; import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.createMax; import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.createTerms; @@ -254,7 +254,7 @@ public class AggregationDataExtractorTests extends ESTestCase { extractor.setNextResponse(createResponseWithShardFailures()); assertThat(extractor.hasNext(), is(true)); - IOException e = expectThrows(IOException.class, extractor::next); + expectThrows(IOException.class, extractor::next); } public void testExtractionGivenInitSearchResponseEncounteredUnavailableShards() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java index decc61a5397..10bdf0d16d8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java @@ -30,7 +30,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, - FileStructureOverrides.EMPTY_OVERRIDES); + FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -50,7 +50,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { assertEquals(Arrays.asList("time", "message"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("time", structure.getTimestampField()); - assertEquals(Collections.singletonList("ISO8601"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenCompleteCsvAndColumnNamesOverride() throws Exception { @@ -64,7 +64,8 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, overrides); + FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, overrides, + NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -84,7 +85,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { assertEquals(Arrays.asList("my_time", "my_message"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("my_time", structure.getTimestampField()); - assertEquals(Collections.singletonList("ISO8601"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenCompleteCsvAndHasHeaderRowOverride() throws Exception { @@ -100,7 +101,8 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, overrides); + FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, overrides, + NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -120,7 +122,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { assertEquals(Arrays.asList("column1", "column2"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertNull(structure.getTimestampField()); - assertNull(structure.getTimestampFormats()); + assertNull(structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenCsvWithIncompleteLastRecord() throws Exception { @@ -133,7 +135,7 @@ public class 
DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, - FileStructureOverrides.EMPTY_OVERRIDES); + FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -153,7 +155,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { assertEquals(Arrays.asList("message", "time", "count"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("time", structure.getTimestampField()); - assertEquals(Collections.singletonList("ISO8601"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenCsvWithTrailingNulls() throws Exception { @@ -168,7 +170,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, - FileStructureOverrides.EMPTY_OVERRIDES); + FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -193,7 +195,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { "tip_amount", "tolls_amount", "improvement_surcharge", "total_amount", "column18", "column19"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("tpep_pickup_datetime", structure.getTimestampField()); - assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenCsvWithTrailingNullsAndOverriddenTimeField() throws Exception { @@ -212,7 +214,8 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, overrides); + FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, overrides, + NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -237,7 +240,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { "tip_amount", "tolls_amount", "improvement_surcharge", "total_amount", "column18", "column19"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("tpep_dropoff_datetime", structure.getTimestampField()); - assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenCsvWithTrailingNullsExceptHeader() throws Exception { @@ -252,7 +255,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); FileStructureFinder structureFinder = 
csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, - FileStructureOverrides.EMPTY_OVERRIDES); + FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -277,7 +280,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { "tip_amount", "tolls_amount", "improvement_surcharge", "total_amount"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("tpep_pickup_datetime", structure.getTimestampField()); - assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenCsvWithTrailingNullsExceptHeaderAndColumnNamesOverride() throws Exception { @@ -298,7 +301,8 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, overrides); + FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, overrides, + NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -324,7 +328,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("my_tpep_pickup_datetime", structure.getTimestampField()); - assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenCsvWithTimeLastColumn() throws Exception { @@ -336,7 +340,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker, - FileStructureOverrides.EMPTY_OVERRIDES); + FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -357,7 +361,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { assertEquals(Arrays.asList("pos_id", "trip_id", "latitude", "longitude", "altitude", "timestamp"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("timestamp", structure.getTimestampField()); - assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss.SSSSSS"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss.SSSSSS"), structure.getJodaTimestampFormats()); } public void testFindHeaderFromSampleGivenHeaderInSample() throws IOException { @@ -368,20 +372,21 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { "2014-06-23 00:00:01Z,KLM,1355.4812,farequote\n"; Tuple header = DelimitedFileStructureFinder.findHeaderFromSample(explanation, - DelimitedFileStructureFinder.readRows(withHeader, CsvPreference.EXCEL_PREFERENCE).v1(), FileStructureOverrides.EMPTY_OVERRIDES); + DelimitedFileStructureFinder.readRows(withHeader, CsvPreference.EXCEL_PREFERENCE, NOOP_TIMEOUT_CHECKER).v1(), + 
FileStructureOverrides.EMPTY_OVERRIDES); assertTrue(header.v1()); assertThat(header.v2(), arrayContaining("time", "airline", "responsetime", "sourcetype")); } public void testFindHeaderFromSampleGivenHeaderNotInSample() throws IOException { - String withoutHeader = "2014-06-23 00:00:00Z,AAL,132.2046,farequote\n" + + String noHeader = "2014-06-23 00:00:00Z,AAL,132.2046,farequote\n" + "2014-06-23 00:00:00Z,JZA,990.4628,farequote\n" + "2014-06-23 00:00:01Z,JBU,877.5927,farequote\n" + "2014-06-23 00:00:01Z,KLM,1355.4812,farequote\n"; Tuple header = DelimitedFileStructureFinder.findHeaderFromSample(explanation, - DelimitedFileStructureFinder.readRows(withoutHeader, CsvPreference.EXCEL_PREFERENCE).v1(), + DelimitedFileStructureFinder.readRows(noHeader, CsvPreference.EXCEL_PREFERENCE, NOOP_TIMEOUT_CHECKER).v1(), FileStructureOverrides.EMPTY_OVERRIDES); assertFalse(header.v1()); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FieldStatsCalculatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FieldStatsCalculatorTests.java index 08035dc741d..30445a4a77c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FieldStatsCalculatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FieldStatsCalculatorTests.java @@ -73,16 +73,16 @@ public class FieldStatsCalculatorTests extends FileStructureTestCase { FieldStatsCalculator calculator = new FieldStatsCalculator(); - calculator.accept(Arrays.asList("4", "4", "7", "4", "6", "5", "6", "5", "16", "4", "5")); + calculator.accept(Arrays.asList("4", "4", "7", "4", "6", "5.2", "6", "5.2", "16", "4", "5.2")); List> topHits = calculator.findNumericTopHits(3); assertEquals(3, topHits.size()); - assertEquals(4.0, topHits.get(0).get("value")); + assertEquals(4, topHits.get(0).get("value")); assertEquals(4, topHits.get(0).get("count")); - assertEquals(5.0, topHits.get(1).get("value")); + assertEquals(5.2, topHits.get(1).get("value")); assertEquals(3, topHits.get(1).get("count")); - assertEquals(6.0, topHits.get(2).get("value")); + assertEquals(6, topHits.get(2).get("value")); assertEquals(2, topHits.get(2).get("count")); } @@ -124,25 +124,25 @@ public class FieldStatsCalculatorTests extends FileStructureTestCase { FieldStatsCalculator calculator = new FieldStatsCalculator(); - calculator.accept(Arrays.asList("4", "4", "7", "4", "6", "5", "6", "5", "16", "4", "5")); + calculator.accept(Arrays.asList("4.5", "4.5", "7", "4.5", "6", "5", "6", "5", "25", "4.5", "5")); FieldStats stats = calculator.calculate(3); assertEquals(11L, stats.getCount()); assertEquals(5, stats.getCardinality()); - assertEquals(4.0, stats.getMinValue(), 1e-10); - assertEquals(16.0, stats.getMaxValue(), 1e-10); - assertEquals(6.0, stats.getMeanValue(), 1e-10); + assertEquals(4.5, stats.getMinValue(), 1e-10); + assertEquals(25.0, stats.getMaxValue(), 1e-10); + assertEquals(7.0, stats.getMeanValue(), 1e-10); assertEquals(5.0, stats.getMedianValue(), 1e-10); List> topHits = stats.getTopHits(); assertEquals(3, topHits.size()); - assertEquals(4.0, topHits.get(0).get("value")); + assertEquals(4.5, topHits.get(0).get("value")); assertEquals(4, topHits.get(0).get("count")); - assertEquals(5.0, topHits.get(1).get("value")); + assertEquals(5, topHits.get(1).get("value")); assertEquals(3, topHits.get(1).get("count")); - assertEquals(6.0, topHits.get(2).get("value")); + assertEquals(6, topHits.get(2).get("value")); assertEquals(2, 
topHits.get(2).get("count")); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java index 00929ff474c..4329e076ce6 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java @@ -6,26 +6,50 @@ package org.elasticsearch.xpack.ml.filestructurefinder; import com.ibm.icu.text.CharsetMatch; +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructure; +import org.junit.After; +import org.junit.Before; import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.PipedInputStream; +import java.io.PipedOutputStream; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.Arrays; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import static org.elasticsearch.xpack.ml.filestructurefinder.FileStructureOverrides.EMPTY_OVERRIDES; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.startsWith; import static org.hamcrest.core.IsInstanceOf.instanceOf; public class FileStructureFinderManagerTests extends FileStructureTestCase { - private FileStructureFinderManager structureFinderManager = new FileStructureFinderManager(); + private ScheduledExecutorService scheduler; + private FileStructureFinderManager structureFinderManager; + + @Before + public void setup() { + scheduler = new ScheduledThreadPoolExecutor(1); + structureFinderManager = new FileStructureFinderManager(scheduler); + } + + @After + public void shutdownScheduler() { + scheduler.shutdown(); + } public void testFindCharsetGivenCharacterWidths() throws Exception { for (Charset charset : Arrays.asList(StandardCharsets.UTF_8, StandardCharsets.UTF_16LE, StandardCharsets.UTF_16BE)) { CharsetMatch charsetMatch = structureFinderManager.findCharset(explanation, - new ByteArrayInputStream(TEXT_SAMPLE.getBytes(charset))); + new ByteArrayInputStream(TEXT_SAMPLE.getBytes(charset)), NOOP_TIMEOUT_CHECKER); assertEquals(charset.name(), charsetMatch.getName()); } } @@ -41,7 +65,8 @@ public class FileStructureFinderManagerTests extends FileStructureTestCase { } try { - CharsetMatch charsetMatch = structureFinderManager.findCharset(explanation, new ByteArrayInputStream(binaryBytes)); + CharsetMatch charsetMatch = structureFinderManager.findCharset(explanation, new ByteArrayInputStream(binaryBytes), + NOOP_TIMEOUT_CHECKER); assertThat(charsetMatch.getName(), startsWith("UTF-16")); } catch (IllegalArgumentException e) { assertEquals("Could not determine a usable character encoding for the input - could it be binary data?", e.getMessage()); @@ -50,7 +75,7 @@ public class FileStructureFinderManagerTests extends FileStructureTestCase { public void testMakeBestStructureGivenJson() throws Exception { assertThat(structureFinderManager.makeBestStructureFinder(explanation, JSON_SAMPLE, StandardCharsets.UTF_8.name(), randomBoolean(), - EMPTY_OVERRIDES), instanceOf(JsonFileStructureFinder.class)); + EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER), 
instanceOf(JsonFileStructureFinder.class)); } public void testMakeBestStructureGivenJsonAndDelimitedOverride() throws Exception { @@ -61,12 +86,12 @@ public class FileStructureFinderManagerTests extends FileStructureTestCase { .setFormat(FileStructure.Format.DELIMITED).setQuote('\'').build(); assertThat(structureFinderManager.makeBestStructureFinder(explanation, JSON_SAMPLE, StandardCharsets.UTF_8.name(), randomBoolean(), - overrides), instanceOf(DelimitedFileStructureFinder.class)); + overrides, NOOP_TIMEOUT_CHECKER), instanceOf(DelimitedFileStructureFinder.class)); } public void testMakeBestStructureGivenXml() throws Exception { assertThat(structureFinderManager.makeBestStructureFinder(explanation, XML_SAMPLE, StandardCharsets.UTF_8.name(), randomBoolean(), - EMPTY_OVERRIDES), instanceOf(XmlFileStructureFinder.class)); + EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER), instanceOf(XmlFileStructureFinder.class)); } public void testMakeBestStructureGivenXmlAndTextOverride() throws Exception { @@ -74,12 +99,12 @@ public class FileStructureFinderManagerTests extends FileStructureTestCase { FileStructureOverrides overrides = FileStructureOverrides.builder().setFormat(FileStructure.Format.SEMI_STRUCTURED_TEXT).build(); assertThat(structureFinderManager.makeBestStructureFinder(explanation, XML_SAMPLE, StandardCharsets.UTF_8.name(), randomBoolean(), - overrides), instanceOf(TextLogFileStructureFinder.class)); + overrides, NOOP_TIMEOUT_CHECKER), instanceOf(TextLogFileStructureFinder.class)); } public void testMakeBestStructureGivenCsv() throws Exception { assertThat(structureFinderManager.makeBestStructureFinder(explanation, CSV_SAMPLE, StandardCharsets.UTF_8.name(), randomBoolean(), - EMPTY_OVERRIDES), instanceOf(DelimitedFileStructureFinder.class)); + EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER), instanceOf(DelimitedFileStructureFinder.class)); } public void testMakeBestStructureGivenCsvAndJsonOverride() { @@ -88,14 +113,14 @@ public class FileStructureFinderManagerTests extends FileStructureTestCase { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> structureFinderManager.makeBestStructureFinder(explanation, CSV_SAMPLE, StandardCharsets.UTF_8.name(), randomBoolean(), - overrides)); + overrides, NOOP_TIMEOUT_CHECKER)); assertEquals("Input did not match the specified format [json]", e.getMessage()); } public void testMakeBestStructureGivenText() throws Exception { assertThat(structureFinderManager.makeBestStructureFinder(explanation, TEXT_SAMPLE, StandardCharsets.UTF_8.name(), randomBoolean(), - EMPTY_OVERRIDES), instanceOf(TextLogFileStructureFinder.class)); + EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER), instanceOf(TextLogFileStructureFinder.class)); } public void testMakeBestStructureGivenTextAndDelimitedOverride() throws Exception { @@ -105,6 +130,48 @@ public class FileStructureFinderManagerTests extends FileStructureTestCase { .setFormat(FileStructure.Format.DELIMITED).setDelimiter(':').build(); assertThat(structureFinderManager.makeBestStructureFinder(explanation, TEXT_SAMPLE, StandardCharsets.UTF_8.name(), randomBoolean(), - overrides), instanceOf(DelimitedFileStructureFinder.class)); + overrides, NOOP_TIMEOUT_CHECKER), instanceOf(DelimitedFileStructureFinder.class)); + } + + public void testFindFileStructureTimeout() throws IOException, InterruptedException { + + // The number of lines might need increasing in the future if computers get really fast, + // but currently we're not even close to finding the structure of this much data in 10ms + int linesOfJunk = 10000; + TimeValue 
timeout = new TimeValue(10, TimeUnit.MILLISECONDS); + + try (PipedOutputStream generator = new PipedOutputStream()) { + + Thread junkProducer = new Thread(() -> { + try { + // This is not just junk; this is comma separated junk + for (int count = 0; count < linesOfJunk; ++count) { + generator.write(randomAlphaOfLength(100).getBytes(StandardCharsets.UTF_8)); + generator.write(','); + generator.write(randomAlphaOfLength(100).getBytes(StandardCharsets.UTF_8)); + generator.write(','); + generator.write(randomAlphaOfLength(100).getBytes(StandardCharsets.UTF_8)); + generator.write('\n'); + } + } catch (IOException e) { + // Expected if timeout occurs and the input stream is closed before junk generation is complete + } + }); + + try (InputStream bigInput = new PipedInputStream(generator)) { + + junkProducer.start(); + + ElasticsearchTimeoutException e = expectThrows(ElasticsearchTimeoutException.class, + () -> structureFinderManager.findFileStructure(explanation, linesOfJunk - 1, bigInput, EMPTY_OVERRIDES, timeout)); + + assertThat(e.getMessage(), startsWith("Aborting structure analysis during [")); + assertThat(e.getMessage(), endsWith("] as it has taken longer than the timeout of [" + timeout + "]")); + } + + // This shouldn't take anything like 10 seconds, but VMs can stall so it's best to + // set the timeout fairly high to avoid the work that spurious failures cause + junkProducer.join(10000L); + } } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureTestCase.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureTestCase.java index 6246a7ad01e..1a0da875e67 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureTestCase.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureTestCase.java @@ -68,6 +68,9 @@ public abstract class FileStructureTestCase extends ESTestCase { "\n" + "\n"; + // This doesn't need closing because it has an infinite timeout + protected static final TimeoutChecker NOOP_TIMEOUT_CHECKER = new TimeoutChecker("unit test", null, null); + protected List explanation; @Before diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtilsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtilsTests.java index 8dbfb6a8047..c0e175f27b2 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtilsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtilsTests.java @@ -35,11 +35,11 @@ public class FileStructureUtilsTests extends FileStructureTestCase { public void testGuessTimestampGivenSingleSampleSingleField() { Map sample = Collections.singletonMap("field1", "2018-05-24T17:28:31,735"); - Tuple match = - FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample), EMPTY_OVERRIDES); + Tuple match = FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample), + EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); assertNotNull(match); assertEquals("field1", match.v1()); - assertThat(match.v2().dateFormats, contains("ISO8601")); + assertThat(match.v2().jodaTimestampFormats, contains("ISO8601")); assertEquals("TIMESTAMP_ISO8601", match.v2().grokPatternName); } @@ -48,11 +48,11 @@ public class FileStructureUtilsTests extends FileStructureTestCase { 
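// The assertions that follow pin down the observable contract of
// FileStructureUtils.guessTimestampField now that it takes the new
// TimeoutChecker argument: a (field name, TimestampMatch) Tuple comes back
// when every sample agrees on a single timestamp field, null comes back when
// the samples disagree, and IllegalArgumentException is thrown when an
// override names an absent field or a format that does not match. A minimal
// hypothetical call, reusing names these tests already define (illustrative
// sketch only):
//
//     Tuple<String, TimestampMatch> match = FileStructureUtils.guessTimestampField(
//         explanation, Collections.singletonList(sample), EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
//     assert "field1".equals(match.v1());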
FileStructureOverrides overrides = FileStructureOverrides.builder().setTimestampField("field1").build(); Map sample = Collections.singletonMap("field1", "2018-05-24T17:28:31,735"); - Tuple match = - FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample), overrides); + Tuple match = FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample), + overrides, NOOP_TIMEOUT_CHECKER); assertNotNull(match); assertEquals("field1", match.v1()); - assertThat(match.v2().dateFormats, contains("ISO8601")); + assertThat(match.v2().jodaTimestampFormats, contains("ISO8601")); assertEquals("TIMESTAMP_ISO8601", match.v2().grokPatternName); } @@ -62,7 +62,8 @@ public class FileStructureUtilsTests extends FileStructureTestCase { Map sample = Collections.singletonMap("field1", "2018-05-24T17:28:31,735"); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample), overrides)); + () -> FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample), overrides, + NOOP_TIMEOUT_CHECKER)); assertEquals("Specified timestamp field [field2] is not present in record [{field1=2018-05-24T17:28:31,735}]", e.getMessage()); } @@ -72,11 +73,11 @@ public class FileStructureUtilsTests extends FileStructureTestCase { FileStructureOverrides overrides = FileStructureOverrides.builder().setTimestampFormat("ISO8601").build(); Map sample = Collections.singletonMap("field1", "2018-05-24T17:28:31,735"); - Tuple match = - FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample), overrides); + Tuple match = FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample), + overrides, NOOP_TIMEOUT_CHECKER); assertNotNull(match); assertEquals("field1", match.v1()); - assertThat(match.v2().dateFormats, contains("ISO8601")); + assertThat(match.v2().jodaTimestampFormats, contains("ISO8601")); assertEquals("TIMESTAMP_ISO8601", match.v2().grokPatternName); } @@ -86,7 +87,8 @@ public class FileStructureUtilsTests extends FileStructureTestCase { Map sample = Collections.singletonMap("field1", "2018-05-24T17:28:31,735"); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample), overrides)); + () -> FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample), overrides, + NOOP_TIMEOUT_CHECKER)); assertEquals("Specified timestamp format [EEE MMM dd HH:mm:ss YYYY] does not match for record [{field1=2018-05-24T17:28:31,735}]", e.getMessage()); @@ -95,27 +97,27 @@ public class FileStructureUtilsTests extends FileStructureTestCase { public void testGuessTimestampGivenSamplesWithSameSingleTimeField() { Map sample1 = Collections.singletonMap("field1", "2018-05-24T17:28:31,735"); Map sample2 = Collections.singletonMap("field1", "2018-05-24T17:33:39,406"); - Tuple match = - FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), EMPTY_OVERRIDES); + Tuple match = FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), + EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); assertNotNull(match); assertEquals("field1", match.v1()); - assertThat(match.v2().dateFormats, contains("ISO8601")); + assertThat(match.v2().jodaTimestampFormats, contains("ISO8601")); assertEquals("TIMESTAMP_ISO8601", match.v2().grokPatternName); } public void 
testGuessTimestampGivenSamplesWithOneSingleTimeFieldDifferentFormat() { Map sample1 = Collections.singletonMap("field1", "2018-05-24T17:28:31,735"); Map sample2 = Collections.singletonMap("field1", "2018-05-24 17:33:39,406"); - Tuple match = - FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), EMPTY_OVERRIDES); + Tuple match = FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), + EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); assertNull(match); } public void testGuessTimestampGivenSamplesWithDifferentSingleTimeField() { Map sample1 = Collections.singletonMap("field1", "2018-05-24T17:28:31,735"); Map sample2 = Collections.singletonMap("another_field", "2018-05-24T17:33:39,406"); - Tuple match = - FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), EMPTY_OVERRIDES); + Tuple match = FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), + EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); assertNull(match); } @@ -124,11 +126,11 @@ public class FileStructureUtilsTests extends FileStructureTestCase { sample.put("foo", "not a time"); sample.put("time", "2018-05-24 17:28:31,735"); sample.put("bar", 42); - Tuple match = - FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample), EMPTY_OVERRIDES); + Tuple match = FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample), + EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); assertNotNull(match); assertEquals("time", match.v1()); - assertThat(match.v2().dateFormats, contains("YYYY-MM-dd HH:mm:ss,SSS")); + assertThat(match.v2().jodaTimestampFormats, contains("YYYY-MM-dd HH:mm:ss,SSS")); assertEquals("TIMESTAMP_ISO8601", match.v2().grokPatternName); } @@ -141,11 +143,11 @@ public class FileStructureUtilsTests extends FileStructureTestCase { sample2.put("foo", "whatever"); sample2.put("time", "2018-05-29 11:53:02,837"); sample2.put("bar", 17); - Tuple match = - FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), EMPTY_OVERRIDES); + Tuple match = FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), + EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); assertNotNull(match); assertEquals("time", match.v1()); - assertThat(match.v2().dateFormats, contains("YYYY-MM-dd HH:mm:ss,SSS")); + assertThat(match.v2().jodaTimestampFormats, contains("YYYY-MM-dd HH:mm:ss,SSS")); assertEquals("TIMESTAMP_ISO8601", match.v2().grokPatternName); } @@ -158,8 +160,8 @@ public class FileStructureUtilsTests extends FileStructureTestCase { sample2.put("foo", "whatever"); sample2.put("time", "May 29 2018 11:53:02"); sample2.put("bar", 17); - Tuple match = - FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), EMPTY_OVERRIDES); + Tuple match = FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), + EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); assertNull(match); } @@ -172,11 +174,11 @@ public class FileStructureUtilsTests extends FileStructureTestCase { sample2.put("red_herring", "whatever"); sample2.put("time", "2018-05-29 11:53:02,837"); sample2.put("bar", 17); - Tuple match = - FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), EMPTY_OVERRIDES); + Tuple match = FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), + EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); assertNotNull(match); assertEquals("time", match.v1()); - assertThat(match.v2().dateFormats, 
contains("YYYY-MM-dd HH:mm:ss,SSS")); + assertThat(match.v2().jodaTimestampFormats, contains("YYYY-MM-dd HH:mm:ss,SSS")); assertEquals("TIMESTAMP_ISO8601", match.v2().grokPatternName); } @@ -189,11 +191,11 @@ public class FileStructureUtilsTests extends FileStructureTestCase { sample2.put("foo", "whatever"); sample2.put("time", "May 29 2018 11:53:02"); sample2.put("red_herring", "17"); - Tuple match = - FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), EMPTY_OVERRIDES); + Tuple match = FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), + EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); assertNotNull(match); assertEquals("time", match.v1()); - assertThat(match.v2().dateFormats, contains("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss")); + assertThat(match.v2().jodaTimestampFormats, contains("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss")); assertEquals("CISCOTIMESTAMP", match.v2().grokPatternName); } @@ -206,8 +208,8 @@ public class FileStructureUtilsTests extends FileStructureTestCase { sample2.put("foo", "whatever"); sample2.put("time2", "May 29 2018 11:53:02"); sample2.put("bar", 42); - Tuple match = - FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), EMPTY_OVERRIDES); + Tuple match = FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), + EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); assertNull(match); } @@ -222,11 +224,11 @@ public class FileStructureUtilsTests extends FileStructureTestCase { sample2.put("time2", "May 10 2018 11:53:02"); sample2.put("time3", "Thu, May 10 2018 11:53:02"); sample2.put("bar", 42); - Tuple match = - FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), EMPTY_OVERRIDES); + Tuple match = FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), + EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); assertNotNull(match); assertEquals("time2", match.v1()); - assertThat(match.v2().dateFormats, contains("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss")); + assertThat(match.v2().jodaTimestampFormats, contains("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss")); assertEquals("CISCOTIMESTAMP", match.v2().grokPatternName); } @@ -320,7 +322,8 @@ public class FileStructureUtilsTests extends FileStructureTestCase { sample2.put("nothing", null); Tuple, SortedMap> mappingsAndFieldStats = - FileStructureUtils.guessMappingsAndCalculateFieldStats(explanation, Arrays.asList(sample1, sample2)); + FileStructureUtils.guessMappingsAndCalculateFieldStats(explanation, Arrays.asList(sample1, sample2), + NOOP_TIMEOUT_CHECKER); assertNotNull(mappingsAndFieldStats); Map mappings = mappingsAndFieldStats.v1(); @@ -338,13 +341,13 @@ public class FileStructureUtilsTests extends FileStructureTestCase { assertEquals(3, fieldStats.size()); assertEquals(new FieldStats(2, 2, makeTopHits("not a time", 1, "whatever", 1)), fieldStats.get("foo")); assertEquals(new FieldStats(2, 2, makeTopHits("2018-05-24 17:28:31,735", 1, "2018-05-29 11:53:02,837", 1)), fieldStats.get("time")); - assertEquals(new FieldStats(2, 2, 17.0, 42.0, 29.5, 29.5, makeTopHits(17.0, 1, 42.0, 1)), fieldStats.get("bar")); + assertEquals(new FieldStats(2, 2, 17.0, 42.0, 29.5, 29.5, makeTopHits(17, 1, 42, 1)), fieldStats.get("bar")); assertNull(fieldStats.get("nothing")); } private Map guessMapping(List explanation, String fieldName, List fieldValues) { - Tuple, FieldStats> mappingAndFieldStats = - FileStructureUtils.guessMappingAndCalculateFieldStats(explanation, fieldName, 
fieldValues); + Tuple, FieldStats> mappingAndFieldStats = FileStructureUtils.guessMappingAndCalculateFieldStats(explanation, + fieldName, fieldValues, NOOP_TIMEOUT_CHECKER); return (mappingAndFieldStats == null) ? null : mappingAndFieldStats.v1(); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreatorTests.java index 271e071fc27..dc48662fb35 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreatorTests.java @@ -43,7 +43,7 @@ public class GrokPatternCreatorTests extends FileStructureTestCase { Collection prefaces = new ArrayList<>(); Collection epilogues = new ArrayList<>(); - candidate.processCaptures(fieldNameCountStore, matchingStrings, prefaces, epilogues, null, null); + candidate.processCaptures(fieldNameCountStore, matchingStrings, prefaces, epilogues, null, null, NOOP_TIMEOUT_CHECKER); assertThat(prefaces, containsInAnyOrder("[", "[", "junk [", "[")); assertThat(epilogues, containsInAnyOrder("] DEBUG ", "] ERROR ", "] INFO ", "] DEBUG ")); @@ -60,7 +60,7 @@ public class GrokPatternCreatorTests extends FileStructureTestCase { Collection prefaces = new ArrayList<>(); Collection epilogues = new ArrayList<>(); - candidate.processCaptures(fieldNameCountStore, matchingStrings, prefaces, epilogues, null, null); + candidate.processCaptures(fieldNameCountStore, matchingStrings, prefaces, epilogues, null, null, NOOP_TIMEOUT_CHECKER); assertThat(prefaces, containsInAnyOrder("before ", "abc ", "")); assertThat(epilogues, containsInAnyOrder(" after", " xyz", "")); @@ -73,7 +73,7 @@ public class GrokPatternCreatorTests extends FileStructureTestCase { "junk [2018-01-22T07:33:23] INFO ", "[2018-01-21T03:33:23] DEBUG "); - GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, snippets, null, null); + GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, snippets, null, null, NOOP_TIMEOUT_CHECKER); grokPatternCreator.appendBestGrokMatchForStrings(false, snippets, false, 0); assertEquals(".*?\\[%{TIMESTAMP_ISO8601:extra_timestamp}\\] %{LOGLEVEL:loglevel} ", @@ -87,7 +87,7 @@ public class GrokPatternCreatorTests extends FileStructureTestCase { " (4)", " (-5) "); - GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, snippets, null, null); + GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, snippets, null, null, NOOP_TIMEOUT_CHECKER); grokPatternCreator.appendBestGrokMatchForStrings(false, snippets, false, 0); assertEquals(".*?\\(%{INT:field}\\).*?", grokPatternCreator.getOverallGrokPatternBuilder().toString()); @@ -99,7 +99,7 @@ public class GrokPatternCreatorTests extends FileStructureTestCase { "prior to-3", "-4"); - GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, snippets, null, null); + GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, snippets, null, null, NOOP_TIMEOUT_CHECKER); grokPatternCreator.appendBestGrokMatchForStrings(false, snippets, false, 0); // It seems sensible that we don't detect these suffixes as either base 10 or base 16 numbers @@ -113,7 +113,7 @@ public class GrokPatternCreatorTests extends FileStructureTestCase { " -123", "1f is hex"); - GrokPatternCreator grokPatternCreator = new 
GrokPatternCreator(explanation, snippets, null, null); + GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, snippets, null, null, NOOP_TIMEOUT_CHECKER); grokPatternCreator.appendBestGrokMatchForStrings(false, snippets, false, 0); assertEquals(".*?%{BASE16NUM:field}.*?", grokPatternCreator.getOverallGrokPatternBuilder().toString()); @@ -124,7 +124,7 @@ public class GrokPatternCreatorTests extends FileStructureTestCase { Collection snippets = Arrays.asList(" mappings = new HashMap<>(); - GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, sampleMessages, mappings, null); + GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, sampleMessages, mappings, null, + NOOP_TIMEOUT_CHECKER); assertEquals("%{SYSLOGTIMESTAMP:timestamp} .*? .*?\\[%{INT:field}\\]: %{LOGLEVEL:loglevel} \\(.*? .*? .*?\\) .*? " + "%{QUOTEDSTRING:field2}: %{IP:ipaddress}#%{INT:field3}", @@ -215,7 +216,8 @@ public class GrokPatternCreatorTests extends FileStructureTestCase { "Invalid chunk ignored."); Map mappings = new HashMap<>(); - GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, sampleMessages, mappings, null); + GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, sampleMessages, mappings, null, + NOOP_TIMEOUT_CHECKER); assertEquals("%{CATALINA_DATESTAMP:timestamp} .*? .*?\\n%{LOGLEVEL:loglevel}: .*", grokPatternCreator.createGrokPatternFromExamples("CATALINA_DATESTAMP", "timestamp")); @@ -237,7 +239,8 @@ public class GrokPatternCreatorTests extends FileStructureTestCase { "Info\tsshd\tsubsystem request for sftp"); Map mappings = new HashMap<>(); - GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, sampleMessages, mappings, null); + GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, sampleMessages, mappings, null, + NOOP_TIMEOUT_CHECKER); assertEquals("%{INT:field}\\t%{TIMESTAMP_ISO8601:timestamp}\\t%{TIMESTAMP_ISO8601:extra_timestamp}\\t%{INT:field2}\\t.*?\\t" + "%{IP:ipaddress}\\t.*?\\t%{LOGLEVEL:loglevel}\\t.*", @@ -270,7 +273,8 @@ public class GrokPatternCreatorTests extends FileStructureTestCase { "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36\""); Map mappings = new HashMap<>(); - GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, sampleMessages, mappings, null); + GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, sampleMessages, mappings, null, + NOOP_TIMEOUT_CHECKER); assertEquals(new Tuple<>("timestamp", "%{COMBINEDAPACHELOG}"), grokPatternCreator.findFullLineGrokPattern(randomBoolean() ? 
"timestamp" : null)); @@ -300,7 +304,7 @@ public class GrokPatternCreatorTests extends FileStructureTestCase { ",\"rule1\",\"Accept\",\"\",\"\",\"\",\"0000000000000000\"" ); - GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, snippets, null, null); + GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, snippets, null, null, NOOP_TIMEOUT_CHECKER); Collection adjustedSnippets = grokPatternCreator.adjustForPunctuation(snippets); assertEquals("\",", grokPatternCreator.getOverallGrokPatternBuilder().toString()); @@ -317,7 +321,7 @@ public class GrokPatternCreatorTests extends FileStructureTestCase { "was added by 'User1'(id:2) to servergroup 'GAME'(id:9)" ); - GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, snippets, null, null); + GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, snippets, null, null, NOOP_TIMEOUT_CHECKER); Collection adjustedSnippets = grokPatternCreator.adjustForPunctuation(snippets); assertEquals("", grokPatternCreator.getOverallGrokPatternBuilder().toString()); @@ -343,7 +347,8 @@ public class GrokPatternCreatorTests extends FileStructureTestCase { "Info\tsshd\tsubsystem request for sftp"); Map mappings = new HashMap<>(); - GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, sampleMessages, mappings, null); + GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, sampleMessages, mappings, null, + NOOP_TIMEOUT_CHECKER); grokPatternCreator.validateFullLineGrokPattern(grokPattern, timestampField); assertEquals(9, mappings.size()); @@ -371,7 +376,8 @@ public class GrokPatternCreatorTests extends FileStructureTestCase { "Sep 8 11:55:42 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'b.akamaiedge.net/A/IN': 95.110.64.205#53"); Map mappings = new HashMap<>(); - GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, sampleMessages, mappings, null); + GrokPatternCreator grokPatternCreator = new GrokPatternCreator(explanation, sampleMessages, mappings, null, + NOOP_TIMEOUT_CHECKER); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> grokPatternCreator.validateFullLineGrokPattern(grokPattern, timestampField)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderTests.java index 6856e9a6021..55074e8c382 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderTests.java @@ -19,7 +19,7 @@ public class JsonFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); FileStructureFinder structureFinder = factory.createFromSample(explanation, JSON_SAMPLE, charset, hasByteOrderMarker, - FileStructureOverrides.EMPTY_OVERRIDES); + FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -38,6 +38,6 @@ public class JsonFileStructureFinderTests extends FileStructureTestCase { assertNull(structure.getShouldTrimFields()); assertNull(structure.getGrokPattern()); assertEquals("timestamp", structure.getTimestampField()); - assertEquals(Collections.singletonList("UNIX_MS"), 
structure.getTimestampFormats()); + assertEquals(Collections.singletonList("UNIX_MS"), structure.getJodaTimestampFormats()); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java index 5bc40a16511..a848f384e2e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java @@ -107,7 +107,7 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); FileStructureFinder structureFinder = factory.createFromSample(explanation, TEXT_SAMPLE, charset, hasByteOrderMarker, - FileStructureOverrides.EMPTY_OVERRIDES); + FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -119,14 +119,14 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker()); } assertNull(structure.getExcludeLinesPattern()); - assertEquals("^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", structure.getMultilineStartPattern()); + assertEquals("^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", structure.getMultilineStartPattern()); assertNull(structure.getDelimiter()); assertNull(structure.getQuote()); assertNull(structure.getHasHeaderRow()); assertNull(structure.getShouldTrimFields()); assertEquals("\\[%{TIMESTAMP_ISO8601:timestamp}\\]\\[%{LOGLEVEL:loglevel} \\]\\[.*", structure.getGrokPattern()); assertEquals("timestamp", structure.getTimestampField()); - assertEquals(Collections.singletonList("ISO8601"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenElasticsearchLogAndTimestampFieldOverride() throws Exception { @@ -137,7 +137,8 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - FileStructureFinder structureFinder = factory.createFromSample(explanation, TEXT_SAMPLE, charset, hasByteOrderMarker, overrides); + FileStructureFinder structureFinder = factory.createFromSample(explanation, TEXT_SAMPLE, charset, hasByteOrderMarker, overrides, + NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -149,14 +150,14 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker()); } assertNull(structure.getExcludeLinesPattern()); - assertEquals("^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", structure.getMultilineStartPattern()); + assertEquals("^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", structure.getMultilineStartPattern()); assertNull(structure.getDelimiter()); assertNull(structure.getQuote()); assertNull(structure.getHasHeaderRow()); assertNull(structure.getShouldTrimFields()); assertEquals("\\[%{TIMESTAMP_ISO8601:my_time}\\]\\[%{LOGLEVEL:loglevel} \\]\\[.*", structure.getGrokPattern()); assertEquals("my_time", structure.getTimestampField()); - 
assertEquals(Collections.singletonList("ISO8601"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenElasticsearchLogAndGrokPatternOverride() throws Exception { @@ -168,7 +169,8 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - FileStructureFinder structureFinder = factory.createFromSample(explanation, TEXT_SAMPLE, charset, hasByteOrderMarker, overrides); + FileStructureFinder structureFinder = factory.createFromSample(explanation, TEXT_SAMPLE, charset, hasByteOrderMarker, overrides, + NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -180,7 +182,7 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker()); } assertNull(structure.getExcludeLinesPattern()); - assertEquals("^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", structure.getMultilineStartPattern()); + assertEquals("^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", structure.getMultilineStartPattern()); assertNull(structure.getDelimiter()); assertNull(structure.getQuote()); assertNull(structure.getHasHeaderRow()); @@ -188,7 +190,7 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { assertEquals("\\[%{TIMESTAMP_ISO8601:timestamp}\\]\\[%{LOGLEVEL:loglevel} *\\]" + "\\[%{JAVACLASS:class} *\\] \\[%{HOSTNAME:node}\\] %{JAVALOGMESSAGE:message}", structure.getGrokPattern()); assertEquals("timestamp", structure.getTimestampField()); - assertEquals(Collections.singletonList("ISO8601"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenElasticsearchLogAndImpossibleGrokPatternOverride() { @@ -202,7 +204,7 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> factory.createFromSample(explanation, TEXT_SAMPLE, charset, hasByteOrderMarker, overrides)); + () -> factory.createFromSample(explanation, TEXT_SAMPLE, charset, hasByteOrderMarker, overrides, NOOP_TIMEOUT_CHECKER)); assertEquals("Supplied Grok pattern [\\[%{LOGLEVEL:loglevel} *\\]\\[%{HOSTNAME:node}\\]\\[%{TIMESTAMP_ISO8601:timestamp}\\] " + "\\[%{JAVACLASS:class} *\\] %{JAVALOGMESSAGE:message}] does not match sample messages", e.getMessage()); @@ -310,23 +312,25 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { "[2018-06-27T11:59:23,588][INFO ][o.e.p.PluginsService ] [node-0] no plugins loaded\n"; Tuple> mostLikelyMatch = - TextLogFileStructureFinder.mostLikelyTimestamp(sample.split("\n"), FileStructureOverrides.EMPTY_OVERRIDES); + TextLogFileStructureFinder.mostLikelyTimestamp(sample.split("\n"), FileStructureOverrides.EMPTY_OVERRIDES, + NOOP_TIMEOUT_CHECKER); assertNotNull(mostLikelyMatch); - assertEquals(new TimestampMatch(7, "", "ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", ""), - mostLikelyMatch.v1()); + assertEquals(new TimestampMatch(9, "", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSS", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), mostLikelyMatch.v1()); } 
public void testMostLikelyTimestampGivenExceptionTrace() { Tuple> mostLikelyMatch = - TextLogFileStructureFinder.mostLikelyTimestamp(EXCEPTION_TRACE_SAMPLE.split("\n"), FileStructureOverrides.EMPTY_OVERRIDES); + TextLogFileStructureFinder.mostLikelyTimestamp(EXCEPTION_TRACE_SAMPLE.split("\n"), FileStructureOverrides.EMPTY_OVERRIDES, + NOOP_TIMEOUT_CHECKER); assertNotNull(mostLikelyMatch); // Even though many lines have a timestamp near the end (in the Lucene version information), // these are so far along the lines that the weight of the timestamp near the beginning of the // first line should take precedence - assertEquals(new TimestampMatch(7, "", "ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", ""), - mostLikelyMatch.v1()); + assertEquals(new TimestampMatch(9, "", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSS", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), mostLikelyMatch.v1()); } public void testMostLikelyTimestampGivenExceptionTraceAndTimestampFormatOverride() { @@ -334,12 +338,12 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { FileStructureOverrides overrides = FileStructureOverrides.builder().setTimestampFormat("YYYY-MM-dd HH:mm:ss").build(); Tuple> mostLikelyMatch = - TextLogFileStructureFinder.mostLikelyTimestamp(EXCEPTION_TRACE_SAMPLE.split("\n"), overrides); + TextLogFileStructureFinder.mostLikelyTimestamp(EXCEPTION_TRACE_SAMPLE.split("\n"), overrides, NOOP_TIMEOUT_CHECKER); assertNotNull(mostLikelyMatch); // The override should force the seemingly inferior choice of timestamp - assertEquals(new TimestampMatch(6, "", "YYYY-MM-dd HH:mm:ss", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", - ""), mostLikelyMatch.v1()); + assertEquals(new TimestampMatch(6, "", "YYYY-MM-dd HH:mm:ss", "yyyy-MM-dd HH:mm:ss", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", + "TIMESTAMP_ISO8601", ""), mostLikelyMatch.v1()); } public void testMostLikelyTimestampGivenExceptionTraceAndImpossibleTimestampFormatOverride() { @@ -347,7 +351,7 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { FileStructureOverrides overrides = FileStructureOverrides.builder().setTimestampFormat("MMM dd HH:mm:ss").build(); Tuple> mostLikelyMatch = - TextLogFileStructureFinder.mostLikelyTimestamp(EXCEPTION_TRACE_SAMPLE.split("\n"), overrides); + TextLogFileStructureFinder.mostLikelyTimestamp(EXCEPTION_TRACE_SAMPLE.split("\n"), overrides, NOOP_TIMEOUT_CHECKER); assertNull(mostLikelyMatch); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java new file mode 100644 index 00000000000..125aab7e45e --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.filestructurefinder; + +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.common.unit.TimeValue; +import org.junit.After; +import org.junit.Before; + +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledThreadPoolExecutor; + +public class TimeoutCheckerTests extends FileStructureTestCase { + + private ScheduledExecutorService scheduler; + + @Before + public void createScheduler() { + scheduler = new ScheduledThreadPoolExecutor(1); + } + + @After + public void shutdownScheduler() { + scheduler.shutdown(); + } + + public void testCheckNoTimeout() { + + NOOP_TIMEOUT_CHECKER.check("should never happen"); + } + + public void testCheckTimeoutNotExceeded() throws InterruptedException { + + TimeValue timeout = TimeValue.timeValueSeconds(10); + try (TimeoutChecker timeoutChecker = new TimeoutChecker("timeout not exceeded test", timeout, scheduler)) { + + for (int count = 0; count < 10; ++count) { + timeoutChecker.check("should not timeout"); + Thread.sleep(randomIntBetween(1, 10)); + } + } + } + + public void testCheckTimeoutExceeded() throws Exception { + + TimeValue timeout = TimeValue.timeValueMillis(10); + try (TimeoutChecker timeoutChecker = new TimeoutChecker("timeout exceeded test", timeout, scheduler)) { + + assertBusy(() -> { + ElasticsearchTimeoutException e = expectThrows(ElasticsearchTimeoutException.class, + () -> timeoutChecker.check("should timeout")); + assertEquals("Aborting timeout exceeded test during [should timeout] as it has taken longer than the timeout of [" + + timeout + "]", e.getMessage()); + }); + } + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java index bf27912b9db..4b9b0b36c34 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java @@ -6,14 +6,11 @@ package org.elasticsearch.xpack.ml.filestructurefinder; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.xpack.ml.filestructurefinder.TimestampFormatFinder.TimestampMatch; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; import java.util.Arrays; +import java.util.List; import java.util.Locale; public class TimestampFormatFinderTests extends FileStructureTestCase { @@ -28,199 +25,206 @@ public class TimestampFormatFinderTests extends FileStructureTestCase { public void testFindFirstMatchGivenOnlyIso8601() { - TimestampMatch expected = new TimestampMatch(7, "", "ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", - ""); + validateTimestampMatch(new TimestampMatch(7, "", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSSXX", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), "2018-05-15T16:14:56,374Z", + 1526400896374L); + validateTimestampMatch(new TimestampMatch(7, "", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSSXX", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), "2018-05-15T17:14:56,374+0100", + 1526400896374L); + 
validateTimestampMatch(new TimestampMatch(8, "", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSSXXX", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), "2018-05-15T17:14:56,374+01:00", + 1526400896374L); + validateTimestampMatch(new TimestampMatch(9, "", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSS", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), "2018-05-15T17:14:56,374", 1526400896374L); - checkAndValidateDateFormat(expected, "2018-05-15T16:14:56,374Z", 1526400896374L); - checkAndValidateDateFormat(expected, "2018-05-15T17:14:56,374+0100", 1526400896374L); - checkAndValidateDateFormat(expected, "2018-05-15T17:14:56,374+01:00", 1526400896374L); - checkAndValidateDateFormat(expected, "2018-05-15T17:14:56,374", 1526400896374L); - checkAndValidateDateFormat(expected, "2018-05-15T16:14:56Z", 1526400896000L); - checkAndValidateDateFormat(expected, "2018-05-15T17:14:56+0100", 1526400896000L); - checkAndValidateDateFormat(expected, "2018-05-15T17:14:56+01:00", 1526400896000L); - checkAndValidateDateFormat(expected, "2018-05-15T17:14:56", 1526400896000L); + TimestampMatch pureIso8601Expected = new TimestampMatch(10, "", "ISO8601", "ISO8601", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", ""); - checkAndValidateDateFormat(new TimestampMatch(1, "", "YYYY-MM-dd HH:mm:ss,SSSZ", + validateTimestampMatch(pureIso8601Expected, "2018-05-15T16:14:56Z", 1526400896000L); + validateTimestampMatch(pureIso8601Expected, "2018-05-15T17:14:56+0100", 1526400896000L); + validateTimestampMatch(pureIso8601Expected, "2018-05-15T17:14:56+01:00", 1526400896000L); + validateTimestampMatch(pureIso8601Expected, "2018-05-15T17:14:56", 1526400896000L); + + validateTimestampMatch(new TimestampMatch(1, "", "YYYY-MM-dd HH:mm:ss,SSSZ", "yyyy-MM-dd HH:mm:ss,SSSXX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), "2018-05-15 16:14:56,374Z", 1526400896374L); - checkAndValidateDateFormat(new TimestampMatch(1, "", "YYYY-MM-dd HH:mm:ss,SSSZ", + validateTimestampMatch(new TimestampMatch(1, "", "YYYY-MM-dd HH:mm:ss,SSSZ", "yyyy-MM-dd HH:mm:ss,SSSXX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), "2018-05-15 17:14:56,374+0100", 1526400896374L); - checkAndValidateDateFormat(new TimestampMatch(2, "", "YYYY-MM-dd HH:mm:ss,SSSZZ", + validateTimestampMatch(new TimestampMatch(2, "", "YYYY-MM-dd HH:mm:ss,SSSZZ", "yyyy-MM-dd HH:mm:ss,SSSXXX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), "2018-05-15 17:14:56,374+01:00", 1526400896374L); - checkAndValidateDateFormat(new TimestampMatch(3, "", "YYYY-MM-dd HH:mm:ss,SSS", + validateTimestampMatch(new TimestampMatch(3, "", "YYYY-MM-dd HH:mm:ss,SSS", "yyyy-MM-dd HH:mm:ss,SSS", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), "2018-05-15 17:14:56,374", 1526400896374L); - checkAndValidateDateFormat(new TimestampMatch(4, "", "YYYY-MM-dd HH:mm:ssZ", + validateTimestampMatch(new TimestampMatch(4, "", "YYYY-MM-dd HH:mm:ssZ", "yyyy-MM-dd HH:mm:ssXX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", ""), "2018-05-15 16:14:56Z", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(4, "", "YYYY-MM-dd HH:mm:ssZ", + validateTimestampMatch(new TimestampMatch(4, "", "YYYY-MM-dd HH:mm:ssZ", "yyyy-MM-dd HH:mm:ssXX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", ""), "2018-05-15 17:14:56+0100", 1526400896000L); - checkAndValidateDateFormat(new 
TimestampMatch(5, "", "YYYY-MM-dd HH:mm:ssZZ", + validateTimestampMatch(new TimestampMatch(5, "", "YYYY-MM-dd HH:mm:ssZZ", "yyyy-MM-dd HH:mm:ssXXX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", ""), "2018-05-15 17:14:56+01:00", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(6, "", "YYYY-MM-dd HH:mm:ss", + validateTimestampMatch(new TimestampMatch(6, "", "YYYY-MM-dd HH:mm:ss", "yyyy-MM-dd HH:mm:ss", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", ""), "2018-05-15 17:14:56", 1526400896000L); } - public void testFindFirstMatchGivenOnlyKnownDateFormat() { + public void testFindFirstMatchGivenOnlyKnownTimestampFormat() { // Note: some of the time formats give millisecond accuracy, some second accuracy and some minute accuracy - checkAndValidateDateFormat(new TimestampMatch(0, "", "YYYY-MM-dd HH:mm:ss,SSS Z", + validateTimestampMatch(new TimestampMatch(0, "", "YYYY-MM-dd HH:mm:ss,SSS Z", "yyyy-MM-dd HH:mm:ss,SSS XX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "TOMCAT_DATESTAMP", ""), "2018-05-15 17:14:56,374 +0100", 1526400896374L); - checkAndValidateDateFormat(new TimestampMatch(8, "", "EEE MMM dd YYYY HH:mm:ss zzz", + validateTimestampMatch(new TimestampMatch(11, "", "EEE MMM dd YYYY HH:mm:ss zzz", "EEE MMM dd yyyy HH:mm:ss zzz", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{4} \\d{2}:\\d{2}:\\d{2} ", "DATESTAMP_RFC822", ""), "Tue May 15 2018 16:14:56 UTC", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(9, "", "EEE MMM dd YYYY HH:mm zzz", + validateTimestampMatch(new TimestampMatch(12, "", "EEE MMM dd YYYY HH:mm zzz", "EEE MMM dd yyyy HH:mm zzz", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{4} \\d{2}:\\d{2} ", "DATESTAMP_RFC822", ""), "Tue May 15 2018 16:14 UTC", 1526400840000L); - checkAndValidateDateFormat(new TimestampMatch(10, "", "EEE, dd MMM YYYY HH:mm:ss ZZ", + validateTimestampMatch(new TimestampMatch(13, "", "EEE, dd MMM YYYY HH:mm:ss ZZ", "EEE, dd MMM yyyy HH:mm:ss XXX", "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2}:\\d{2} ", "DATESTAMP_RFC2822", ""), "Tue, 15 May 2018 17:14:56 +01:00", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(11, "", "EEE, dd MMM YYYY HH:mm:ss Z", + validateTimestampMatch(new TimestampMatch(14, "", "EEE, dd MMM YYYY HH:mm:ss Z", "EEE, dd MMM yyyy HH:mm:ss XX", "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2}:\\d{2} ", "DATESTAMP_RFC2822", ""), "Tue, 15 May 2018 17:14:56 +0100", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(12, "", "EEE, dd MMM YYYY HH:mm ZZ", + validateTimestampMatch(new TimestampMatch(15, "", "EEE, dd MMM YYYY HH:mm ZZ", "EEE, dd MMM yyyy HH:mm XXX", "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2} ", "DATESTAMP_RFC2822", ""), "Tue, 15 May 2018 17:14 +01:00", 1526400840000L); - checkAndValidateDateFormat(new TimestampMatch(13, "", "EEE, dd MMM YYYY HH:mm Z", + validateTimestampMatch(new TimestampMatch(16, "", "EEE, dd MMM YYYY HH:mm Z", "EEE, dd MMM yyyy HH:mm XX", "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2} ", "DATESTAMP_RFC2822", ""), "Tue, 15 May 2018 17:14 +0100", 1526400840000L); - checkAndValidateDateFormat(new TimestampMatch(14, "", "EEE MMM dd HH:mm:ss zzz YYYY", + validateTimestampMatch(new TimestampMatch(17, "", "EEE MMM dd HH:mm:ss zzz YYYY", "EEE MMM dd HH:mm:ss zzz yyyy", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{2}:\\d{2}:\\d{2} [A-Z]{3,4} \\d{4}\\b", "DATESTAMP_OTHER", ""), "Tue May 15 16:14:56 UTC 2018", 1526400896000L); - 
checkAndValidateDateFormat(new TimestampMatch(15, "", "EEE MMM dd HH:mm zzz YYYY", + validateTimestampMatch(new TimestampMatch(18, "", "EEE MMM dd HH:mm zzz YYYY", "EEE MMM dd HH:mm zzz yyyy", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{2}:\\d{2} [A-Z]{3,4} \\d{4}\\b", "DATESTAMP_OTHER", ""), "Tue May 15 16:14 UTC 2018", 1526400840000L); - checkAndValidateDateFormat(new TimestampMatch(16, "", "YYYYMMddHHmmss", "\\b\\d{14}\\b", "DATESTAMP_EVENTLOG", ""), + validateTimestampMatch(new TimestampMatch(19, "", "YYYYMMddHHmmss", "yyyyMMddHHmmss", "\\b\\d{14}\\b", + "DATESTAMP_EVENTLOG", ""), "20180515171456", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(17, "", "EEE MMM dd HH:mm:ss YYYY", + validateTimestampMatch(new TimestampMatch(20, "", "EEE MMM dd HH:mm:ss YYYY", "EEE MMM dd HH:mm:ss yyyy", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{2}:\\d{2}:\\d{2} \\d{4}\\b", "HTTPDERROR_DATE", ""), "Tue May 15 17:14:56 2018", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(18, "", Arrays.asList("MMM dd HH:mm:ss.SSS", "MMM d HH:mm:ss.SSS"), + validateTimestampMatch(new TimestampMatch(21, "", Arrays.asList("MMM dd HH:mm:ss.SSS", "MMM d HH:mm:ss.SSS"), + Arrays.asList("MMM dd HH:mm:ss.SSS", "MMM d HH:mm:ss.SSS"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3}", "SYSLOGTIMESTAMP", ""), "May 15 17:14:56.725", 1526400896725L); - checkAndValidateDateFormat(new TimestampMatch(19, "", Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), + validateTimestampMatch(new TimestampMatch(22, "", Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), + Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{2}:\\d{2}:\\d{2}\\b", "SYSLOGTIMESTAMP", ""), "May 15 17:14:56", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(20, "", "dd/MMM/YYYY:HH:mm:ss Z", + validateTimestampMatch(new TimestampMatch(23, "", "dd/MMM/YYYY:HH:mm:ss Z", "dd/MMM/yyyy:HH:mm:ss XX", "\\b\\d{2}/[A-Z]\\S{2}/\\d{4}:\\d{2}:\\d{2}:\\d{2} ", "HTTPDATE", ""), "15/May/2018:17:14:56 +0100", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(21, "", "MMM dd, YYYY K:mm:ss a", + validateTimestampMatch(new TimestampMatch(24, "", "MMM dd, YYYY h:mm:ss a", "MMM dd, yyyy h:mm:ss a", "\\b[A-Z]\\S{2,8} \\d{1,2}, \\d{4} \\d{1,2}:\\d{2}:\\d{2} [AP]M\\b", "CATALINA_DATESTAMP", ""), "May 15, 2018 5:14:56 PM", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(22, "", Arrays.asList("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss"), + validateTimestampMatch(new TimestampMatch(25, "", Arrays.asList("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss"), + Arrays.asList("MMM dd yyyy HH:mm:ss", "MMM d yyyy HH:mm:ss"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{4} \\d{2}:\\d{2}:\\d{2}\\b", "CISCOTIMESTAMP", ""), "May 15 2018 17:14:56", 1526400896000L); } public void testFindFirstMatchGivenOnlySystemDate() { - assertEquals(new TimestampMatch(23, "", "UNIX_MS", "\\b\\d{13}\\b", "POSINT", ""), + assertEquals(new TimestampMatch(26, "", "UNIX_MS", "UNIX_MS", "\\b\\d{13}\\b", "POSINT", ""), TimestampFormatFinder.findFirstMatch("1526400896374")); - assertEquals(new TimestampMatch(23, "", "UNIX_MS", "\\b\\d{13}\\b", "POSINT", ""), + assertEquals(new TimestampMatch(26, "", "UNIX_MS", "UNIX_MS", "\\b\\d{13}\\b", "POSINT", ""), TimestampFormatFinder.findFirstFullMatch("1526400896374")); - assertEquals(new TimestampMatch(24, "", "UNIX", "\\b\\d{10}\\.\\d{3,9}\\b", "NUMBER", ""), + assertEquals(new TimestampMatch(27, "", "UNIX", "UNIX", "\\b\\d{10}\\.\\d{3,9}\\b", "NUMBER", ""), 
TimestampFormatFinder.findFirstMatch("1526400896.736")); - assertEquals(new TimestampMatch(24, "", "UNIX", "\\b\\d{10}\\.\\d{3,9}\\b", "NUMBER", ""), + assertEquals(new TimestampMatch(27, "", "UNIX", "UNIX", "\\b\\d{10}\\.\\d{3,9}\\b", "NUMBER", ""), TimestampFormatFinder.findFirstFullMatch("1526400896.736")); - assertEquals(new TimestampMatch(25, "", "UNIX", "\\b\\d{10}\\b", "POSINT", ""), + assertEquals(new TimestampMatch(28, "", "UNIX", "UNIX", "\\b\\d{10}\\b", "POSINT", ""), TimestampFormatFinder.findFirstMatch("1526400896")); - assertEquals(new TimestampMatch(25, "", "UNIX", "\\b\\d{10}\\b", "POSINT", ""), + assertEquals(new TimestampMatch(28, "", "UNIX", "UNIX", "\\b\\d{10}\\b", "POSINT", ""), TimestampFormatFinder.findFirstFullMatch("1526400896")); - assertEquals(new TimestampMatch(26, "", "TAI64N", "\\b[0-9A-Fa-f]{24}\\b", "BASE16NUM", ""), + assertEquals(new TimestampMatch(29, "", "TAI64N", "TAI64N", "\\b[0-9A-Fa-f]{24}\\b", "BASE16NUM", ""), TimestampFormatFinder.findFirstMatch("400000005afb159a164ac980")); - assertEquals(new TimestampMatch(26, "", "TAI64N", "\\b[0-9A-Fa-f]{24}\\b", "BASE16NUM", ""), + assertEquals(new TimestampMatch(29, "", "TAI64N", "TAI64N", "\\b[0-9A-Fa-f]{24}\\b", "BASE16NUM", ""), TimestampFormatFinder.findFirstFullMatch("400000005afb159a164ac980")); } - private void checkAndValidateDateFormat(TimestampMatch expected, String text, long expectedEpochMs) { - - assertEquals(expected, TimestampFormatFinder.findFirstMatch(text)); - assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text)); - - // All the test times are for Tue May 15 2018 16:14:56 UTC, which is 17:14:56 in London - DateTimeZone zone = DateTimeZone.forID("Europe/London"); - DateTime parsed; - for (int i = 0; i < expected.dateFormats.size(); ++i) { - try { - String dateFormat = expected.dateFormats.get(i); - switch (dateFormat) { - case "ISO8601": - parsed = ISODateTimeFormat.dateTimeParser().withZone(zone).withDefaultYear(2018).parseDateTime(text); - break; - default: - DateTimeFormatter parser = DateTimeFormat.forPattern(dateFormat).withZone(zone).withLocale(Locale.UK); - parsed = parser.withDefaultYear(2018).parseDateTime(text); - break; - } - if (expectedEpochMs == parsed.getMillis()) { - break; - } - // If the last one isn't right then propagate - if (i == expected.dateFormats.size() - 1) { - assertEquals(expectedEpochMs, parsed.getMillis()); - } - } catch (RuntimeException e) { - // If the last one throws then propagate - if (i == expected.dateFormats.size() - 1) { - throw e; - } - } - } - assertTrue(expected.simplePattern.matcher(text).find()); - } - public void testFindFirstMatchGivenRealLogMessages() { - assertEquals(new TimestampMatch(7, "[", "ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", + assertEquals(new TimestampMatch(9, "[", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSS", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", "][INFO ][o.e.e.NodeEnvironment ] [node-0] heap size [3.9gb], compressed ordinary object pointers [true]"), TimestampFormatFinder.findFirstMatch("[2018-05-11T17:07:29,553][INFO ][o.e.e.NodeEnvironment ] [node-0] " + "heap size [3.9gb], compressed ordinary object pointers [true]")); - assertEquals(new TimestampMatch(20, "192.168.62.101 - - [", "dd/MMM/YYYY:HH:mm:ss Z", + assertEquals(new TimestampMatch(23, "192.168.62.101 - - [", "dd/MMM/YYYY:HH:mm:ss Z", "dd/MMM/yyyy:HH:mm:ss XX", "\\b\\d{2}/[A-Z]\\S{2}/\\d{4}:\\d{2}:\\d{2}:\\d{2} ", "HTTPDATE", "] \"POST //apiserv:8080/engine/v2/jobs HTTP/1.1\" 201 
42 \"-\" \"curl/7.46.0\" 384"), TimestampFormatFinder.findFirstMatch("192.168.62.101 - - [29/Jun/2016:12:11:31 +0000] " + "\"POST //apiserv:8080/engine/v2/jobs HTTP/1.1\" 201 42 \"-\" \"curl/7.46.0\" 384")); - assertEquals(new TimestampMatch(21, "", "MMM dd, YYYY K:mm:ss a", + assertEquals(new TimestampMatch(24, "", "MMM dd, YYYY h:mm:ss a", "MMM dd, yyyy h:mm:ss a", "\\b[A-Z]\\S{2,8} \\d{1,2}, \\d{4} \\d{1,2}:\\d{2}:\\d{2} [AP]M\\b", "CATALINA_DATESTAMP", " org.apache.tomcat.util.http.Parameters processParameters"), TimestampFormatFinder.findFirstMatch("Aug 29, 2009 12:03:57 AM org.apache.tomcat.util.http.Parameters processParameters")); - assertEquals(new TimestampMatch(19, "", Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), + assertEquals(new TimestampMatch(22, "", Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), + Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{2}:\\d{2}:\\d{2}\\b", "SYSLOGTIMESTAMP", " esxi1.acme.com Vpxa: " + "[3CB3FB90 verbose 'vpxavpxaInvtVm' opID=WFU-33d82c31] [VpxaInvtVmChangeListener] Guest DiskInfo Changed"), TimestampFormatFinder.findFirstMatch("Oct 19 17:04:44 esxi1.acme.com Vpxa: [3CB3FB90 verbose 'vpxavpxaInvtVm' " + "opID=WFU-33d82c31] [VpxaInvtVmChangeListener] Guest DiskInfo Changed")); - assertEquals(new TimestampMatch(7, "559550912540598297\t", "ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", + assertEquals(new TimestampMatch(10, "559550912540598297\t", "ISO8601", "ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", "\t2016-04-20T21:06:53Z\t38545844\tserv02nw07\t192.168.114.28\tAuthpriv\tInfo\tsshd\tsubsystem request for sftp"), TimestampFormatFinder.findFirstMatch("559550912540598297\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t38545844\tserv02nw07\t" + "192.168.114.28\tAuthpriv\tInfo\tsshd\tsubsystem request for sftp")); - assertEquals(new TimestampMatch(19, "", Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), + assertEquals(new TimestampMatch(22, "", Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), + Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{2}:\\d{2}:\\d{2}\\b", "SYSLOGTIMESTAMP", " dnsserv named[22529]: error (unexpected RCODE REFUSED) resolving 'www.elastic.co/A/IN': 95.110.68.206#53"), TimestampFormatFinder.findFirstMatch("Sep 8 11:55:35 dnsserv named[22529]: error (unexpected RCODE REFUSED) resolving " + "'www.elastic.co/A/IN': 95.110.68.206#53")); - assertEquals(new TimestampMatch(3, "", "YYYY-MM-dd HH:mm:ss.SSSSSS", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3}", - "TIMESTAMP_ISO8601", + assertEquals(new TimestampMatch(3, "", "YYYY-MM-dd HH:mm:ss.SSSSSS", "yyyy-MM-dd HH:mm:ss.SSSSSS", + "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3}", "TIMESTAMP_ISO8601", "|INFO |VirtualServer |1 |client 'User1'(id:2) was added to channelgroup 'Channel Admin'(id:5) by client " + "'User1'(id:2) in channel '3er Instanz'(id:2)"), TimestampFormatFinder.findFirstMatch("2018-01-06 19:22:20.106822|INFO |VirtualServer |1 |client " + " 'User1'(id:2) was added to channelgroup 'Channel Admin'(id:5) by client 'User1'(id:2) in channel '3er Instanz'(id:2)")); + + // Differs from the above as the required format is specified + assertEquals(new TimestampMatch(3, "", "YYYY-MM-dd HH:mm:ss.SSSSSS", "yyyy-MM-dd HH:mm:ss.SSSSSS", + "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3}", "TIMESTAMP_ISO8601", + "|INFO |VirtualServer |1 |client 'User1'(id:2) was added to channelgroup 'Channel Admin'(id:5) by client " + + 
"'User1'(id:2) in channel '3er Instanz'(id:2)"), + TimestampFormatFinder.findFirstMatch("2018-01-06 19:22:20.106822|INFO |VirtualServer |1 |client " + + " 'User1'(id:2) was added to channelgroup 'Channel Admin'(id:5) by client 'User1'(id:2) in channel '3er Instanz'(id:2)", + randomFrom("YYYY-MM-dd HH:mm:ss.SSSSSS", "yyyy-MM-dd HH:mm:ss.SSSSSS"))); + + // Non-matching required format specified + assertNull(TimestampFormatFinder.findFirstMatch("2018-01-06 19:22:20.106822|INFO |VirtualServer |1 |client " + + " 'User1'(id:2) was added to channelgroup 'Channel Admin'(id:5) by client 'User1'(id:2) in channel '3er Instanz'(id:2)", + randomFrom("UNIX", "EEE MMM dd YYYY HH:mm zzz"))); + } + + public void testAdjustRequiredFormat() { + assertEquals("YYYY-MM-dd HH:mm:ss,SSS Z", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss,SSS Z")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS Z", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss,SSSSSS Z")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS Z", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss,SSSSSSSSS Z")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS Z", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss.SSS Z")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS Z", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss.SSSSSS Z")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS Z", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss.SSSSSSSSS Z")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss,SSS")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss,SSSSSS")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss,SSSSSSSSS")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss.SSS")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss.SSSSSS")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss.SSSSSSSSS")); } public void testInterpretFractionalSeconds() { @@ -239,4 +243,112 @@ public class TimestampFormatFinderTests extends FileStructureTestCase { assertEquals(new Tuple<>(',', 3), TimestampFormatFinder.interpretFractionalSeconds("2018-01-06T17:21:25,764 Z")); assertEquals(new Tuple<>('.', 3), TimestampFormatFinder.interpretFractionalSeconds("2018-01-06T17:21:25.764 Z")); } + + private void validateTimestampMatch(TimestampMatch expected, String text, long expectedEpochMs) { + + assertEquals(expected, TimestampFormatFinder.findFirstMatch(text)); + assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text)); + assertEquals(expected, TimestampFormatFinder.findFirstMatch(text, expected.candidateIndex)); + assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text, expected.candidateIndex)); + assertNull(TimestampFormatFinder.findFirstMatch(text, Integer.MAX_VALUE)); + assertNull(TimestampFormatFinder.findFirstFullMatch(text, Integer.MAX_VALUE)); + assertEquals(expected, TimestampFormatFinder.findFirstMatch(text, randomFrom(expected.jodaTimestampFormats))); + assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text, randomFrom(expected.jodaTimestampFormats))); + assertEquals(expected, TimestampFormatFinder.findFirstMatch(text, randomFrom(expected.javaTimestampFormats))); + assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text, 
randomFrom(expected.javaTimestampFormats))); + assertNull(TimestampFormatFinder.findFirstMatch(text, "wrong format")); + assertNull(TimestampFormatFinder.findFirstFullMatch(text, "wrong format")); + + validateJodaTimestampFormats(expected.jodaTimestampFormats, text, expectedEpochMs); + validateJavaTimestampFormats(expected.javaTimestampFormats, text, expectedEpochMs); + + assertTrue(expected.simplePattern.matcher(text).find()); + } + + private void validateJodaTimestampFormats(List jodaTimestampFormats, String text, long expectedEpochMs) { + + // All the test times are for Tue May 15 2018 16:14:56 UTC, which is 17:14:56 in London. + // This is the timezone that will be used for any text representations that don't include it. + org.joda.time.DateTimeZone defaultZone = org.joda.time.DateTimeZone.forID("Europe/London"); + org.joda.time.DateTime parsed; + for (int i = 0; i < jodaTimestampFormats.size(); ++i) { + try { + String timestampFormat = jodaTimestampFormats.get(i); + switch (timestampFormat) { + case "ISO8601": + parsed = org.joda.time.format.ISODateTimeFormat.dateTimeParser() + .withZone(defaultZone).withDefaultYear(2018).parseDateTime(text); + break; + default: + org.joda.time.format.DateTimeFormatter parser = + org.joda.time.format.DateTimeFormat.forPattern(timestampFormat).withZone(defaultZone).withLocale(Locale.ROOT); + parsed = parser.withDefaultYear(2018).parseDateTime(text); + break; + } + if (expectedEpochMs == parsed.getMillis()) { + break; + } + // If the last one isn't right then propagate + if (i == jodaTimestampFormats.size() - 1) { + assertEquals(expectedEpochMs, parsed.getMillis()); + } + } catch (RuntimeException e) { + // If the last one throws then propagate + if (i == jodaTimestampFormats.size() - 1) { + throw e; + } + } + } + } + + private void validateJavaTimestampFormats(List javaTimestampFormats, String text, long expectedEpochMs) { + + // All the test times are for Tue May 15 2018 16:14:56 UTC, which is 17:14:56 in London. + // This is the timezone that will be used for any text representations that don't include it. + java.time.ZoneId defaultZone = java.time.ZoneId.of("Europe/London"); + java.time.temporal.TemporalAccessor parsed; + for (int i = 0; i < javaTimestampFormats.size(); ++i) { + try { + String timestampFormat = javaTimestampFormats.get(i); + switch (timestampFormat) { + case "ISO8601": + parsed = DateFormatters.forPattern("strict_date_optional_time_nanos").withZone(defaultZone).parse(text); + break; + default: + java.time.format.DateTimeFormatter parser = new java.time.format.DateTimeFormatterBuilder() + .appendPattern(timestampFormat).parseDefaulting(java.time.temporal.ChronoField.YEAR_OF_ERA, 2018) + .toFormatter(Locale.ROOT); + // This next line parses the textual date without any default timezone, so if + // the text doesn't contain the timezone then the resulting temporal accessor + // will be incomplete (i.e. impossible to convert to an Instant). You would + // hope that it would be possible to specify a timezone to be used only in this + // case, and in Java 9 and 10 it is, by adding withZone(zone) before the + // parse(text) call. However, with Java 8 this overrides any timezone parsed + // from the text. The solution is to parse twice, once without a default + // timezone and then again with a default timezone if the first parse didn't + // find one in the text. 
+ parsed = parser.parse(text); + if (parsed.query(java.time.temporal.TemporalQueries.zone()) == null) { + // TODO: when Java 8 is no longer supported remove the two + // lines and comment above and the closing brace below + parsed = parser.withZone(defaultZone).parse(text); + } + break; + } + long actualEpochMs = java.time.Instant.from(parsed).toEpochMilli(); + if (expectedEpochMs == actualEpochMs) { + break; + } + // If the last one isn't right then propagate + if (i == javaTimestampFormats.size() - 1) { + assertEquals(expectedEpochMs, actualEpochMs); + } + } catch (RuntimeException e) { + // If the last one throws then propagate + if (i == javaTimestampFormats.size() - 1) { + throw e; + } + } + } + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java index 01c44147b04..b6f93a6e39b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java @@ -19,7 +19,7 @@ public class XmlFileStructureFinderTests extends FileStructureTestCase { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); FileStructureFinder structureFinder = factory.createFromSample(explanation, XML_SAMPLE, charset, hasByteOrderMarker, - FileStructureOverrides.EMPTY_OVERRIDES); + FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER); FileStructure structure = structureFinder.getStructure(); @@ -38,6 +38,6 @@ public class XmlFileStructureFinderTests extends FileStructureTestCase { assertNull(structure.getShouldTrimFields()); assertNull(structure.getGrokPattern()); assertEquals("timestamp", structure.getTimestampField()); - assertEquals(Collections.singletonList("UNIX_MS"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("UNIX_MS"), structure.getJodaTimestampFormats()); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporterTests.java index d3afb732418..4be8d74274c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporterTests.java @@ -31,8 +31,6 @@ import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; public class DataCountsReporterTests extends ESTestCase { - private static final int MAX_PERCENT_DATE_PARSE_ERRORS = 40; - private static final int MAX_PERCENT_OUT_OF_ORDER_ERRORS = 30; private Job job; private JobDataCountsPersister jobDataCountsPersister; @@ -42,8 +40,6 @@ public class DataCountsReporterTests extends ESTestCase { @Before public void setUpMocks() { settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(DataCountsReporter.ACCEPTABLE_PERCENTAGE_DATE_PARSE_ERRORS_SETTING.getKey(), MAX_PERCENT_DATE_PARSE_ERRORS) - .put(DataCountsReporter.ACCEPTABLE_PERCENTAGE_OUT_OF_ORDER_ERRORS_SETTING.getKey(), MAX_PERCENT_OUT_OF_ORDER_ERRORS) .build(); AnalysisConfig.Builder acBuilder = new AnalysisConfig.Builder(Arrays.asList(new Detector.Builder("metric", "field").build())); @@ -51,6 +47,7 @@ public 
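The parse-twice workaround described in the long comment inside validateJavaTimestampFormats is easy to miss in diff form. Below is a minimal, self-contained sketch of the same idiom using plain java.time; the class name, sample pattern, and default zone are illustrative and not part of the change set.

--------------------------------------------------
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalQueries;
import java.util.Locale;

public final class LenientZoneParse {

    // Parse text with the given pattern, falling back to a default zone only when
    // the text itself carries none. On Java 8, calling withZone(zone) up front would
    // override a zone parsed from the text, hence the two-step parse.
    static Instant parseWithDefaultZone(String text, String pattern, ZoneId defaultZone) {
        DateTimeFormatter parser = new DateTimeFormatterBuilder()
            .appendPattern(pattern)
            .parseDefaulting(ChronoField.YEAR_OF_ERA, 2018) // many log patterns omit the year
            .toFormatter(Locale.ROOT);
        TemporalAccessor parsed = parser.parse(text);
        if (parsed.query(TemporalQueries.zone()) == null) {
            parsed = parser.withZone(defaultZone).parse(text);
        }
        return Instant.from(parsed);
    }

    public static void main(String[] args) {
        // "May 15 17:14:56" carries no zone, so Europe/London is applied on the second parse.
        System.out.println(parseWithDefaultZone("May 15 17:14:56", "MMM dd HH:mm:ss",
            ZoneId.of("Europe/London"))); // prints 2018-05-15T16:14:56Z
    }
}
--------------------------------------------------

On Java 9 and later the fallback could instead be expressed with a single formatter, as the comment in the test notes.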
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java
index 01c44147b04..b6f93a6e39b 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java
@@ -19,7 +19,7 @@ public class XmlFileStructureFinderTests extends FileStructureTestCase {
         String charset = randomFrom(POSSIBLE_CHARSETS);
         Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
         FileStructureFinder structureFinder = factory.createFromSample(explanation, XML_SAMPLE, charset, hasByteOrderMarker,
-            FileStructureOverrides.EMPTY_OVERRIDES);
+            FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
 
         FileStructure structure = structureFinder.getStructure();
 
@@ -38,6 +38,6 @@ public class XmlFileStructureFinderTests extends FileStructureTestCase {
         assertNull(structure.getShouldTrimFields());
         assertNull(structure.getGrokPattern());
         assertEquals("timestamp", structure.getTimestampField());
-        assertEquals(Collections.singletonList("UNIX_MS"), structure.getTimestampFormats());
+        assertEquals(Collections.singletonList("UNIX_MS"), structure.getJodaTimestampFormats());
     }
 }
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporterTests.java
index d3afb732418..4be8d74274c 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporterTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporterTests.java
@@ -31,8 +31,6 @@ import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.verify;
 
 public class DataCountsReporterTests extends ESTestCase {
-    private static final int MAX_PERCENT_DATE_PARSE_ERRORS = 40;
-    private static final int MAX_PERCENT_OUT_OF_ORDER_ERRORS = 30;
 
     private Job job;
     private JobDataCountsPersister jobDataCountsPersister;
@@ -42,8 +40,6 @@ public class DataCountsReporterTests extends ESTestCase {
     @Before
     public void setUpMocks() {
         settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
-                .put(DataCountsReporter.ACCEPTABLE_PERCENTAGE_DATE_PARSE_ERRORS_SETTING.getKey(), MAX_PERCENT_DATE_PARSE_ERRORS)
-                .put(DataCountsReporter.ACCEPTABLE_PERCENTAGE_OUT_OF_ORDER_ERRORS_SETTING.getKey(), MAX_PERCENT_OUT_OF_ORDER_ERRORS)
                 .build();
 
         AnalysisConfig.Builder acBuilder = new AnalysisConfig.Builder(Arrays.asList(new Detector.Builder("metric", "field").build()));
@@ -51,6 +47,7 @@ public class DataCountsReporterTests extends ESTestCase {
         acBuilder.setLatency(TimeValue.ZERO);
         acBuilder.setDetectors(Arrays.asList(new Detector.Builder("metric", "field").build()));
+
         Job.Builder builder = new Job.Builder("sr");
         builder.setAnalysisConfig(acBuilder);
         builder.setDataDescription(new DataDescription.Builder());
@@ -59,13 +56,6 @@ public class DataCountsReporterTests extends ESTestCase {
         jobDataCountsPersister = Mockito.mock(JobDataCountsPersister.class);
     }
 
-    public void testSettingAcceptablePercentages() throws IOException {
-        DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, job, new DataCounts(job.getId()),
-                jobDataCountsPersister);
-        assertEquals(dataCountsReporter.getAcceptablePercentDateParseErrors(), MAX_PERCENT_DATE_PARSE_ERRORS);
-        assertEquals(dataCountsReporter.getAcceptablePercentOutOfOrderErrors(), MAX_PERCENT_OUT_OF_ORDER_ERRORS);
-    }
-
     public void testSimpleConstructor() throws Exception {
         DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, job, new DataCounts(job.getId()),
                 jobDataCountsPersister);
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DummyDataCountsReporter.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DummyDataCountsReporter.java
index bcf41a994b9..98ab4025bff 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DummyDataCountsReporter.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DummyDataCountsReporter.java
@@ -26,7 +26,7 @@ class DummyDataCountsReporter extends DataCountsReporter {
 
     int logStatusCallCount = 0;
 
-   DummyDataCountsReporter() {
+    DummyDataCountsReporter() {
         super(Settings.EMPTY, createJob(), new DataCounts("DummyJobId"), mock(JobDataCountsPersister.class));
     }
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java
index 43cc909e392..35a8bfae481 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java
@@ -126,6 +126,7 @@ public class AutodetectProcessManagerTests extends ESTestCase {
         normalizerFactory = mock(NormalizerFactory.class);
         auditor = mock(Auditor.class);
 
+        when(jobManager.getJobOrThrowIfUnknown("foo")).thenReturn(createJobDetails("foo"));
         doAnswer(invocationOnMock -> {
             @SuppressWarnings("unchecked")
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/ForecastParamsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/ForecastParamsTests.java
index 84d9e6ceabd..b3467a3d405 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/ForecastParamsTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/ForecastParamsTests.java
@@ -16,7 +16,6 @@ import static org.hamcrest.Matchers.equalTo;
 
 public class ForecastParamsTests extends ESTestCase {
 
-    private static ParseField END = new ParseField("end");
     private static ParseField DURATION = new ParseField("duration");
 
     public void testForecastIdsAreUnique() {
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerTests.java
index d06146ad53f..661eeca98db 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerTests.java
@@ -29,7 +29,7 @@ public class NormalizerTests extends ESTestCase {
     private static final String INDEX_NAME = "foo-index";
     private static final String QUANTILES_STATE = "someState";
     private static final int BUCKET_SPAN = 600;
-    private static final double INITIAL_SCORE = 2.0;
+    private static final double INITIAL_SCORE = 3.0;
     private static final double FACTOR = 2.0;
 
     private Bucket generateBucket(Date timestamp) throws IOException {
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ReservedFieldNamesTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ReservedFieldNamesTests.java
index 2fa4834d1ec..a08b53fba3c 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ReservedFieldNamesTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ReservedFieldNamesTests.java
@@ -11,11 +11,10 @@ import org.elasticsearch.xpack.core.ml.job.results.ReservedFieldNames;
 
 public class ReservedFieldNamesTests extends ESTestCase {
 
-    public void testIsValidFieldName() throws Exception {
+    public void testIsValidFieldName() {
         assertTrue(ReservedFieldNames.isValidFieldName("host"));
         assertTrue(ReservedFieldNames.isValidFieldName("host.actual"));
         assertFalse(ReservedFieldNames.isValidFieldName("actual.host"));
         assertFalse(ReservedFieldNames.isValidFieldName(AnomalyRecord.BUCKET_SPAN.getPreferredName()));
     }
-
 }
\ No newline at end of file
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/ChainTaskExecutorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/ChainTaskExecutorTests.java
index 8b1b6314a29..87b83852ff5 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/ChainTaskExecutorTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/ChainTaskExecutorTests.java
@@ -24,6 +24,7 @@ public class ChainTaskExecutorTests extends ESTestCase {
     private final ThreadPool threadPool = new TestThreadPool(getClass().getName());
     private final CountDownLatch latch = new CountDownLatch(1);
 
+    @Override
     @After
     public void tearDown() throws Exception {
         try {
@@ -37,8 +38,14 @@ public class ChainTaskExecutorTests extends ESTestCase {
         final List<String> strings = new ArrayList<>();
         ActionListener<Void> finalListener = createBlockingListener(() -> strings.add("last"), e -> fail());
         ChainTaskExecutor chainTaskExecutor = new ChainTaskExecutor(threadPool.generic(), false);
-        chainTaskExecutor.add(listener -> { strings.add("first"); listener.onResponse(null); });
-        chainTaskExecutor.add(listener -> { strings.add("second"); listener.onResponse(null); });
+        chainTaskExecutor.add(listener -> {
+            strings.add("first");
+            listener.onResponse(null);
+        });
+        chainTaskExecutor.add(listener -> {
+            strings.add("second");
+            listener.onResponse(null);
+        });
 
         chainTaskExecutor.execute(finalListener);
 
@@ -52,9 +59,17 @@ public class ChainTaskExecutorTests extends ESTestCase {
         ActionListener<Void> finalListener = createBlockingListener(() -> fail(),
                 e -> assertThat(e.getMessage(), equalTo("some error")));
         ChainTaskExecutor chainTaskExecutor = new ChainTaskExecutor(threadPool.generic(), true);
-        chainTaskExecutor.add(listener -> { strings.add("before"); listener.onResponse(null); });
-        chainTaskExecutor.add(listener -> { throw new RuntimeException("some error"); });
-        chainTaskExecutor.add(listener -> { strings.add("after"); listener.onResponse(null); });
+        chainTaskExecutor.add(listener -> {
+            strings.add("before");
+            listener.onResponse(null);
+        });
+        chainTaskExecutor.add(listener -> {
+            throw new RuntimeException("some error");
+        });
+        chainTaskExecutor.add(listener -> {
+            strings.add("after");
+            listener.onResponse(null);
+        });
 
         chainTaskExecutor.execute(finalListener);
 
@@ -68,9 +83,16 @@ public class ChainTaskExecutorTests extends ESTestCase {
         ActionListener<Void> finalListener = createBlockingListener(() -> fail(),
                 e -> assertThat(e.getMessage(), equalTo("some error 1")));
         ChainTaskExecutor chainTaskExecutor = new ChainTaskExecutor(threadPool.generic(), true);
-        chainTaskExecutor.add(listener -> { strings.add("before"); listener.onResponse(null); });
-        chainTaskExecutor.add(listener -> { throw new RuntimeException("some error 1"); });
-        chainTaskExecutor.add(listener -> { throw new RuntimeException("some error 2"); });
+        chainTaskExecutor.add(listener -> {
+            strings.add("before");
+            listener.onResponse(null);
+        });
+        chainTaskExecutor.add(listener -> {
+            throw new RuntimeException("some error 1");
+        });
+        chainTaskExecutor.add(listener -> {
+            throw new RuntimeException("some error 2");
+        });
 
         chainTaskExecutor.execute(finalListener);
 
@@ -83,9 +105,17 @@ public class ChainTaskExecutorTests extends ESTestCase {
         final List<String> strings = new ArrayList<>();
         ActionListener<Void> finalListener = createBlockingListener(() -> strings.add("last"), e -> fail());
         ChainTaskExecutor chainTaskExecutor = new ChainTaskExecutor(threadPool.generic(), false);
-        chainTaskExecutor.add(listener -> { strings.add("before"); listener.onResponse(null); });
-        chainTaskExecutor.add(listener -> { throw new RuntimeException("some error"); });
-        chainTaskExecutor.add(listener -> { strings.add("after"); listener.onResponse(null); });
+        chainTaskExecutor.add(listener -> {
+            strings.add("before");
+            listener.onResponse(null);
+        });
+        chainTaskExecutor.add(listener -> {
+            throw new RuntimeException("some error");
+        });
+        chainTaskExecutor.add(listener -> {
+            strings.add("after");
+            listener.onResponse(null);
+        });
 
         chainTaskExecutor.execute(finalListener);
diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java
index 027cb7de937..d18286a9db5 100644
--- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java
+++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java
@@ -39,6 +39,7 @@ import org.elasticsearch.xpack.core.ssl.SSLService;
 import org.elasticsearch.xpack.monitoring.action.TransportMonitoringBulkAction;
 import org.elasticsearch.xpack.monitoring.cleaner.CleanerService;
 import org.elasticsearch.xpack.monitoring.collector.Collector;
+import org.elasticsearch.xpack.monitoring.collector.ccr.CcrAutoFollowStatsCollector;
 import org.elasticsearch.xpack.monitoring.collector.ccr.CcrStatsCollector;
 import org.elasticsearch.xpack.monitoring.collector.cluster.ClusterStatsCollector;
 import org.elasticsearch.xpack.monitoring.collector.indices.IndexRecoveryCollector;
@@ -144,6 +145,7 @@ public class Monitoring extends Plugin implements ActionPlugin {
         collectors.add(new IndexRecoveryCollector(settings, clusterService, getLicenseState(), client));
         collectors.add(new JobStatsCollector(settings, clusterService, getLicenseState(), client));
         collectors.add(new CcrStatsCollector(settings, clusterService, getLicenseState(), client));
+        collectors.add(new CcrAutoFollowStatsCollector(settings, clusterService, getLicenseState(), client));
 
         final MonitoringService monitoringService =
                 new MonitoringService(settings, clusterService, threadPool, collectors, exporters);
@@ -183,6 +185,7 @@ public class Monitoring extends Plugin implements ActionPlugin {
         settings.add(IndexStatsCollector.INDEX_STATS_TIMEOUT);
         settings.add(JobStatsCollector.JOB_STATS_TIMEOUT);
         settings.add(CcrStatsCollector.CCR_STATS_TIMEOUT);
+        settings.add(CcrAutoFollowStatsCollector.CCR_AUTO_FOLLOW_STATS_TIMEOUT);
         settings.add(NodeStatsCollector.NODE_STATS_TIMEOUT);
         settings.addAll(Exporters.getSettings());
         return Collections.unmodifiableList(settings);
diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/AbstractCcrCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/AbstractCcrCollector.java
new file mode 100644
index 00000000000..f6b124d6df5
--- /dev/null
+++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/AbstractCcrCollector.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.monitoring.collector.ccr;
+
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.xpack.core.XPackSettings;
+import org.elasticsearch.xpack.core.ccr.client.CcrClient;
+import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc;
+import org.elasticsearch.xpack.monitoring.collector.Collector;
+
+import java.util.Collection;
+
+import static org.elasticsearch.xpack.core.ClientHelper.MONITORING_ORIGIN;
+import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin;
+import static org.elasticsearch.xpack.monitoring.collector.ccr.CcrStatsMonitoringDoc.TYPE;
+
+public abstract class AbstractCcrCollector extends Collector {
+
+    private final ThreadContext threadContext;
+    final CcrClient ccrClient;
+
+    AbstractCcrCollector(
+        final Settings settings,
+        final ClusterService clusterService,
+        final Setting<TimeValue> timeoutSetting,
+        final XPackLicenseState licenseState,
+        final CcrClient ccrClient,
+        final ThreadContext threadContext) {
+        super(settings, TYPE, clusterService, timeoutSetting, licenseState);
+        this.ccrClient = ccrClient;
+        this.threadContext = threadContext;
+    }
+
+    @Override
+    protected boolean shouldCollect(final boolean isElectedMaster) {
+        // this can only run when monitoring is allowed and CCR is enabled and allowed, but also only on the elected master node
+        return isElectedMaster
+            && super.shouldCollect(isElectedMaster)
+            && XPackSettings.CCR_ENABLED_SETTING.get(settings)
+            && licenseState.isCcrAllowed();
+    }
+
+
+    @Override
+    protected Collection<MonitoringDoc> doCollect(
+        final MonitoringDoc.Node node,
+        final long interval,
+        final ClusterState clusterState) throws Exception {
+        try (ThreadContext.StoredContext ignore = stashWithOrigin(threadContext, MONITORING_ORIGIN)) {
+            final long timestamp = timestamp();
+            final String clusterUuid = clusterUuid(clusterState);
+            return innerDoCollect(timestamp, clusterUuid, interval, node);
+        }
+    }
+
+    abstract Collection<MonitoringDoc> innerDoCollect(
+        long timestamp,
+        String clusterUuid,
+        long interval,
+        MonitoringDoc.Node node) throws Exception;
+}
diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/AutoFollowStatsMonitoringDoc.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/AutoFollowStatsMonitoringDoc.java
new file mode 100644
index 00000000000..82312203fd8
--- /dev/null
+++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/AutoFollowStatsMonitoringDoc.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.monitoring.collector.ccr;
+
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.core.ccr.AutoFollowStats;
+import org.elasticsearch.xpack.core.monitoring.MonitoredSystem;
+import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc;
+
+import java.io.IOException;
+import java.util.Objects;
+
+public class AutoFollowStatsMonitoringDoc extends MonitoringDoc {
+
+    public static final String TYPE = "ccr_auto_follow_stats";
+
+    private final AutoFollowStats stats;
+
+    public AutoFollowStats stats() {
+        return stats;
+    }
+
+    public AutoFollowStatsMonitoringDoc(
+        final String cluster,
+        final long timestamp,
+        final long intervalMillis,
+        final Node node,
+        final AutoFollowStats stats) {
+        super(cluster, timestamp, intervalMillis, node, MonitoredSystem.ES, TYPE, null);
+        this.stats = Objects.requireNonNull(stats, "stats");
+    }
+
+
+    @Override
+    protected void innerToXContent(final XContentBuilder builder, final Params params) throws IOException {
+        builder.startObject(TYPE);
+        {
+            stats.toXContentFragment(builder, params);
+        }
+        builder.endObject();
+    }
+
+}
diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrAutoFollowStatsCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrAutoFollowStatsCollector.java
new file mode 100644
index 00000000000..e179c204416
--- /dev/null
+++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrAutoFollowStatsCollector.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.monitoring.collector.ccr;
+
+import org.elasticsearch.client.Client;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.xpack.core.XPackClient;
+import org.elasticsearch.xpack.core.ccr.action.AutoFollowStatsAction;
+import org.elasticsearch.xpack.core.ccr.client.CcrClient;
+import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc;
+
+import java.util.Collection;
+import java.util.Collections;
+
+public final class CcrAutoFollowStatsCollector extends AbstractCcrCollector {
+
+    public static final Setting<TimeValue> CCR_AUTO_FOLLOW_STATS_TIMEOUT = collectionTimeoutSetting("ccr.auto_follow.stats.timeout");
+
+    public CcrAutoFollowStatsCollector(
+        final Settings settings,
+        final ClusterService clusterService,
+        final XPackLicenseState licenseState,
+        final Client client) {
+        super(settings, clusterService, CCR_AUTO_FOLLOW_STATS_TIMEOUT, licenseState, new XPackClient(client).ccr(),
+            client.threadPool().getThreadContext());
+    }
+
+    CcrAutoFollowStatsCollector(
+        final Settings settings,
+        final ClusterService clusterService,
+        final XPackLicenseState licenseState,
+        final CcrClient ccrClient,
+        final ThreadContext threadContext) {
+        super(settings, clusterService, CCR_AUTO_FOLLOW_STATS_TIMEOUT, licenseState, ccrClient, threadContext);
+    }
+
+    @Override
+    Collection<MonitoringDoc> innerDoCollect(
+        long timestamp,
+        String clusterUuid,
+        long interval,
+        MonitoringDoc.Node node) throws Exception {
+
+        final AutoFollowStatsAction.Request request = new AutoFollowStatsAction.Request();
+        final AutoFollowStatsAction.Response response = ccrClient.autoFollowStats(request).actionGet(getCollectionTimeout());
+
+        final AutoFollowStatsMonitoringDoc doc =
+            new AutoFollowStatsMonitoringDoc(clusterUuid, timestamp, interval, node, response.getStats());
+        return Collections.singletonList(doc);
+    }
+
+}
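Taken together, the three new CCR collector files form a small template-method refactor: AbstractCcrCollector owns the origin stashing, the timestamp, and the cluster UUID, while each subclass contributes only its stats fetch. A schematic sketch of that shape, using simplified stand-in types rather than the real Collector API:

--------------------------------------------------
import java.util.Collection;

// Stand-in for the MonitoringDoc-producing collector hierarchy; names are illustrative.
abstract class TemplateCollector<DOC> {

    // The base class centralises the steps every CCR collector would otherwise repeat.
    final Collection<DOC> collect() throws Exception {
        long timestamp = System.currentTimeMillis(); // stand-in for timestamp()
        String clusterUuid = "local";                // stand-in for clusterUuid(clusterState)
        // the real base class also stashes the monitoring origin on the thread context here
        return innerCollect(timestamp, clusterUuid);
    }

    // Subclasses supply only the service call that fetches their particular stats.
    abstract Collection<DOC> innerCollect(long timestamp, String clusterUuid) throws Exception;
}
--------------------------------------------------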
diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollector.java
index 510f430d196..e9f3d09ef43 100644
--- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollector.java
+++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollector.java
@@ -6,9 +6,7 @@
 package org.elasticsearch.xpack.monitoring.collector.ccr;
 
-import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.Client;
-import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
@@ -16,32 +14,24 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.xpack.core.XPackClient;
-import org.elasticsearch.xpack.core.XPackSettings;
 import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction;
 import org.elasticsearch.xpack.core.ccr.client.CcrClient;
 import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc;
-import org.elasticsearch.xpack.monitoring.collector.Collector;
 
 import java.util.Collection;
 import java.util.stream.Collectors;
 
-import static org.elasticsearch.xpack.core.ClientHelper.MONITORING_ORIGIN;
-import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin;
-import static org.elasticsearch.xpack.monitoring.collector.ccr.CcrStatsMonitoringDoc.TYPE;
-
-public class CcrStatsCollector extends Collector {
+public final class CcrStatsCollector extends AbstractCcrCollector {
 
     public static final Setting<TimeValue> CCR_STATS_TIMEOUT = collectionTimeoutSetting("ccr.stats.timeout");
 
-    private final ThreadContext threadContext;
-    private final CcrClient ccrClient;
-
     public CcrStatsCollector(
             final Settings settings,
             final ClusterService clusterService,
             final XPackLicenseState licenseState,
             final Client client) {
-        this(settings, clusterService, licenseState, new XPackClient(client).ccr(), client.threadPool().getThreadContext());
+        super(settings, clusterService, CCR_STATS_TIMEOUT, licenseState, new XPackClient(client).ccr(),
+            client.threadPool().getThreadContext());
     }
 
     CcrStatsCollector(
@@ -50,41 +40,26 @@ public class CcrStatsCollector extends Collector {
             final XPackLicenseState licenseState,
             final CcrClient ccrClient,
             final ThreadContext threadContext) {
-        super(settings, TYPE, clusterService, CCR_STATS_TIMEOUT, licenseState);
-        this.ccrClient = ccrClient;
-        this.threadContext = threadContext;
+        super(settings, clusterService, CCR_STATS_TIMEOUT, licenseState, ccrClient, threadContext);
     }
 
     @Override
-    protected boolean shouldCollect(final boolean isElectedMaster) {
-        // this can only run when monitoring is allowed and CCR is enabled and allowed, but also only on the elected master node
-        return isElectedMaster
-                && super.shouldCollect(isElectedMaster)
-                && XPackSettings.CCR_ENABLED_SETTING.get(settings)
-                && licenseState.isCcrAllowed();
-    }
+    Collection<MonitoringDoc> innerDoCollect(
+        long timestamp,
+        String clusterUuid,
+        long interval,
+        MonitoringDoc.Node node) throws Exception {
 
-    @Override
-    protected Collection<MonitoringDoc> doCollect(
-            final MonitoringDoc.Node node,
-            final long interval,
-            final ClusterState clusterState) throws Exception {
-        try (ThreadContext.StoredContext ignore = stashWithOrigin(threadContext, MONITORING_ORIGIN)) {
             final CcrStatsAction.StatsRequest request = new CcrStatsAction.StatsRequest();
             request.setIndices(getCollectionIndices());
-            request.setIndicesOptions(IndicesOptions.lenientExpandOpen());
             final CcrStatsAction.StatsResponses responses = ccrClient.stats(request).actionGet(getCollectionTimeout());
 
-            final long timestamp = timestamp();
-            final String clusterUuid = clusterUuid(clusterState);
-
-            return responses
-                    .getStatsResponses()
-                    .stream()
-                    .map(stats -> new CcrStatsMonitoringDoc(clusterUuid, timestamp, interval, node, stats.status()))
-                    .collect(Collectors.toList());
-        }
+        return responses
+            .getStatsResponses()
+            .stream()
+            .map(stats -> new CcrStatsMonitoringDoc(clusterUuid, timestamp, interval, node, stats.status()))
+            .collect(Collectors.toList());
     }
 }
diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java
index 1a05b034364..23fe4d46543 100644
--- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java
+++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java
@@ -112,7 +112,8 @@ public class ClusterStatsCollector extends Collector {
         // Adds a cluster stats document
         return Collections.singleton(
                 new ClusterStatsMonitoringDoc(clusterUuid, timestamp(), interval, node, clusterName, version, clusterStats.getStatus(),
-                        license, apmIndicesExist, xpackUsage, clusterStats, clusterState, clusterNeedsTLSEnabled));
+                        license, apmIndicesExist, xpackUsage, clusterStats, clusterState,
+                        clusterNeedsTLSEnabled));
     }
 
     boolean doAPMIndicesExist(final ClusterState clusterState) {
diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDoc.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDoc.java
index 414945c2a15..75c0ba6b81c 100644
--- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDoc.java
+++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDoc.java
@@ -8,10 +8,12 @@ package org.elasticsearch.xpack.monitoring.collector.cluster;
 import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.health.ClusterHealthStatus;
+import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.collect.MapBuilder;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.license.License;
@@ -45,6 +47,7 @@ public class ClusterStatsMonitoringDoc extends MonitoringDoc {
                     ClusterState.Metric.NODES));
 
     public static final String TYPE = "cluster_stats";
+    protected static final String SETTING_CLUSTER_METADATA = "cluster.metadata";
 
     private final String clusterName;
     private final String version;
@@ -118,6 +121,14 @@ public class ClusterStatsMonitoringDoc extends MonitoringDoc {
         return clusterNeedsTLSEnabled;
     }
 
+    Settings getClusterMetaDataSettings() {
+        MetaData metaData = this.clusterState.getMetaData();
+        if (metaData == null) {
+            return Settings.EMPTY;
+        }
+        return metaData.settings().getAsSettings(SETTING_CLUSTER_METADATA);
+    }
+
     @Override
     protected void innerToXContent(XContentBuilder builder, Params params) throws IOException {
         builder.field("cluster_name", clusterName);
@@ -156,6 +167,25 @@ public class ClusterStatsMonitoringDoc extends MonitoringDoc {
             builder.endObject();
         }
 
+        Settings clusterMetaDataSettings = getClusterMetaDataSettings();
+        if (clusterMetaDataSettings != null) {
+            builder.startObject("cluster_settings");
+            {
+                if (clusterMetaDataSettings.size() > 0) {
+                    builder.startObject("cluster");
+                    {
+                        builder.startObject("metadata");
+                        {
+                            clusterMetaDataSettings.toXContent(builder, params);
+                        }
+                        builder.endObject();
+                    }
+                    builder.endObject();
+                }
+            }
+            builder.endObject();
+        }
+
         builder.startObject("stack_stats");
         {
             // in the future, it may be useful to pass in an object that represents APM (and others), but for now this
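To make the new cluster_settings section concrete, here is a sketch of the cluster.metadata prefix filtering that getClusterMetaDataSettings() relies on; the setting names and values are made up, and only Settings#getAsSettings plus the nesting shown in innerToXContent come from the change.

--------------------------------------------------
import org.elasticsearch.common.settings.Settings;

public final class ClusterMetadataSettingsDemo {
    public static void main(String[] args) {
        // Hypothetical persistent cluster settings; only the "cluster.metadata." subtree
        // is picked out and re-rooted by getAsSettings.
        Settings clusterSettings = Settings.builder()
            .put("cluster.metadata.owner", "infra-team")      // illustrative metadata setting
            .put("cluster.routing.allocation.enable", "all")  // filtered out by the prefix
            .build();

        Settings metadataOnly = clusterSettings.getAsSettings("cluster.metadata");
        System.out.println(metadataOnly.get("owner")); // -> infra-team

        // innerToXContent then serialises this subtree as:
        // "cluster_settings": { "cluster": { "metadata": { "owner": "infra-team" } } }
    }
}
--------------------------------------------------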
diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java
index 294f56e26b0..ffc85cedc21 100644
--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java
+++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java
@@ -203,7 +203,12 @@ public class ClusterStatsMonitoringDocTests extends BaseMonitoringDocTestCase
                 document, final boolean apmIndicesExist) {
         final Map source = (Map) document.get("_source");
-        assertEquals(11, source.size());
+        assertEquals(12, source.size());
 
         assertThat((String) source.get("cluster_name"), not(isEmptyOrNullString()));
         assertThat(source.get("version"), equalTo(Version.CURRENT.toString()));
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java
index 4f4336f11ab..d402a56f885 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.rollup.rest;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestController;
@@ -27,7 +28,8 @@ public class RestGetRollupIndexCapsAction extends BaseRestHandler {
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) {
         String index = restRequest.param(INDEX.getPreferredName());
         IndicesOptions options = IndicesOptions.fromRequest(restRequest, IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED);
-        GetRollupIndexCapsAction.Request request = new GetRollupIndexCapsAction.Request(new String[]{index}, options);
+        GetRollupIndexCapsAction.Request request =
+            new GetRollupIndexCapsAction.Request(Strings.splitStringByCommaToArray(index), options);
 
         return channel -> client.execute(GetRollupIndexCapsAction.INSTANCE, request, new RestToXContentListener<>(channel));
     }
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java
index d7bb34bb156..5f9bd9fa01d 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java
@@ -81,6 +81,7 @@ public class SearchActionTests extends ESTestCase {
 
     private NamedWriteableRegistry namedWriteableRegistry;
 
+    @Override
     @Before
     public void setUp() throws Exception {
         super.setUp();
@@ -384,7 +385,7 @@ public class SearchActionTests extends ESTestCase {
         SearchRequest rollup = msearch.requests().get(1);
         assertThat(rollup.indices().length, equalTo(1));
         assertThat(rollup.indices()[0], equalTo(rollupIndices[0]));
-        assert(rollup.source().aggregations().getAggregatorFactories().get(0) instanceof FilterAggregationBuilder);
+        assert(rollup.source().aggregations().getAggregatorFactories().iterator().next() instanceof FilterAggregationBuilder);
     }
 
     public void testGoodButNullQuery() {
@@ -417,7 +418,7 @@ public class SearchActionTests extends ESTestCase {
         SearchRequest rollup = msearch.requests().get(1);
         assertThat(rollup.indices().length, equalTo(1));
         assertThat(rollup.indices()[0], equalTo(rollupIndices[0]));
-        assert(rollup.source().aggregations().getAggregatorFactories().get(0) instanceof FilterAggregationBuilder);
+        assert(rollup.source().aggregations().getAggregatorFactories().iterator().next() instanceof FilterAggregationBuilder);
     }
 
     public void testTwoMatchingJobs() {
@@ -460,7 +461,7 @@ public class SearchActionTests extends ESTestCase {
         SearchRequest rollup = msearch.requests().get(1);
         assertThat(rollup.indices().length, equalTo(1));
         assertThat(rollup.indices()[0], equalTo(rollupIndices[0]));
-        assert(rollup.source().aggregations().getAggregatorFactories().get(0) instanceof FilterAggregationBuilder);
+        assert(rollup.source().aggregations().getAggregatorFactories().iterator().next() instanceof FilterAggregationBuilder);
 
         assertThat(msearch.requests().size(), equalTo(2));
     }
@@ -507,7 +508,7 @@ public class SearchActionTests extends ESTestCase {
         SearchRequest rollup = msearch.requests().get(1);
         assertThat(rollup.indices().length, equalTo(1));
         assertThat(rollup.indices()[0], equalTo(rollupIndices[0]));
-        assert(rollup.source().aggregations().getAggregatorFactories().get(0) instanceof FilterAggregationBuilder);
+        assert(rollup.source().aggregations().getAggregatorFactories().iterator().next() instanceof FilterAggregationBuilder);
 
         // The executed query should match the first job ("foo") because the second job contained a histo and the first didn't,
@@ -523,8 +524,7 @@ public class SearchActionTests extends ESTestCase {
     public void testNoIndicesToSeparate() {
         String[] indices = new String[]{};
         ImmutableOpenMap<String, IndexMetaData> meta = ImmutableOpenMap.<String, IndexMetaData>builder().build();
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
-                () -> TransportRollupSearchAction.separateIndices(indices, meta));
+        expectThrows(IllegalArgumentException.class, () -> TransportRollupSearchAction.separateIndices(indices, meta));
     }
 
     public void testSeparateAll() {
@@ -774,6 +774,7 @@ public class SearchActionTests extends ESTestCase {
         MultiSearchResponse msearchResponse
                 = new MultiSearchResponse(new MultiSearchResponse.Item[]{unrolledResponse, rolledResponse}, 123);
+
         SearchResponse response = TransportRollupSearchAction.processResponses(separateIndices,
                 msearchResponse, mock(InternalAggregation.ReduceContext.class));
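The recurring get(0) to iterator().next() edits in the rollup tests track the aggregator factories now being exposed as a java.util.Collection rather than a List, so positional access is no longer available. A tiny generic illustration of the equivalent first-element access (plain Java, not Elasticsearch API):

--------------------------------------------------
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

public final class FirstElementDemo {

    // Before: the factories came back as a List, so positional access worked.
    static <T> T firstOfList(List<T> factories) {
        return factories.get(0);
    }

    // After: only Collection is guaranteed, so go through the iterator.
    static <T> T firstOfCollection(Collection<T> factories) {
        return factories.iterator().next();
    }

    public static void main(String[] args) {
        Collection<String> factories = Arrays.asList("filter", "date_histogram");
        System.out.println(firstOfCollection(factories)); // -> filter
    }
}
--------------------------------------------------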
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java
index f5d335ca6f1..bee43bce471 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java
@@ -200,7 +200,6 @@ public class IndexerUtilsTests extends AggregatorTestCase {
         String indexName = randomAlphaOfLengthBetween(1, 10);
         RollupIndexerJobStats stats= new RollupIndexerJobStats(0, 0, 0, 0);
 
-        String timestampField = "the_histo";
         String valueField = "the_avg";
 
         Directory directory = newDirectory();
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
index 55f1cfbdbb2..e406ea6735e 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
@@ -29,7 +29,6 @@ import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.SearchResponseSections;
 import org.elasticsearch.action.search.ShardSearchFailure;
-import org.elasticsearch.common.joda.DateMathParser;
 import org.elasticsearch.common.joda.Joda;
 import org.elasticsearch.common.rounding.Rounding;
 import org.elasticsearch.common.unit.TimeValue;
@@ -47,13 +46,13 @@ import org.elasticsearch.search.aggregations.AggregatorTestCase;
 import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation;
 import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
+import org.elasticsearch.xpack.core.indexing.IndexerState;
 import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers;
 import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;
 import org.elasticsearch.xpack.core.rollup.job.GroupConfig;
 import org.elasticsearch.xpack.core.rollup.job.MetricConfig;
 import org.elasticsearch.xpack.core.rollup.job.RollupJob;
 import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig;
-import org.elasticsearch.xpack.core.indexing.IndexerState;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.junit.Before;
@@ -601,13 +600,14 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
             RangeQueryBuilder range = (RangeQueryBuilder) request.source().query();
             final DateTimeZone timeZone = range.timeZone() != null ? DateTimeZone.forID(range.timeZone()) : null;
             Query query = timestampField.rangeQuery(range.from(), range.to(), range.includeLower(), range.includeUpper(),
-                    null, timeZone, new DateMathParser(Joda.forPattern(range.format())), queryShardContext);
+                    null, timeZone, Joda.forPattern(range.format()).toDateMathParser(), queryShardContext);
 
             // extract composite agg
             assertThat(request.source().aggregations().getAggregatorFactories().size(), equalTo(1));
-            assertThat(request.source().aggregations().getAggregatorFactories().get(0), instanceOf(CompositeAggregationBuilder.class));
+            assertThat(request.source().aggregations().getAggregatorFactories().iterator().next(),
+                instanceOf(CompositeAggregationBuilder.class));
             CompositeAggregationBuilder aggBuilder =
-                    (CompositeAggregationBuilder) request.source().aggregations().getAggregatorFactories().get(0);
+                    (CompositeAggregationBuilder) request.source().aggregations().getAggregatorFactories().iterator().next();
 
             CompositeAggregation result = null;
             try {
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
index 42a2ad767d3..76b1a87f682 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
@@ -658,7 +658,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
         assert getLicenseState() != null;
         if (XPackSettings.DLS_FLS_ENABLED.get(settings)) {
             module.setSearcherWrapper(indexService ->
-                    new SecurityIndexSearcherWrapper(indexService.getIndexSettings(),
+                    new SecurityIndexSearcherWrapper(
                             shardId -> indexService.newQueryShardContext(shardId.id(),
                             // we pass a null index reader, which is legal and will disable rewrite optimizations
                             // based on index statistics, which is probably safer...
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java
index 78670dd99f6..ccc26bfc899 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java
@@ -6,11 +6,11 @@
 package org.elasticsearch.xpack.security.authc;
 
 import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.client.Client;
-import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.AbstractRunnable;
@@ -35,15 +35,14 @@ import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
  * Responsible for cleaning the invalidated tokens from the invalidated tokens index.
  */
 final class ExpiredTokenRemover extends AbstractRunnable {
+    private static final Logger logger = LogManager.getLogger(ExpiredTokenRemover.class);
 
     private final Client client;
     private final AtomicBoolean inProgress = new AtomicBoolean(false);
-    private final Logger logger;
     private final TimeValue timeout;
 
     ExpiredTokenRemover(Settings settings, Client client) {
         this.client = client;
-        this.logger = Loggers.getLogger(getClass(), settings);
         this.timeout = TokenService.DELETE_TIMEOUT.get(settings);
     }
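The security changes from here on repeat a single logging migration: a per-instance logger obtained through the settings-aware Loggers/RealmConfig helpers becomes a static Log4j logger from LogManager. Schematically, with a hypothetical class name:

--------------------------------------------------
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

// "SomeStore" stands in for the stores, realms, and handlers touched below.
public class SomeStore {

    // Before (removed lines): an instance field assigned in the constructor, e.g.
    //     this.logger = Loggers.getLogger(getClass(), settings);
    // After (added lines): a static logger that needs no Settings at all.
    private static final Logger logger = LogManager.getLogger(SomeStore.class);

    public static void main(String[] args) {
        logger.info("static Log4j logger, resolved once per class");
    }
}
--------------------------------------------------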
.setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings)) .setQuery(query) .setSize(1000) .setFetchSource(true) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStore.java index 15a6c2c41da..faece90a89b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStore.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.security.authc.file; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchException; @@ -42,8 +43,7 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; public class FileUserPasswdStore { - - private final Logger logger; + private static final Logger logger = LogManager.getLogger(FileUserPasswdStore.class); private final Path file; private final Settings settings; @@ -55,7 +55,6 @@ public class FileUserPasswdStore { } FileUserPasswdStore(RealmConfig config, ResourceWatcherService watcherService, Runnable listener) { - logger = config.logger(FileUserPasswdStore.class); file = resolveFile(config.env()); settings = config.globalSettings(); users = parseFileLenient(file, logger, settings); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStore.java index e17d8c5c7ec..e79621964e7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStore.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.security.authc.file; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchException; @@ -39,11 +40,10 @@ import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.Strings.collectionToCommaDelimitedString; public class FileUserRolesStore { + private static final Logger logger = LogManager.getLogger(FileUserRolesStore.class); private static final Pattern USERS_DELIM = Pattern.compile("\\s*,\\s*"); - private final Logger logger; - private final Path file; private final CopyOnWriteArrayList listeners; private volatile Map userRoles; @@ -53,7 +53,6 @@ public class FileUserRolesStore { } FileUserRolesStore(RealmConfig config, ResourceWatcherService watcherService, Runnable listener) { - logger = config.logger(FileUserRolesStore.class); file = resolveFile(config.env()); userRoles = parseFileLenient(file, logger); listeners = new CopyOnWriteArrayList<>(Collections.singletonList(listener)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactory.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactory.java index d062e458895..78f8c68f124 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactory.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactory.java @@ -11,6 +11,7 @@ import com.unboundid.ldap.sdk.LDAPURL; import com.unboundid.ldap.sdk.ServerSet; import com.unboundid.util.ssl.HostNameSSLSocketVerifier; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; @@ -62,7 +63,7 @@ public abstract class SessionFactory { protected SessionFactory(RealmConfig config, SSLService sslService, ThreadPool threadPool) { this.config = config; - this.logger = config.logger(getClass()); + this.logger = LogManager.getLogger(getClass()); final Settings settings = config.settings(); TimeValue searchTimeout = settings.getAsTime(SessionFactorySettings.TIMEOUT_LDAP_SETTING, SessionFactorySettings.TIMEOUT_DEFAULT); if (searchTimeout.millis() < 1000L) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticator.java index c780055edd5..015cb1f8b18 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticator.java @@ -19,7 +19,6 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.opensaml.core.xml.XMLObject; import org.opensaml.saml.saml2.core.Assertion; import org.opensaml.saml.saml2.core.Attribute; @@ -52,12 +51,11 @@ class SamlAuthenticator extends SamlRequestHandler { private static final String RESPONSE_TAG_NAME = "Response"; - SamlAuthenticator(RealmConfig realmConfig, - Clock clock, + SamlAuthenticator(Clock clock, IdpConfiguration idp, SpConfiguration sp, TimeValue maxSkew) { - super(realmConfig, clock, idp, sp, maxSkew); + super(clock, idp, sp, maxSkew); } /** diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlLogoutRequestHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlLogoutRequestHandler.java index 3e827952f45..9bd8527e373 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlLogoutRequestHandler.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlLogoutRequestHandler.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.core.internal.io.Streams; import org.elasticsearch.rest.RestUtils; -import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.opensaml.saml.common.SAMLObject; import org.opensaml.saml.saml2.core.EncryptedID; import org.opensaml.saml.saml2.core.LogoutRequest; @@ -42,8 +41,8 @@ public class SamlLogoutRequestHandler extends SamlRequestHandler { private static final String REQUEST_TAG_NAME = "LogoutRequest"; - SamlLogoutRequestHandler(RealmConfig realmConfig, Clock clock, IdpConfiguration idp, SpConfiguration sp, TimeValue maxSkew) { 
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticator.java
index c780055edd5..015cb1f8b18 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticator.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticator.java
@@ -19,7 +19,6 @@ import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.xpack.core.security.authc.RealmConfig;
import org.opensaml.core.xml.XMLObject;
import org.opensaml.saml.saml2.core.Assertion;
import org.opensaml.saml.saml2.core.Attribute;
@@ -52,12 +51,11 @@
    private static final String RESPONSE_TAG_NAME = "Response";

-   SamlAuthenticator(RealmConfig realmConfig,
-                     Clock clock,
+   SamlAuthenticator(Clock clock,
                      IdpConfiguration idp,
                      SpConfiguration sp,
                      TimeValue maxSkew) {
-       super(realmConfig, clock, idp, sp, maxSkew);
+       super(clock, idp, sp, maxSkew);
    }

    /**
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlLogoutRequestHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlLogoutRequestHandler.java
index 3e827952f45..9bd8527e373 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlLogoutRequestHandler.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlLogoutRequestHandler.java
@@ -23,7 +23,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.core.internal.io.Streams;
import org.elasticsearch.rest.RestUtils;
-import org.elasticsearch.xpack.core.security.authc.RealmConfig;
import org.opensaml.saml.common.SAMLObject;
import org.opensaml.saml.saml2.core.EncryptedID;
import org.opensaml.saml.saml2.core.LogoutRequest;
@@ -42,8 +41,8 @@ public class SamlLogoutRequestHandler extends SamlRequestHandler {
    private static final String REQUEST_TAG_NAME = "LogoutRequest";

-   SamlLogoutRequestHandler(RealmConfig realmConfig, Clock clock, IdpConfiguration idp, SpConfiguration sp, TimeValue maxSkew) {
-       super(realmConfig, clock, idp, sp, maxSkew);
+   SamlLogoutRequestHandler(Clock clock, IdpConfiguration idp, SpConfiguration sp, TimeValue maxSkew) {
+       super(clock, idp, sp, maxSkew);
    }

    /**
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java
index 4a9db7c5d61..36ad208df2b 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java
@@ -15,6 +15,7 @@ import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.ExceptionsHelper;
@@ -134,6 +135,7 @@ import static org.elasticsearch.xpack.core.security.authc.saml.SamlRealmSettings
 * are still cool and no chance to opt out
 */
public final class SamlRealm extends Realm implements Releasable {
+   private static final Logger logger = LogManager.getLogger(SamlRealm.class);

    public static final String USER_METADATA_NAMEID_VALUE = "saml_" + SamlAttributes.NAMEID_SYNTHENTIC_ATTRIBUTE;
    public static final String USER_METADATA_NAMEID_FORMAT = USER_METADATA_NAMEID_VALUE + "_format";
@@ -178,7 +180,6 @@
     */
    public static SamlRealm create(RealmConfig config, SSLService sslService, ResourceWatcherService watcherService,
                                   UserRoleMapper roleMapper) throws Exception {
-       final Logger logger = config.logger(SamlRealm.class);
        SamlUtils.initialize(logger);

        if (TokenService.isTokenServiceEnabled(config.globalSettings()) == false) {
@@ -196,9 +197,9 @@
        final Clock clock = Clock.systemUTC();
        final IdpConfiguration idpConfiguration = getIdpConfiguration(config, metadataResolver, idpDescriptor);
        final TimeValue maxSkew = CLOCK_SKEW.get(config.settings());
-       final SamlAuthenticator authenticator = new SamlAuthenticator(config, clock, idpConfiguration, serviceProvider, maxSkew);
+       final SamlAuthenticator authenticator = new SamlAuthenticator(clock, idpConfiguration, serviceProvider, maxSkew);
        final SamlLogoutRequestHandler logoutHandler =
-           new SamlLogoutRequestHandler(config, clock, idpConfiguration, serviceProvider, maxSkew);
+           new SamlLogoutRequestHandler(clock, idpConfiguration, serviceProvider, maxSkew);

        final SamlRealm realm = new SamlRealm(config, roleMapper, authenticator, logoutHandler, idpDescriptor, serviceProvider);
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRequestHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRequestHandler.java
index 0f48c996a77..b7b91fe3f03 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRequestHandler.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRequestHandler.java
@@ -6,14 +6,13 @@
package org.elasticsearch.xpack.security.authc.saml;

import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.xpack.core.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.support.RestorableContextClassLoader;
import org.joda.time.DateTime;
import org.opensaml.core.xml.XMLObject;
@@ -82,7 +81,7 @@
        }
    });

-   protected final Logger logger;
+   protected final Logger logger = LogManager.getLogger(getClass());

    @Nullable
    protected final Decrypter decrypter;
@@ -93,8 +92,7 @@
    private final TimeValue maxSkew;
    private final UnmarshallerFactory unmarshallerFactory;

-   public SamlRequestHandler(RealmConfig realmConfig, Clock clock, IdpConfiguration idp, SpConfiguration sp, TimeValue maxSkew) {
-       this.logger = Loggers.getLogger(getClass(), realmConfig.globalSettings());
+   public SamlRequestHandler(Clock clock, IdpConfiguration idp, SpConfiguration sp, TimeValue maxSkew) {
        this.clock = clock;
        this.idp = idp;
        this.sp = sp;
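Reviewer note (not part of the patch): the four SAML hunks above drop `RealmConfig` from the handler constructors entirely — it was only threaded through to build a logger. The shape of the change, sketched with hypothetical stand-in types so the snippet is self-contained (the real `IdpConfiguration`/`SpConfiguration` live in the x-pack SAML package):

--------------------------------------
import java.time.Clock;

// Hypothetical stand-ins for IdpConfiguration / SpConfiguration.
final class Idp { }
final class Sp { }

class HandlerSketch {
    private final Clock clock;
    private final Idp idp;
    private final Sp sp;

    // Only the collaborators the handler actually uses remain;
    // the realm-wide config object is gone from the signature.
    HandlerSketch(Clock clock, Idp idp, Sp sp) {
        this.clock = clock;
        this.idp = idp;
        this.sp = sp;
    }
}
--------------------------------------

The payoff shows up in the tests further down, which no longer have to fabricate a `RealmConfig`, `Environment`, and `ThreadContext` just to construct a handler.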
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java
index 9ff4cd9be82..8a02977d55c 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java
@@ -20,6 +20,7 @@ import java.util.concurrent.CopyOnWriteArrayList;
import com.unboundid.ldap.sdk.DN;
import com.unboundid.ldap.sdk.LDAPException;
import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
@@ -43,8 +44,8 @@ import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.rela
 * This class loads and monitors the file defining the mappings of DNs to internal ES Roles.
 */
public class DnRoleMapper implements UserRoleMapper {
+   private static final Logger logger = LogManager.getLogger(DnRoleMapper.class);

-   protected final Logger logger;
    protected final RealmConfig config;

    private final Path file;
@@ -54,7 +55,6 @@
    public DnRoleMapper(RealmConfig config, ResourceWatcherService watcherService) {
        this.config = config;
-       this.logger = config.logger(getClass());

        useUnmappedGroupsAsRoles = DnRoleMapperSettings.USE_UNMAPPED_GROUPS_AS_ROLES_SETTING.get(config.settings());
        file = resolveFile(config.settings(), config.env());
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/RoleMappingFileBootstrapCheck.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/RoleMappingFileBootstrapCheck.java
index c4193c19219..81b0dc6ea48 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/RoleMappingFileBootstrapCheck.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/RoleMappingFileBootstrapCheck.java
@@ -7,6 +7,7 @@
package org.elasticsearch.xpack.security.authc.support;

import java.nio.file.Path;
+import org.apache.logging.log4j.LogManager;
import org.elasticsearch.bootstrap.BootstrapCheck;
import org.elasticsearch.bootstrap.BootstrapContext;
import org.elasticsearch.xpack.core.security.authc.RealmConfig;
@@ -28,7 +29,7 @@ public class RoleMappingFileBootstrapCheck implements BootstrapCheck {
    @Override
    public BootstrapCheckResult check(BootstrapContext context) {
        try {
-           DnRoleMapper.parseFile(path, realmConfig.logger(getClass()), realmConfig.type(), realmConfig.name(), true);
+           DnRoleMapper.parseFile(path, LogManager.getLogger(getClass()), realmConfig.type(), realmConfig.name(), true);
            return BootstrapCheckResult.success();
        } catch (Exception e) {
            return BootstrapCheckResult.failure(e.getMessage());
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java
index 677d13082ca..b45de8184d6 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java
@@ -16,7 +16,6 @@ import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@@ -56,6 +55,7 @@ import java.util.stream.Stream;
import static org.elasticsearch.action.DocWriteResponse.Result.CREATED;
import static org.elasticsearch.action.DocWriteResponse.Result.DELETED;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING;
import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin;
@@ -129,7 +129,7 @@ public class NativeRoleMappingStore extends AbstractComponent implements UserRol
        final Supplier<ThreadContext.StoredContext> supplier = client.threadPool().getThreadContext().newRestorableContext(false);
        try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) {
            SearchRequest request = client.prepareSearch(SECURITY_INDEX_NAME)
-               .setScroll(TimeValue.timeValueSeconds(10L))
+               .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings))
                .setTypes(SECURITY_GENERIC_TYPE)
                .setQuery(query)
                .setSize(1000)
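Reviewer note (not part of the patch): the hardcoded 10-second scroll keep-alive was fragile on slow or overloaded clusters; this store (and `NativePrivilegeStore`/`NativeRolesStore` below) now reuses the node-wide `search.default_keep_alive` setting instead. A sketch of the lookup, assuming the 6.x classpath:

--------------------------------------
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;

import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING;

class KeepAliveSketch {
    // Returns the operator-configured keep-alive, or the setting's
    // built-in default when search.default_keep_alive is unset.
    static TimeValue scrollKeepAlive(Settings nodeSettings) {
        return DEFAULT_KEEPALIVE_SETTING.get(nodeSettings);
    }
}
--------------------------------------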
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java
index beb2ca60fb2..7e1cc49e2c0 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java
@@ -97,9 +97,7 @@ public class CompositeRolesStore extends AbstractComponent {
                               ThreadContext threadContext, XPackLicenseState licenseState) {
        super(settings);
        this.fileRolesStore = fileRolesStore;
-       // invalidating all on a file based role update is heavy handed to say the least, but in general this should be infrequent so the
-       // impact isn't really worth the added complexity of only clearing the changed values
-       fileRolesStore.addListener(this::invalidateAll);
+       fileRolesStore.addListener(this::invalidate);
        this.nativeRolesStore = nativeRolesStore;
        this.reservedRolesStore = reservedRolesStore;
        this.privilegeStore = privilegeStore;
@@ -356,6 +354,23 @@
        negativeLookupCache.remove(role);
    }

+   public void invalidate(Set<String> roles) {
+       numInvalidation.incrementAndGet();
+
+       // the cache cannot be modified while doing this operation per the terms of the cache iterator
+       try (ReleasableLock ignored = writeLock.acquire()) {
+           Iterator<Set<String>> keyIter = roleCache.keys().iterator();
+           while (keyIter.hasNext()) {
+               Set<String> key = keyIter.next();
+               if (Sets.haveEmptyIntersection(key, roles) == false) {
+                   keyIter.remove();
+               }
+           }
+       }
+
+       negativeLookupCache.removeAll(roles);
+   }
+
    public void usageStats(ActionListener<Map<String, Object>> listener) {
        final Map<String, Object> usage = new HashMap<>(2);
        usage.put("file", fileRolesStore.usageStats());
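Reviewer note (not part of the patch): this replaces the old behaviour where any change to the roles file blew away the entire role cache. The new `invalidate(Set<String>)` evicts only cache entries whose key (a set of role names) intersects the changed roles. A minimal model with plain-JDK collections, using `Collections.disjoint` in place of the patch's `Sets.haveEmptyIntersection`:

--------------------------------------
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

class RoleCacheSketch<V> {
    private final Map<Set<String>, V> cache = new ConcurrentHashMap<>();

    // Evict only entries built from at least one of the changed roles.
    // (The real store additionally holds a write lock here because its
    // cache forbids modification while iterating; ConcurrentHashMap's
    // iterator tolerates concurrent removal, so the sketch omits it.)
    void invalidate(Set<String> changedRoles) {
        Iterator<Set<String>> keys = cache.keySet().iterator();
        while (keys.hasNext()) {
            if (Collections.disjoint(keys.next(), changedRoles) == false) {
                keys.remove();
            }
        }
    }
}
--------------------------------------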
"changed" : "removed"); } catch (Exception e) { logger.error( (Supplier) () -> new ParameterizedMessage( @@ -331,9 +344,13 @@ public class FileRolesStore extends AbstractComponent { return; } - synchronized (FileRolesStore.this) { - listeners.forEach(Runnable::run); - } + final Set changedOrMissingRoles = Sets.difference(previousPermissions.entrySet(), permissions.entrySet()) + .stream() + .map(Map.Entry::getKey) + .collect(Collectors.toSet()); + final Set addedRoles = Sets.difference(permissions.keySet(), previousPermissions.keySet()); + final Set changedRoles = Collections.unmodifiableSet(Sets.union(changedOrMissingRoles, addedRoles)); + listeners.forEach(c -> c.accept(changedRoles)); } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java index 807cfff6c2c..2cfa89b647c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.iterable.Iterables; @@ -56,6 +55,7 @@ import java.util.stream.Collector; import java.util.stream.Collectors; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; @@ -115,7 +115,7 @@ public class NativePrivilegeStore extends AbstractComponent { final Supplier supplier = client.threadPool().getThreadContext().newRestorableContext(false); try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) { SearchRequest request = client.prepareSearch(SECURITY_INDEX_NAME) - .setScroll(TimeValue.timeValueSeconds(10L)) + .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings)) .setQuery(query) .setSize(1000) .setFetchSource(true) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index e578a4005c4..e032d524038 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -59,6 +59,7 @@ import java.util.function.Supplier; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.existsQuery; +import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static 
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java
index 807cfff6c2c..2cfa89b647c 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java
@@ -24,7 +24,6 @@ import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.util.iterable.Iterables;
@@ -56,6 +55,7 @@ import java.util.stream.Collector;
import java.util.stream.Collectors;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING;
import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin;
@@ -115,7 +115,7 @@ public class NativePrivilegeStore extends AbstractComponent {
        final Supplier<ThreadContext.StoredContext> supplier = client.threadPool().getThreadContext().newRestorableContext(false);
        try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) {
            SearchRequest request = client.prepareSearch(SECURITY_INDEX_NAME)
-               .setScroll(TimeValue.timeValueSeconds(10L))
+               .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings))
                .setQuery(query)
                .setSize(1000)
                .setFetchSource(true)
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java
index e578a4005c4..e032d524038 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java
@@ -59,6 +59,7 @@ import java.util.function.Supplier;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.existsQuery;
+import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING;
import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin;
@@ -120,7 +121,7 @@ public class NativeRolesStore extends AbstractComponent {
        final Supplier<ThreadContext.StoredContext> supplier = client.threadPool().getThreadContext().newRestorableContext(false);
        try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) {
            SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME)
-               .setScroll(TimeValue.timeValueSeconds(10L))
+               .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings))
                .setQuery(query)
                .setSize(1000)
                .setFetchSource(true)
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java
index 860d6bb69b6..10172ff95e8 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java
@@ -8,9 +8,9 @@
package org.elasticsearch.xpack.security.transport.filter;

import io.netty.handler.ipfilter.IpFilterRuleType;
import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.collect.MapBuilder;
-import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -96,11 +96,12 @@
        }
    };

+   private static final Logger logger = LogManager.getLogger(IPFilter.class);
+
    private final AuditTrailService auditTrail;
    private final XPackLicenseState licenseState;
    private final boolean alwaysAllowBoundAddresses;

-   private final Logger logger;
    private volatile Map rules = Collections.emptyMap();
    private volatile boolean isIpFilterEnabled;
    private volatile boolean isHttpFilterEnabled;
@@ -117,7 +118,6 @@
    public IPFilter(final Settings settings, AuditTrailService auditTrail, ClusterSettings clusterSettings,
                    XPackLicenseState licenseState) {
-       this.logger = Loggers.getLogger(getClass(), settings);
        this.auditTrail = auditTrail;
        this.licenseState = licenseState;
        this.alwaysAllowBoundAddresses = ALLOW_BOUND_ADDRESSES_SETTING.get(settings);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java
index 7ab26b0c33f..3d623f343c3 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java
@@ -79,6 +79,7 @@ public class ScrollHelperIntegTests extends ESSingleNodeTestCase {
        when(client.threadPool()).thenReturn(threadPool);
        when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));
        SearchRequest request = new SearchRequest();
+       request.scroll(TimeValue.timeValueHours(10L));
        String scrollId = randomAlphaOfLength(5);
        SearchHit[] hits = new SearchHit[] {new SearchHit(1), new SearchHit(2)};
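Reviewer note (not part of the patch): a knock-on effect of the keep-alive change — tests that previously rode on the hardcoded 10-second scroll must now set one explicitly, since production code reads the keep-alive from node settings that test fixtures may not populate. The values the patch picks are deliberately generous so slow CI machines never expire the cursor mid-test:

--------------------------------------
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.unit.TimeValue;

class ScrollTestSketch {
    // Pin an explicit, test-only keep-alive on the request itself.
    static SearchRequest requestWithExplicitKeepAlive() {
        SearchRequest request = new SearchRequest();
        request.scroll(TimeValue.timeValueHours(10L));
        return request;
    }
}
--------------------------------------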
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java
index 94856f701fa..612a0ea83c0 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java
@@ -126,7 +126,6 @@ public class TransportDeleteRoleActionTests extends ESTestCase {
        DeleteRoleRequest request = new DeleteRoleRequest();
        request.name(roleName);

-       final boolean found = randomBoolean();
        doAnswer(new Answer<Void>() {
            @Override
            public Void answer(InvocationOnMock invocation) throws Throwable {
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java
index ef3c6aa56ae..022328f426f 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java
@@ -15,6 +15,7 @@ import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.SecurityIntegTestCase;
@@ -161,6 +162,7 @@ public class AuditTrailTests extends SecurityIntegTestCase {
        client.admin().indices().refresh(Requests.refreshRequest(indexName)).get();

        final SearchRequest request = client.prepareSearch(indexName)
+               .setScroll(TimeValue.timeValueMinutes(10L))
                .setTypes(IndexAuditTrail.DOC_TYPE)
                .setQuery(QueryBuilders.matchAllQuery())
                .setSize(1000)
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java
index b92b4cad39a..213def0f0fe 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java
@@ -55,8 +55,6 @@ import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;

-import javax.crypto.SecretKey;
-
import java.io.IOException;
import java.time.Clock;
import java.time.Instant;
@@ -67,6 +65,8 @@ import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;

+import javax.crypto.SecretKey;
+
import static java.time.Clock.systemUTC;
import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes;
import static org.hamcrest.Matchers.containsString;
@@ -253,7 +253,7 @@ public class TokenServiceTests extends ESTestCase {
    public void testKeyExchange() throws Exception {
        TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService);
-       int numRotations = 0;randomIntBetween(1, 5);
+       int numRotations = randomIntBetween(1, 5);
        for (int i = 0; i < numRotations; i++) {
            rotateKeys(tokenService);
        }
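Reviewer note (not part of the patch): the `TokenServiceTests` hunk fixes a real bug in the test, not just style. `int numRotations = 0;randomIntBetween(1, 5);` parses as an assignment of zero followed by a discarded expression statement, so `testKeyExchange` never rotated keys at all. A self-contained illustration (with a stand-in for `ESTestCase.randomIntBetween`):

--------------------------------------
import java.util.concurrent.ThreadLocalRandom;

class DeadStatementSketch {
    // Stand-in for ESTestCase.randomIntBetween.
    static int randomIntBetween(int min, int max) {
        return ThreadLocalRandom.current().nextInt(min, max + 1);
    }

    static void before() {
        int numRotations = 0; randomIntBetween(1, 5); // second expression discarded
        assert numRotations == 0; // the rotation loop never ran
    }

    static void after() {
        int numRotations = randomIntBetween(1, 5); // 1..5 rotations actually exercised
        assert numRotations >= 1;
    }
}
--------------------------------------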
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverInMemoryTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverInMemoryTests.java
index fb20c08da61..122b486130e 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverInMemoryTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverInMemoryTests.java
@@ -46,10 +46,10 @@ public class SearchGroupsResolverInMemoryTests extends LdapTestCase {
     * than simply returning no results.
     */
    public void testSearchTimeoutIsFailure() throws Exception {
-       ldapServers[0].setProcessingDelayMillis(100);
+       ldapServers[0].setProcessingDelayMillis(500);

        final LDAPConnectionOptions options = new LDAPConnectionOptions();
-       options.setConnectTimeoutMillis(500);
+       options.setConnectTimeoutMillis(1500);
        options.setResponseTimeoutMillis(5);

        connect(options);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java
index 5a7015a4e8d..7ae41de900e 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java
@@ -17,13 +17,8 @@ import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.util.concurrent.ThreadContext;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.xpack.core.watcher.watch.ClockMock;
-import org.elasticsearch.xpack.core.security.authc.RealmConfig;
import org.hamcrest.Matchers;
import org.junit.AfterClass;
import org.junit.Before;
@@ -183,10 +178,8 @@
        this.requestId = randomId();
    }

-   private SamlAuthenticator buildAuthenticator(Supplier<List<Credential>> credentials, List<String> reqAuthnCtxClassRef) throws
-           Exception {
-       final Settings globalSettings = Settings.builder().put("path.home", createTempDir()).build();
-       final Settings realmSettings = Settings.EMPTY;
+   private SamlAuthenticator buildAuthenticator(Supplier<List<Credential>> credentials, List<String> reqAuthnCtxClassRef)
+           throws Exception {
        final IdpConfiguration idp = new IdpConfiguration(IDP_ENTITY_ID, credentials);

        final SigningConfiguration signingConfiguration = new SigningConfiguration(Collections.singleton("*"),
@@ -195,9 +188,7 @@
            .map((cred) -> (X509Credential) cred).collect(Collectors.toList());
        final SpConfiguration sp = new SpConfiguration(SP_ENTITY_ID, SP_ACS_URL, null, signingConfiguration, spEncryptionCredentials,
            reqAuthnCtxClassRef);
-       final Environment env = TestEnvironment.newEnvironment(globalSettings);
        return new SamlAuthenticator(
-           new RealmConfig("saml_test", realmSettings, globalSettings, env, new ThreadContext(globalSettings)),
            clock,
            idp,
            sp,
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlLogoutRequestHandlerTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlLogoutRequestHandlerTests.java
@@ -15,13 +15,8 @@ import java.util.Arrays;
import java.util.Collections;

import org.elasticsearch.ElasticsearchSecurityException;
-import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.util.set.Sets;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.env.TestEnvironment;
-import org.elasticsearch.xpack.core.security.authc.RealmConfig;
import org.joda.time.DateTime;
import org.junit.AfterClass;
import org.junit.Before;
@@ -206,17 +201,13 @@ public class SamlLogoutRequestHandlerTests extends SamlTestCase {
    }

    private SamlLogoutRequestHandler buildHandler() throws Exception {
-       final Settings globalSettings = Settings.builder().put("path.home", createTempDir()).build();
-       final Settings realmSettings = Settings.EMPTY;
        final IdpConfiguration idp = new IdpConfiguration(IDP_ENTITY_ID, () -> Collections.singletonList(credential));

        final X509Credential spCredential = (X509Credential) buildOpenSamlCredential(readRandomKeyPair()).get(0);
        final SigningConfiguration signingConfiguration = new SigningConfiguration(Collections.singleton("*"), spCredential);
        final SpConfiguration sp = new SpConfiguration("https://sp.test/", "https://sp.test/saml/asc", LOGOUT_URL,
            signingConfiguration, Arrays.asList(spCredential), Collections.emptyList());
-       final Environment env = TestEnvironment.newEnvironment(globalSettings);
        return new SamlLogoutRequestHandler(
-           new RealmConfig("saml_test", realmSettings, globalSettings, env, new ThreadContext(globalSettings)),
            clock,
            idp,
            sp,
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java
index e239c8706b9..f2c91437c3e 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java
@@ -33,7 +33,6 @@ import org.opensaml.xmlsec.signature.Signature;
import org.opensaml.xmlsec.signature.X509Certificate;
import org.opensaml.xmlsec.signature.X509Data;
import org.opensaml.xmlsec.signature.support.SignatureValidator;
-import org.w3c.dom.Element;

import java.io.OutputStream;
import java.nio.file.Files;
@@ -385,7 +384,7 @@ public class SamlMetadataCommandTests extends SamlTestCase {
        final MockTerminal terminal = new MockTerminal();

        final EntityDescriptor descriptor = command.buildEntityDescriptor(terminal, options, env);
-       Element e = command.possiblySignDescriptor(terminal, options, descriptor, env);
+       command.possiblySignDescriptor(terminal, options, descriptor, env);
        assertThat(descriptor, notNullValue());
        // Verify generated signature
        assertThat(descriptor.getSignature(), notNullValue());
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java
index 036f1667e14..dca113b6e42 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java
@@ -362,8 +362,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase {
            IndicesAliasesAction.NAME, "create_test_aliases_alias");

        //fails: user doesn't have manage_aliases on test_*, wildcards can't get replaced
-       IndexNotFoundException indexNotFoundException = expectThrows(IndexNotFoundException.class,
-           client.admin().indices().prepareAliases().removeAlias("test_*", "alias_1")::get);
+       expectThrows(IndexNotFoundException.class, client.admin().indices().prepareAliases().removeAlias("test_*", "alias_1")::get);
    }

    public void testGetAliasesCreateAndAliasesPermission2() {
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java
index 0c2ab1ecc76..9f1490856d6 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java
@@ -53,6 +53,7 @@ import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;
+import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
@@ -213,7 +214,7 @@
        new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore,
            mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS),
            new XPackLicenseState(SECURITY_ENABLED_SETTINGS));
-       verify(fileRolesStore).addListener(any(Runnable.class)); // adds a listener in ctor
+       verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor

        final String roleName = randomAlphaOfLengthBetween(1, 10);
        PlainActionFuture<Role> future = new PlainActionFuture<>();
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java
index 5cb93b898ba..0763ff65ec5 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java
@@ -37,6 +37,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.Collections;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -319,8 +320,11 @@
            threadPool = new TestThreadPool("test");
            watcherService = new ResourceWatcherService(settings, threadPool);
            final CountDownLatch latch = new CountDownLatch(1);
-           FileRolesStore store = new FileRolesStore(settings, env, watcherService, latch::countDown,
-               new XPackLicenseState(Settings.EMPTY));
+           final Set<String> modifiedRoles = new HashSet<>();
+           FileRolesStore store = new FileRolesStore(settings, env, watcherService, roleSet -> {
+               modifiedRoles.addAll(roleSet);
+               latch.countDown();
+           }, new XPackLicenseState(Settings.EMPTY));

            Set<RoleDescriptor> descriptors = store.roleDescriptors(Collections.singleton("role1"));
            assertThat(descriptors,
notNullValue());
@@ -344,6 +348,8 @@
                fail("Waited too long for the updated file to be picked up");
            }

+           assertEquals(1, modifiedRoles.size());
+           assertTrue(modifiedRoles.contains("role5"));
            final TransportRequest request = mock(TransportRequest.class);
            descriptors = store.roleDescriptors(Collections.singleton("role5"));
            assertThat(descriptors, notNullValue());
@@ -354,6 +360,49 @@
            assertThat(role.cluster().check("cluster:monitor/foo/bar", request), is(true));
            assertThat(role.cluster().check("cluster:admin/foo/bar", request), is(false));

+           // truncate to remove some
+           final Set<String> truncatedFileRolesModified = new HashSet<>();
+           final CountDownLatch truncateLatch = new CountDownLatch(1);
+           store = new FileRolesStore(settings, env, watcherService, roleSet -> {
+               truncatedFileRolesModified.addAll(roleSet);
+               truncateLatch.countDown();
+           }, new XPackLicenseState(Settings.EMPTY));
+
+           final Set<String> allRolesPreTruncate = store.getAllRoleNames();
+           try (BufferedWriter writer = Files.newBufferedWriter(tmp, StandardCharsets.UTF_8, StandardOpenOption.TRUNCATE_EXISTING)) {
+               writer.append("role5:").append(System.lineSeparator());
+               writer.append(" cluster:").append(System.lineSeparator());
+               writer.append(" - 'MONITOR'");
+           }
+
+           truncateLatch.await();
+           assertEquals(allRolesPreTruncate.size() - 1, truncatedFileRolesModified.size());
+           assertTrue(allRolesPreTruncate.contains("role5"));
+           assertFalse(truncatedFileRolesModified.contains("role5"));
+           descriptors = store.roleDescriptors(Collections.singleton("role5"));
+           assertThat(descriptors, notNullValue());
+           assertEquals(1, descriptors.size());
+
+           // modify
+           final Set<String> modifiedFileRolesModified = new HashSet<>();
+           final CountDownLatch modifyLatch = new CountDownLatch(1);
+           store = new FileRolesStore(settings, env, watcherService, roleSet -> {
+               modifiedFileRolesModified.addAll(roleSet);
+               modifyLatch.countDown();
+           }, new XPackLicenseState(Settings.EMPTY));
+
+           try (BufferedWriter writer = Files.newBufferedWriter(tmp, StandardCharsets.UTF_8, StandardOpenOption.TRUNCATE_EXISTING)) {
+               writer.append("role5:").append(System.lineSeparator());
+               writer.append(" cluster:").append(System.lineSeparator());
+               writer.append(" - 'ALL'");
+           }
+
+           modifyLatch.await();
+           assertEquals(1, modifiedFileRolesModified.size());
+           assertTrue(modifiedFileRolesModified.contains("role5"));
+           descriptors = store.roleDescriptors(Collections.singleton("role5"));
+           assertThat(descriptors, notNullValue());
+           assertEquals(1, descriptors.size());
        } finally {
            if (watcherService != null) {
                watcherService.stop();
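Reviewer note (not part of the patch): the new file that follows hoists the SSL fixture and the shared connect/bind/handshake/renegotiation tests out of the netty4 and nio test classes, both of which are rewritten further down to extend it. Structurally it is a plain template method: the base class owns the assertions, each subclass owns transport construction. Schematic only — the real signatures are in the diff below:

--------------------------------------
// Schematic of the refactor, with simplified types.
abstract class SecurityTransportTestBaseSketch {
    // Shared fixture every subclass needs (SSL setup in the real class).
    protected String createSslFixture() {
        return "ssl-context";
    }

    // Transport-specific factory supplied by subclasses (netty4 vs nio).
    protected abstract AutoCloseable buildTransport(String sslFixture) throws Exception;

    // Shared test logic runs against whatever the subclass builds.
    final void runSharedScenario() throws Exception {
        try (AutoCloseable transport = buildTransport(createSslFixture())) {
            // ... shared assertions ...
        }
    }
}
--------------------------------------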
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/AbstractSimpleSecurityTransportTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/AbstractSimpleSecurityTransportTestCase.java
new file mode 100644
index 00000000000..2e1a423d5fd
--- /dev/null
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/AbstractSimpleSecurityTransportTestCase.java
@@ -0,0 +1,175 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport;
+
+import java.util.concurrent.atomic.AtomicBoolean;
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.SuppressForbidden;
+import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
+import org.elasticsearch.common.settings.ClusterSettings;
+import org.elasticsearch.common.settings.MockSecureSettings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.env.TestEnvironment;
+import org.elasticsearch.node.Node;
+import org.elasticsearch.test.transport.MockTransportService;
+import org.elasticsearch.transport.AbstractSimpleTransportTestCase;
+import org.elasticsearch.transport.BindTransportException;
+import org.elasticsearch.transport.ConnectTransportException;
+import org.elasticsearch.transport.ConnectionManager;
+import org.elasticsearch.transport.ConnectionProfile;
+import org.elasticsearch.transport.TcpTransport;
+import org.elasticsearch.transport.TransportRequestOptions;
+import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.core.common.socket.SocketAccess;
+import org.elasticsearch.xpack.core.ssl.SSLConfiguration;
+import org.elasticsearch.xpack.core.ssl.SSLService;
+
+import javax.net.SocketFactory;
+import javax.net.ssl.HandshakeCompletedListener;
+import javax.net.ssl.SSLSocket;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.SocketTimeoutException;
+import java.net.UnknownHostException;
+import java.nio.file.Path;
+import java.util.concurrent.CountDownLatch;
+
+import static java.util.Collections.emptyMap;
+import static java.util.Collections.emptySet;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.instanceOf;
+
+public abstract class AbstractSimpleSecurityTransportTestCase extends AbstractSimpleTransportTestCase {
+
+    protected SSLService createSSLService() {
+        return createSSLService(Settings.EMPTY);
+    }
+
+    protected SSLService createSSLService(Settings settings) {
+        Path testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt");
+        Path testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem");
+        MockSecureSettings secureSettings = new MockSecureSettings();
+        secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode");
+        Settings settings1 = Settings.builder()
+            .put(settings)
+            .put("xpack.security.transport.ssl.enabled", true)
+            .put("xpack.ssl.key", testnodeKey)
+            .put("xpack.ssl.certificate", testnodeCert)
+            .put("path.home", createTempDir())
+            .setSecureSettings(secureSettings)
+            .build();
+        try {
+            return new SSLService(settings1, TestEnvironment.newEnvironment(settings1));
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public void testConnectException() throws UnknownHostException {
+        try {
+            serviceA.connectToNode(new DiscoveryNode("C", new TransportAddress(InetAddress.getByName("localhost"), 9876),
+                emptyMap(), emptySet(), Version.CURRENT));
+            fail("Expected ConnectTransportException");
+        } catch (ConnectTransportException e) {
+            assertThat(e.getMessage(), containsString("connect_exception"));
+            assertThat(e.getMessage(), containsString("[127.0.0.1:9876]"));
+            Throwable cause = e.getCause();
+            assertThat(cause, instanceOf(IOException.class));
+        }
+    }
+
+    public void testBindUnavailableAddress() {
+        // this is on a lower level since it needs access to the TransportService before it's started
+        int port = serviceA.boundAddress().publishAddress().getPort();
+        Settings settings = Settings.builder()
+            .put(Node.NODE_NAME_SETTING.getKey(), "foobar")
+            .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "")
+            .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING")
+            .put("transport.tcp.port", port)
+            .build();
+        ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        BindTransportException bindTransportException = expectThrows(BindTransportException.class, () -> {
+            MockTransportService transportService = build(settings, Version.CURRENT, clusterSettings, true);
+            try {
+                transportService.start();
+            } finally {
+                transportService.stop();
+                transportService.close();
+            }
+        });
+        assertEquals("Failed to bind to [" + port + "]", bindTransportException.getMessage());
+    }
+
+    @Override
+    public void testTcpHandshake() throws IOException, InterruptedException {
+        assumeTrue("only tcp transport has a handshake method", serviceA.getOriginalTransport() instanceof TcpTransport);
+        TcpTransport originalTransport = (TcpTransport) serviceA.getOriginalTransport();
+
+        ConnectionProfile connectionProfile = ConnectionManager.buildDefaultConnectionProfile(Settings.EMPTY);
+        try (TransportService service = buildService("TS_TPC", Version.CURRENT, null);
+             TcpTransport.NodeChannels connection = originalTransport.openConnection(
+                 new DiscoveryNode("TS_TPC", "TS_TPC", service.boundAddress().publishAddress(), emptyMap(), emptySet(), version0),
+                 connectionProfile)) {
+            Version version = originalTransport.executeHandshake(connection.getNode(),
+                connection.channel(TransportRequestOptions.Type.PING), TimeValue.timeValueSeconds(10));
+            assertEquals(version, Version.CURRENT);
+        }
+    }
+
+    @SuppressForbidden(reason = "Need to open socket connection")
+    public void testRenegotiation() throws Exception {
+        SSLService sslService = createSSLService();
+        final SSLConfiguration sslConfiguration = sslService.getSSLConfiguration("xpack.ssl");
+        SocketFactory factory = sslService.sslSocketFactory(sslConfiguration);
+        try (SSLSocket socket = (SSLSocket) factory.createSocket()) {
+            SocketAccess.doPrivileged(() -> socket.connect(serviceA.boundAddress().publishAddress().address()));
+
+            CountDownLatch handshakeLatch = new CountDownLatch(1);
+            HandshakeCompletedListener firstListener = event -> handshakeLatch.countDown();
+            socket.addHandshakeCompletedListener(firstListener);
+            socket.startHandshake();
+            handshakeLatch.await();
+            socket.removeHandshakeCompletedListener(firstListener);
+
+            OutputStreamStreamOutput stream = new OutputStreamStreamOutput(socket.getOutputStream());
+            stream.writeByte((byte) 'E');
+            stream.writeByte((byte) 'S');
+            stream.writeInt(-1);
+            stream.flush();
+
+            CountDownLatch renegotiationLatch = new CountDownLatch(1);
+            HandshakeCompletedListener secondListener = event -> renegotiationLatch.countDown();
+            socket.addHandshakeCompletedListener(secondListener);
+            socket.startHandshake();
+            AtomicBoolean stopped = new AtomicBoolean(false);
+            socket.setSoTimeout(10);
+            Thread emptyReader = new Thread(() -> {
+                while (stopped.get() == false) {
+                    try {
+                        socket.getInputStream().read();
+                    } catch (SocketTimeoutException e) {
+                        // Ignore. We expect a timeout.
+                    } catch (IOException e) {
+                        throw new AssertionError(e);
+                    }
+                }
+            });
+            emptyReader.start();
+            renegotiationLatch.await();
+            stopped.set(true);
+            emptyReader.join();
+            socket.removeHandshakeCompletedListener(secondListener);
+
+            stream.writeByte((byte) 'E');
+            stream.writeByte((byte) 'S');
+            stream.writeInt(-1);
+            stream.flush();
+        }
+    }
+}
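Reviewer note (not part of the patch): compare the shared `testRenegotiation` above with the netty4 copy deleted below. The new version registers the `HandshakeCompletedListener` before calling `startHandshake()`, closing the window where a fast renegotiation could complete before the listener was attached, and replaces the one-shot timed read with a background reader that drains the socket until renegotiation finishes. The ordering in isolation:

--------------------------------------
import java.util.concurrent.CountDownLatch;
import javax.net.ssl.HandshakeCompletedListener;
import javax.net.ssl.SSLSocket;

class RenegotiationOrderingSketch {
    // Attach the listener first so a fast handshake cannot be missed.
    static void renegotiate(SSLSocket socket) throws Exception {
        CountDownLatch done = new CountDownLatch(1);
        HandshakeCompletedListener listener = event -> done.countDown();
        socket.addHandshakeCompletedListener(listener);
        socket.startHandshake(); // only after the listener is registered
        done.await();
        socket.removeHandshakeCompletedListener(listener);
    }
}
--------------------------------------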
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4TransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java
similarity index 59%
rename from x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4TransportTests.java
rename to x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java
index 7efaa3f78c2..88895034df9 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4TransportTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java
@@ -14,23 +14,16 @@ import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.ssl.SslHandler;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.ClusterSettings;
-import org.elasticsearch.common.settings.MockSecureSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
-import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
-import org.elasticsearch.node.Node;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.AbstractSimpleTransportTestCase;
-import org.elasticsearch.transport.BindTransportException;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.ConnectionProfile;
import org.elasticsearch.transport.TcpChannel;
@@ -38,39 +31,26 @@ import org.elasticsearch.transport.TcpTransport;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportService;
-import org.elasticsearch.xpack.core.common.socket.SocketAccess;
-import org.elasticsearch.xpack.core.security.transport.netty4.SecurityNetty4Transport;
-import org.elasticsearch.xpack.core.ssl.SSLConfiguration;
import org.elasticsearch.xpack.core.ssl.SSLService;
+import org.elasticsearch.xpack.security.transport.AbstractSimpleSecurityTransportTestCase;

-import javax.net.SocketFactory;
-import javax.net.ssl.HandshakeCompletedListener;
import javax.net.ssl.SNIHostName;
import javax.net.ssl.SNIMatcher;
import javax.net.ssl.SNIServerName;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLParameters;
-import javax.net.ssl.SSLSocket;
import java.io.IOException;
-import java.net.InetAddress;
import java.net.InetSocketAddress;
-import java.net.SocketTimeoutException;
-import java.net.UnknownHostException;
-import java.nio.file.Path;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;

-import static java.util.Collections.emptyMap;
-import static java.util.Collections.emptySet;
import static org.elasticsearch.xpack.core.security.SecurityField.setting;
import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.instanceOf;

-public class SimpleSecurityNetty4TransportTests extends AbstractSimpleTransportTestCase {
+public class SimpleSecurityNetty4ServerTransportTests extends AbstractSimpleSecurityTransportTestCase {

    private static final ConnectionProfile SINGLE_CHANNEL_PROFILE;
@@ -85,25 +65,6 @@ public class SimpleSecurityNetty4TransportTests extends AbstractSimpleTransportT
        SINGLE_CHANNEL_PROFILE = builder.build();
    }

-   private SSLService createSSLService() {
-       Path testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt");
-       Path testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem");
-       MockSecureSettings secureSettings = new MockSecureSettings();
-       secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode");
-       Settings settings = Settings.builder()
-           .put("xpack.security.transport.ssl.enabled", true)
-           .put("xpack.ssl.key", testnodeKey)
-           .put("xpack.ssl.certificate", testnodeCert)
-           .put("path.home", createTempDir())
-           .setSecureSettings(secureSettings)
-           .build();
-       try {
-           return new SSLService(settings, TestEnvironment.newEnvironment(settings));
-       } catch (Exception e) {
-           throw new RuntimeException(e);
-       }
-   }
-
    public MockTransportService nettyFromThreadPool(Settings settings, ThreadPool threadPool, final Version version,
                                                    ClusterSettings clusterSettings, boolean doHandshake) {
        NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.emptyList());
@@ -111,12 +72,12 @@
        Settings settings1 = Settings.builder()
            .put(settings)
            .put("xpack.security.transport.ssl.enabled", true).build();
-       Transport transport = new SecurityNetty4Transport(settings1, threadPool,
+       Transport transport = new SecurityNetty4ServerTransport(settings1, threadPool,
            networkService, BigArrays.NON_RECYCLING_INSTANCE, namedWriteableRegistry,
-           new NoneCircuitBreakerService(), createSSLService()) {
+           new NoneCircuitBreakerService(), null, createSSLService(settings1)) {

            @Override
-           protected Version executeHandshake(DiscoveryNode node, TcpChannel channel, TimeValue timeout) throws IOException,
+           public Version executeHandshake(DiscoveryNode node, TcpChannel channel, TimeValue timeout) throws IOException,
                InterruptedException {
                if (doHandshake) {
                    return super.executeHandshake(node, channel, timeout);
@@ -140,118 +101,16 @@
    @Override
    protected MockTransportService build(Settings settings, Version version, ClusterSettings clusterSettings, boolean doHandshake) {
-       settings = Settings.builder().put(settings)
-           .put(TcpTransport.PORT.getKey(), "0")
-           .build();
+       if (TcpTransport.PORT.exists(settings) == false) {
+           settings = Settings.builder().put(settings)
+               .put(TcpTransport.PORT.getKey(), "0")
+               .build();
+       }
        MockTransportService transportService = nettyFromThreadPool(settings, threadPool, version, clusterSettings, doHandshake);
        transportService.start();
        return transportService;
    }

-   public void testConnectException() throws UnknownHostException {
-       try {
-           serviceA.connectToNode(new DiscoveryNode("C", new TransportAddress(InetAddress.getByName("localhost"), 9876),
-               emptyMap(), emptySet(), Version.CURRENT));
-           fail("Expected ConnectTransportException");
-       } catch (ConnectTransportException e) {
-           assertThat(e.getMessage(), containsString("connect_exception"));
-           assertThat(e.getMessage(), containsString("[127.0.0.1:9876]"));
-           Throwable cause = e.getCause();
-           assertThat(cause, instanceOf(IOException.class));
-       }
-   }
-
-   public void testBindUnavailableAddress() {
-       // this is on a lower level since it needs access to the TransportService before it's started
-       int port = serviceA.boundAddress().publishAddress().getPort();
-       Settings settings = Settings.builder()
-           .put(Node.NODE_NAME_SETTING.getKey(), "foobar")
-           .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "")
-           .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING")
-           .put("transport.tcp.port", port)
-           .build();
-       ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
-       BindTransportException bindTransportException = expectThrows(BindTransportException.class, () -> {
-           MockTransportService transportService = nettyFromThreadPool(settings, threadPool, Version.CURRENT, clusterSettings, true);
-           try {
-               transportService.start();
-           } finally {
-               transportService.stop();
-               transportService.close();
-           }
-       });
-       assertEquals("Failed to bind to [" + port + "]", bindTransportException.getMessage());
-   }
-
-   @SuppressForbidden(reason = "Need to open socket connection")
-   @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33772")
-   public void testRenegotiation() throws Exception {
-       SSLService sslService = createSSLService();
-       final SSLConfiguration sslConfiguration = sslService.getSSLConfiguration("xpack.ssl");
-       SocketFactory factory = sslService.sslSocketFactory(sslConfiguration);
-       try (SSLSocket socket = (SSLSocket) factory.createSocket()) {
-           SocketAccess.doPrivileged(() -> socket.connect(serviceA.boundAddress().publishAddress().address()));
-
-           CountDownLatch handshakeLatch = new CountDownLatch(1);
-           HandshakeCompletedListener firstListener = event -> handshakeLatch.countDown();
-           socket.addHandshakeCompletedListener(firstListener);
-           socket.startHandshake();
-           handshakeLatch.await();
-           socket.removeHandshakeCompletedListener(firstListener);
-
-           OutputStreamStreamOutput stream = new OutputStreamStreamOutput(socket.getOutputStream());
-           stream.writeByte((byte) 'E');
-           stream.writeByte((byte) 'S');
-           stream.writeInt(-1);
-           stream.flush();
-
-           socket.startHandshake();
-           CountDownLatch renegotiationLatch = new CountDownLatch(1);
-           HandshakeCompletedListener secondListener = event -> renegotiationLatch.countDown();
-           socket.addHandshakeCompletedListener(secondListener);
-
-           AtomicReference<Exception> error = new AtomicReference<>();
-           CountDownLatch catchReadErrorsLatch = new CountDownLatch(1);
-           Thread renegotiationThread = new Thread(() -> {
-               try {
-                   socket.setSoTimeout(50);
-                   socket.getInputStream().read();
-               } catch (SocketTimeoutException e) {
-                   // Ignore. We expect a timeout.
- } catch (IOException e) { - error.set(e); - } finally { - catchReadErrorsLatch.countDown(); - } - }); - renegotiationThread.start(); - renegotiationLatch.await(); - socket.removeHandshakeCompletedListener(secondListener); - catchReadErrorsLatch.await(); - - assertNull(error.get()); - - stream.writeByte((byte) 'E'); - stream.writeByte((byte) 'S'); - stream.writeInt(-1); - stream.flush(); - } - } - - // TODO: These tests currently rely on plaintext transports - - @Override - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33285") - public void testTcpHandshake() { - } - - // TODO: These tests as configured do not currently work with the security transport - - @Override - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33285") - public void testTransportProfilesWithPortAndHost() { - } - public void testSNIServerNameIsPropagated() throws Exception { SSLService sslService = createSSLService(); final ServerBootstrap serverBootstrap = new ServerBootstrap(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java index 1b8e35651b6..5208d58d743 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java @@ -7,70 +7,25 @@ package org.elasticsearch.xpack.security.transport.nio; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; -import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.node.Node; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.AbstractSimpleTransportTestCase; -import org.elasticsearch.transport.BindTransportException; -import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TcpChannel; import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.common.socket.SocketAccess; -import org.elasticsearch.xpack.core.ssl.SSLConfiguration; -import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.security.transport.AbstractSimpleSecurityTransportTestCase; -import javax.net.SocketFactory; -import javax.net.ssl.HandshakeCompletedListener; -import javax.net.ssl.SSLSocket; import java.io.IOException; -import java.net.InetAddress; -import java.net.SocketTimeoutException; -import java.net.UnknownHostException; -import java.nio.file.Path; 
import java.util.Collections; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicReference; -import static java.util.Collections.emptyMap; -import static java.util.Collections.emptySet; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.instanceOf; - -public class SimpleSecurityNioTransportTests extends AbstractSimpleTransportTestCase { - - private SSLService createSSLService() { - Path testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); - Path testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); - Settings settings = Settings.builder() - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.key", testnodeKey) - .put("xpack.ssl.certificate", testnodeCert) - .put("path.home", createTempDir()) - .setSecureSettings(secureSettings) - .build(); - try { - return new SSLService(settings, TestEnvironment.newEnvironment(settings)); - } catch (Exception e) { - throw new RuntimeException(e); - } - } +public class SimpleSecurityNioTransportTests extends AbstractSimpleSecurityTransportTestCase { public MockTransportService nioFromThreadPool(Settings settings, ThreadPool threadPool, final Version version, ClusterSettings clusterSettings, boolean doHandshake) { @@ -81,10 +36,10 @@ public class SimpleSecurityNioTransportTests extends AbstractSimpleTransportTest .put("xpack.security.transport.ssl.enabled", true).build(); Transport transport = new SecurityNioTransport(settings1, threadPool, networkService, BigArrays.NON_RECYCLING_INSTANCE, new MockPageCacheRecycler(settings), namedWriteableRegistry, - new NoneCircuitBreakerService(), null, createSSLService()) { + new NoneCircuitBreakerService(), null, createSSLService(settings1)) { @Override - protected Version executeHandshake(DiscoveryNode node, TcpChannel channel, TimeValue timeout) throws IOException, + public Version executeHandshake(DiscoveryNode node, TcpChannel channel, TimeValue timeout) throws IOException, InterruptedException { if (doHandshake) { return super.executeHandshake(node, channel, timeout); @@ -108,114 +63,13 @@ public class SimpleSecurityNioTransportTests extends AbstractSimpleTransportTest @Override protected MockTransportService build(Settings settings, Version version, ClusterSettings clusterSettings, boolean doHandshake) { - settings = Settings.builder().put(settings) + if (TcpTransport.PORT.exists(settings) == false) { + settings = Settings.builder().put(settings) .put(TcpTransport.PORT.getKey(), "0") .build(); + } MockTransportService transportService = nioFromThreadPool(settings, threadPool, version, clusterSettings, doHandshake); transportService.start(); return transportService; } - - public void testConnectException() throws UnknownHostException { - try { - serviceA.connectToNode(new DiscoveryNode("C", new TransportAddress(InetAddress.getByName("localhost"), 9876), - emptyMap(), emptySet(), Version.CURRENT)); - fail("Expected ConnectTransportException"); - } catch (ConnectTransportException e) { - assertThat(e.getMessage(), containsString("connect_exception")); - assertThat(e.getMessage(), containsString("[127.0.0.1:9876]")); - Throwable cause = e.getCause(); - assertThat(cause, instanceOf(IOException.class)); - } - } - - public void testBindUnavailableAddress() { - // this is on a lower level since 
it needs access to the TransportService before it's started - int port = serviceA.boundAddress().publishAddress().getPort(); - Settings settings = Settings.builder() - .put(Node.NODE_NAME_SETTING.getKey(), "foobar") - .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") - .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") - .put("transport.tcp.port", port) - .build(); - ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - BindTransportException bindTransportException = expectThrows(BindTransportException.class, () -> { - MockTransportService transportService = nioFromThreadPool(settings, threadPool, Version.CURRENT, clusterSettings, true); - try { - transportService.start(); - } finally { - transportService.stop(); - transportService.close(); - } - }); - assertEquals("Failed to bind to [" + port + "]", bindTransportException.getMessage()); - } - - @SuppressForbidden(reason = "Need to open socket connection") - public void testRenegotiation() throws Exception { - SSLService sslService = createSSLService(); - final SSLConfiguration sslConfiguration = sslService.getSSLConfiguration("xpack.ssl"); - SocketFactory factory = sslService.sslSocketFactory(sslConfiguration); - try (SSLSocket socket = (SSLSocket) factory.createSocket()) { - SocketAccess.doPrivileged(() -> socket.connect(serviceA.boundAddress().publishAddress().address())); - - CountDownLatch handshakeLatch = new CountDownLatch(1); - HandshakeCompletedListener firstListener = event -> handshakeLatch.countDown(); - socket.addHandshakeCompletedListener(firstListener); - socket.startHandshake(); - handshakeLatch.await(); - socket.removeHandshakeCompletedListener(firstListener); - - OutputStreamStreamOutput stream = new OutputStreamStreamOutput(socket.getOutputStream()); - stream.writeByte((byte) 'E'); - stream.writeByte((byte) 'S'); - stream.writeInt(-1); - stream.flush(); - - socket.startHandshake(); - CountDownLatch renegotiationLatch = new CountDownLatch(1); - HandshakeCompletedListener secondListener = event -> renegotiationLatch.countDown(); - socket.addHandshakeCompletedListener(secondListener); - - AtomicReference error = new AtomicReference<>(); - CountDownLatch catchReadErrorsLatch = new CountDownLatch(1); - Thread renegotiationThread = new Thread(() -> { - try { - socket.setSoTimeout(50); - socket.getInputStream().read(); - } catch (SocketTimeoutException e) { - // Ignore. We expect a timeout. 
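// Editorial aside (illustrative sketch, not part of this patch): both the
// Netty and NIO test classes now delegate to createSSLService(Settings) from
// the shared AbstractSimpleSecurityTransportTestCase. Judging from the
// per-class method deleted above, the shared variant plausibly folds the
// caller's transport settings into the SSL configuration, along these lines
// (an assumption about the base class, not its actual code):
protected SSLService createSSLService(Settings transportSettings) {
    Path testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt");
    Path testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem");
    MockSecureSettings secureSettings = new MockSecureSettings();
    secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode");
    Settings settings = Settings.builder()
            .put("xpack.security.transport.ssl.enabled", true)
            .put("xpack.ssl.key", testnodeKey)
            .put("xpack.ssl.certificate", testnodeCert)
            .put("path.home", createTempDir())
            .put(transportSettings) // fold in the caller's transport settings
            .setSecureSettings(secureSettings)
            .build();
    try {
        return new SSLService(settings, TestEnvironment.newEnvironment(settings));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}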
- } catch (IOException e) { - error.set(e); - } finally { - catchReadErrorsLatch.countDown(); - } - }); - renegotiationThread.start(); - renegotiationLatch.await(); - socket.removeHandshakeCompletedListener(secondListener); - catchReadErrorsLatch.await(); - - assertNull(error.get()); - - stream.writeByte((byte) 'E'); - stream.writeByte((byte)'S'); - stream.writeInt(-1); - stream.flush(); - } - } - - // TODO: These tests currently rely on plaintext transports - - @Override - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33285") - public void testTcpHandshake() throws IOException, InterruptedException { - } - - // TODO: These tests as configured do not currently work with the security transport - - @Override - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33285") - public void testTransportProfilesWithPortAndHost() { - } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java index 541e6606912..16bdc705a43 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java @@ -32,10 +32,6 @@ import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.security.LocalStateSecurity; -import javax.net.ssl.SSLContext; -import javax.net.ssl.SSLHandshakeException; -import javax.net.ssl.TrustManagerFactory; - import java.io.InputStreamReader; import java.net.InetSocketAddress; import java.nio.charset.StandardCharsets; @@ -48,10 +44,13 @@ import java.util.List; import java.util.Locale; import java.util.Set; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLHandshakeException; +import javax.net.ssl.TrustManagerFactory; + import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.instanceOf; public class SslIntegrationTests extends SecurityIntegTestCase { @@ -150,10 +149,7 @@ public class SslIntegrationTests extends SecurityIntegTestCase { SSLConnectionSocketFactory sf = new SSLConnectionSocketFactory(sslContext, new String[]{ "SSLv3" }, null, NoopHostnameVerifier.INSTANCE); try (CloseableHttpClient client = HttpClients.custom().setSSLSocketFactory(sf).build()) { - CloseableHttpResponse result = SocketAccess.doPrivileged(() -> client.execute(new HttpGet(getNodeUrl()))); - fail("Expected a connection error due to SSLv3 not being supported by default"); - } catch (Exception e) { - assertThat(e, is(instanceOf(SSLHandshakeException.class))); + expectThrows(SSLHandshakeException.class, () -> SocketAccess.doPrivileged(() -> client.execute(new HttpGet(getNodeUrl())))); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java index 0835ecee9c2..29283532698 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.ssl; - import 
org.elasticsearch.ElasticsearchException; -import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -17,7 +15,7 @@ import org.elasticsearch.transport.Transport; import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLService; -import javax.net.ssl.SSLHandshakeException; +import javax.net.ssl.SSLException; import javax.net.ssl.SSLSocket; import javax.net.ssl.SSLSocketFactory; @@ -93,7 +91,6 @@ public class SSLReloadIntegTests extends SecurityIntegTestCase { } public void testThatSSLConfigurationReloadsOnModification() throws Exception { - assumeTrue("test fails on JDK 11 currently", JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0); Path keyPath = createTempDir().resolve("testnode_updated.pem"); Path certPath = createTempDir().resolve("testnode_updated.crt"); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.pem"), keyPath); @@ -119,7 +116,7 @@ public class SSLReloadIntegTests extends SecurityIntegTestCase { assertThat(socket.isConnected(), is(true)); socket.startHandshake(); fail("handshake should not have been successful!"); - } catch (SSLHandshakeException | SocketException expected) { + } catch (SSLException | SocketException expected) { logger.trace("expected exception", expected); } // Copy testnode_updated.crt to the placeholder updateable.crt so that the nodes will start trusting it now diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java index e1896e01365..b9cec441fbb 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ssl; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -25,7 +24,7 @@ import org.elasticsearch.xpack.core.ssl.SSLService; import org.junit.AfterClass; import org.junit.BeforeClass; -import javax.net.ssl.SSLHandshakeException; +import javax.net.ssl.SSLException; import javax.net.ssl.SSLSocket; import javax.net.ssl.SSLSocketFactory; import java.io.IOException; @@ -166,7 +165,7 @@ public class SSLTrustRestrictionsTests extends SecurityIntegTestCase { writeRestrictions("*.trusted"); try { tryConnect(trustedCert); - } catch (SSLHandshakeException | SocketException ex) { + } catch (SSLException | SocketException ex) { logger.warn(new ParameterizedMessage("unexpected handshake failure with certificate [{}] [{}]", trustedCert.certificate.getSubjectDN(), trustedCert.certificate.getSubjectAlternativeNames()), ex); fail("handshake should have been successful, but failed with " + ex); @@ -174,25 +173,21 @@ public class SSLTrustRestrictionsTests extends SecurityIntegTestCase { } public void testCertificateWithUntrustedNameFails() throws Exception { - // see https://github.com/elastic/elasticsearch/issues/29989 - assumeTrue("test fails on JDK 11 currently", 
JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0); writeRestrictions("*.trusted"); try { tryConnect(untrustedCert); fail("handshake should have failed, but was successful"); - } catch (SSLHandshakeException | SocketException ex) { + } catch (SSLException | SocketException ex) { // expected } } public void testRestrictionsAreReloaded() throws Exception { - // see https://github.com/elastic/elasticsearch/issues/29989 - assumeTrue("test fails on JDK 11 currently", JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0); writeRestrictions("*"); assertBusy(() -> { try { tryConnect(untrustedCert); - } catch (SSLHandshakeException | SocketException ex) { + } catch (SSLException | SocketException ex) { fail("handshake should have been successful, but failed with " + ex); } }, MAX_WAIT_RELOAD.millis(), TimeUnit.MILLISECONDS); @@ -202,7 +197,7 @@ public class SSLTrustRestrictionsTests extends SecurityIntegTestCase { try { tryConnect(untrustedCert); fail("handshake should have failed, but was successful"); - } catch (SSLHandshakeException | SocketException ex) { + } catch (SSLException | SocketException ex) { // expected } }, MAX_WAIT_RELOAD.millis(), TimeUnit.MILLISECONDS); diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcConfiguration.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcConfiguration.java index a2ab9060b5a..ca35504b2c8 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcConfiguration.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcConfiguration.java @@ -66,7 +66,6 @@ public class JdbcConfiguration extends ConnectionConfiguration { } // immutable properties - private final String originalUrl; private final boolean debug; private final String debugOut; @@ -147,8 +146,6 @@ public class JdbcConfiguration extends ConnectionConfiguration { private JdbcConfiguration(URI baseURI, String u, Properties props) throws JdbcSQLException { super(baseURI, u, props); - this.originalUrl = u; - this.debug = parseValue(DEBUG, props.getProperty(DEBUG, DEBUG_DEFAULT), Boolean::parseBoolean); this.debugOut = props.getProperty(DEBUG_OUTPUT, DEBUG_OUTPUT_DEFAULT); diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaData.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaData.java index 085016bc0bd..c06a96c9881 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaData.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaData.java @@ -19,6 +19,7 @@ import java.sql.ResultSet; import java.sql.RowIdLifetime; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; +import java.sql.SQLType; import java.util.ArrayList; import java.util.List; @@ -190,7 +191,7 @@ class JdbcDatabaseMetaData implements DatabaseMetaData, JdbcWrapper { + "PI,POWER," + "RADIANS,RAND,ROUND," + "SIGN,SIN,SQRT," - + "TAN"; + + "TAN,TRUNCATE"; } @Override @@ -1124,11 +1125,11 @@ class JdbcDatabaseMetaData implements DatabaseMetaData, JdbcWrapper { Object obj = cols[i]; if (obj instanceof String) { String name = obj.toString(); - JDBCType type = JDBCType.VARCHAR; + SQLType type = JDBCType.VARCHAR; if (i + 1 < cols.length) { // check if the next item it's a type - if (cols[i + 1] instanceof JDBCType) { - type = (JDBCType) cols[i + 1]; + if (cols[i + 1] 
instanceof SQLType) { + type = (SQLType) cols[i + 1]; i++; } // it's not, use the default and move on diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcParameterMetaData.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcParameterMetaData.java index ca464813dc2..988fa6da047 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcParameterMetaData.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcParameterMetaData.java @@ -54,7 +54,7 @@ class JdbcParameterMetaData implements ParameterMetaData, JdbcWrapper { @Override public String getParameterTypeName(int param) throws SQLException { - return paramInfo(param).type.name(); + return paramInfo(param).type.getName(); } @Override diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatement.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatement.java index bae4260ac2b..dc3dac978a7 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatement.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatement.java @@ -26,6 +26,7 @@ import java.sql.RowId; import java.sql.SQLDataException; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; +import java.sql.SQLType; import java.sql.SQLXML; import java.sql.Struct; import java.sql.Time; @@ -69,7 +70,7 @@ class JdbcPreparedStatement extends JdbcStatement implements PreparedStatement { throw new SQLFeatureNotSupportedException("Writes not supported"); } - private void setParam(int parameterIndex, Object value, int type) throws SQLException { + private void setParam(int parameterIndex, Object value, SQLType type) throws SQLException { checkOpen(); if (parameterIndex < 0 || parameterIndex > query.paramCount()) { @@ -77,12 +78,12 @@ class JdbcPreparedStatement extends JdbcStatement implements PreparedStatement { "]"); } - query.setParam(parameterIndex, value, JDBCType.valueOf(type)); + query.setParam(parameterIndex, value, type); } @Override public void setNull(int parameterIndex, int sqlType) throws SQLException { - setParam(parameterIndex, null, sqlType); + setParam(parameterIndex, null, JDBCType.valueOf(sqlType)); } @Override @@ -181,7 +182,7 @@ class JdbcPreparedStatement extends JdbcStatement implements PreparedStatement { @Override public void setObject(int parameterIndex, Object x) throws SQLException { if (x == null) { - setParam(parameterIndex, null, Types.NULL); + setParam(parameterIndex, null, JDBCType.NULL); return; } @@ -338,7 +339,7 @@ class JdbcPreparedStatement extends JdbcStatement implements PreparedStatement { // set the null value on the type and exit if (x == null) { - setParam(parameterIndex, null, targetSqlType); + setParam(parameterIndex, null, JDBCType.valueOf(targetSqlType)); return; } @@ -348,7 +349,7 @@ class JdbcPreparedStatement extends JdbcStatement implements PreparedStatement { throw new SQLFeatureNotSupportedException( "Conversion from type byte[] to " + targetJDBCType + " not supported"); } - setParam(parameterIndex, x, Types.VARBINARY); + setParam(parameterIndex, x, JDBCType.VARBINARY); return; } @@ -357,7 +358,7 @@ class JdbcPreparedStatement extends JdbcStatement implements PreparedStatement { || x instanceof Date || x instanceof LocalDateTime || x instanceof Time - || x instanceof java.util.Date) + || x 
instanceof java.util.Date) { if (targetJDBCType == JDBCType.TIMESTAMP) { // converting to {@code java.util.Date} because this is the type supported by {@code XContentBuilder} for serialization @@ -380,10 +381,10 @@ class JdbcPreparedStatement extends JdbcStatement implements PreparedStatement { dateToSet = (java.util.Date) x; } - setParam(parameterIndex, dateToSet, Types.TIMESTAMP); + setParam(parameterIndex, dateToSet, JDBCType.TIMESTAMP); return; } else if (targetJDBCType == JDBCType.VARCHAR) { - setParam(parameterIndex, String.valueOf(x), Types.VARCHAR); + setParam(parameterIndex, String.valueOf(x), JDBCType.VARCHAR); return; } // anything else other than VARCHAR and TIMESTAMP is not supported in this JDBC driver @@ -399,9 +400,9 @@ class JdbcPreparedStatement extends JdbcStatement implements PreparedStatement { || x instanceof Float || x instanceof Double || x instanceof String) { - setParam(parameterIndex, - TypeConverter.convert(x, TypeConverter.fromJavaToJDBC(x.getClass()), DataType.fromJdbcTypeToJava(targetJDBCType)), - targetSqlType); + setParam(parameterIndex, + TypeConverter.convert(x, TypeConverter.fromJavaToJDBC(x.getClass()), DataType.fromJdbcTypeToJava(targetJDBCType)), + JDBCType.valueOf(targetSqlType)); return; } @@ -410,8 +411,8 @@ class JdbcPreparedStatement extends JdbcStatement implements PreparedStatement { } private void checkKnownUnsupportedTypes(Object x) throws SQLFeatureNotSupportedException { - List> unsupportedTypes = new ArrayList>(Arrays.asList(Struct.class, Array.class, SQLXML.class, - RowId.class, Ref.class, Blob.class, NClob.class, Clob.class, LocalDate.class, LocalTime.class, + List> unsupportedTypes = new ArrayList<>(Arrays.asList(Struct.class, Array.class, SQLXML.class, + RowId.class, Ref.class, Blob.class, NClob.class, Clob.class, LocalDate.class, LocalTime.class, OffsetTime.class, OffsetDateTime.class, URL.class, BigDecimal.class)); for (Class clazz:unsupportedTypes) { diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java index ebdeaef15ca..a289991853f 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java @@ -25,6 +25,7 @@ import java.sql.ResultSetMetaData; import java.sql.RowId; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; +import java.sql.SQLType; import java.sql.SQLWarning; import java.sql.SQLXML; import java.sql.Statement; @@ -133,7 +134,7 @@ class JdbcResultSet implements ResultSet, JdbcWrapper { @Override public boolean getBoolean(int columnIndex) throws SQLException { - return column(columnIndex) != null ? getObject(columnIndex, Boolean.class) : false; + return column(columnIndex) != null ? getObject(columnIndex, Boolean.class) : false; } @Override @@ -245,7 +246,7 @@ class JdbcResultSet implements ResultSet, JdbcWrapper { private Long dateTime(int columnIndex) throws SQLException { Object val = column(columnIndex); - JDBCType type = cursor.columns().get(columnIndex - 1).type; + SQLType type = cursor.columns().get(columnIndex - 1).type; try { // TODO: the B6 appendix of the jdbc spec does mention CHAR, VARCHAR, LONGVARCHAR, DATE, TIMESTAMP as supported // jdbc types that should be handled by getDate and getTime methods. 
From all of those we support VARCHAR and @@ -338,7 +339,7 @@ class JdbcResultSet implements ResultSet, JdbcWrapper { return null; } - JDBCType columnType = cursor.columns().get(columnIndex - 1).type; + SQLType columnType = cursor.columns().get(columnIndex - 1).type; return TypeConverter.convert(val, columnType, type); } diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSetMetaData.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSetMetaData.java index 574cdeb62b4..ed2b899e22a 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSetMetaData.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSetMetaData.java @@ -114,7 +114,7 @@ class JdbcResultSetMetaData implements ResultSetMetaData, JdbcWrapper { @Override public String getColumnTypeName(int column) throws SQLException { - return column(column).type.name(); + return column(column).type.getName(); } @Override diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/PreparedQuery.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/PreparedQuery.java index 06825ee6e3f..ab459e90d96 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/PreparedQuery.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/PreparedQuery.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.sql.type.DataType; import java.sql.JDBCType; import java.sql.SQLException; +import java.sql.SQLType; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; @@ -18,10 +19,10 @@ import java.util.stream.Collectors; class PreparedQuery { static class ParamInfo { - JDBCType type; + SQLType type; Object value; - ParamInfo(Object value, JDBCType type) { + ParamInfo(Object value, SQLType type) { this.value = value; this.type = type; } @@ -43,7 +44,7 @@ class PreparedQuery { return params[param - 1]; } - void setParam(int param, Object value, JDBCType type) throws JdbcSQLException { + void setParam(int param, Object value, SQLType type) throws JdbcSQLException { if (param < 1 || param > params.length) { throw new JdbcSQLException("Invalid parameter index [" + param + "]"); } diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java index 7b638d8bd09..2decfe5d3c5 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java @@ -12,6 +12,7 @@ import java.sql.Date; import java.sql.JDBCType; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; +import java.sql.SQLType; import java.sql.Time; import java.sql.Timestamp; import java.time.LocalDate; @@ -56,11 +57,11 @@ final class TypeConverter { } private static final long DAY_IN_MILLIS = 60 * 60 * 24 * 1000; - private static final Map, JDBCType> javaToJDBC; + private static final Map, SQLType> javaToJDBC; static { - Map, JDBCType> aMap = Arrays.stream(DataType.values()) + Map, SQLType> aMap = Arrays.stream(DataType.values()) .filter(dataType -> dataType.javaClass() != null && dataType != DataType.HALF_FLOAT && dataType != DataType.SCALED_FLOAT @@ -139,7 +140,7 @@ final class TypeConverter { * Converts 
object val from columnType to type */ @SuppressWarnings("unchecked") - static <T> T convert(Object val, JDBCType columnType, Class<T> type) throws SQLException { + static <T> T convert(Object val, SQLType columnType, Class<T> type) throws SQLException { if (type == null) { return (T) convert(val, columnType); } @@ -151,7 +152,7 @@ try { return type.cast(val); } catch (ClassCastException cce) { - throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a %s", val, + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a %s", val, columnType.getName(), type.getName()), cce); } } @@ -210,7 +211,7 @@ if (type == OffsetDateTime.class) { return (T) asOffsetDateTime(val, columnType); } - throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a %s", val, + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a %s", val, columnType.getName(), type.getName())); } @@ -220,7 +221,7 @@ * See {@link javax.sql.rowset.RowSetMetaDataImpl#getColumnClassName} and * https://db.apache.org/derby/docs/10.5/ref/rrefjdbc20377.html */ - public static String classNameOf(JDBCType jdbcType) throws JdbcSQLException { + public static String classNameOf(SQLType jdbcType) throws JdbcSQLException { final DataType dataType; try { dataType = DataType.fromJdbcType(jdbcType); @@ -239,31 +240,35 @@ *
<p>
    * The returned types needs to correspond to ES-portion of classes returned by {@link TypeConverter#classNameOf} */ - static Object convert(Object v, JDBCType columnType) throws SQLException { - switch (columnType) { - case NULL: - return null; - case BOOLEAN: - case VARCHAR: - return v; // These types are already represented correctly in JSON - case TINYINT: - return ((Number) v).byteValue(); // Parser might return it as integer or long - need to update to the correct type - case SMALLINT: - return ((Number) v).shortValue(); // Parser might return it as integer or long - need to update to the correct type - case INTEGER: - return ((Number) v).intValue(); - case BIGINT: - return ((Number) v).longValue(); - case FLOAT: - case DOUBLE: - return doubleValue(v); // Double might be represented as string for infinity and NaN values - case REAL: - return floatValue(v); // Float might be represented as string for infinity and NaN values - case TIMESTAMP: - return new Timestamp(((Number) v).longValue()); - default: - throw new SQLException("Unexpected column type [" + columnType.getName() + "]"); - + static Object convert(Object v, SQLType columnType) throws SQLException { + if (columnType instanceof JDBCType) { + switch ((JDBCType) columnType) { + case NULL: + return null; + case BOOLEAN: + case VARCHAR: + return v; // These types are already represented correctly in JSON + case TINYINT: + return ((Number) v).byteValue(); // Parser might return it as integer or long - need to update to the correct type + case SMALLINT: + return ((Number) v).shortValue(); // Parser might return it as integer or long - need to update to the correct type + case INTEGER: + return ((Number) v).intValue(); + case BIGINT: + return ((Number) v).longValue(); + case FLOAT: + case DOUBLE: + return doubleValue(v); // Double might be represented as string for infinity and NaN values + case REAL: + return floatValue(v); // Float might be represented as string for infinity and NaN values + case TIMESTAMP: + return new Timestamp(((Number) v).longValue()); + default: + throw new SQLException("Unexpected column type [" + columnType.getName() + "]"); + + } + } else { + throw new SQLException("Unexpected column type [" + columnType.getName() + "]"); } } @@ -272,7 +277,7 @@ final class TypeConverter { *
<p>
    * It needs to support both params and column types */ - static boolean isSigned(JDBCType jdbcType) throws SQLException { + static boolean isSigned(SQLType jdbcType) throws SQLException { final DataType dataType; try { dataType = DataType.fromJdbcType(jdbcType); @@ -284,8 +289,8 @@ final class TypeConverter { } - static JDBCType fromJavaToJDBC(Class clazz) throws SQLException { - for (Entry, JDBCType> e : javaToJDBC.entrySet()) { + static SQLType fromJavaToJDBC(Class clazz) throws SQLException { + for (Entry, SQLType> e : javaToJDBC.entrySet()) { // java.util.Calendar from {@code javaToJDBC} is an abstract class and this method can be used with concrete classes as well if (e.getKey().isAssignableFrom(clazz)) { return e.getValue(); @@ -331,200 +336,215 @@ final class TypeConverter { return nativeValue == null ? null : String.valueOf(nativeValue); } - private static Boolean asBoolean(Object val, JDBCType columnType) throws SQLException { - switch (columnType) { - case BOOLEAN: - case TINYINT: - case SMALLINT: - case INTEGER: - case BIGINT: - case REAL: - case FLOAT: - case DOUBLE: - return Boolean.valueOf(Integer.signum(((Number) val).intValue()) != 0); - case VARCHAR: - return Boolean.valueOf((String) val); - default: - throw new SQLException( - format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Boolean", val, columnType.getName())); + private static Boolean asBoolean(Object val, SQLType columnType) throws SQLException { + if (columnType instanceof JDBCType) { + switch ((JDBCType) columnType) { + case BOOLEAN: + case TINYINT: + case SMALLINT: + case INTEGER: + case BIGINT: + case REAL: + case FLOAT: + case DOUBLE: + return Boolean.valueOf(Integer.signum(((Number) val).intValue()) != 0); + case VARCHAR: + return Boolean.valueOf((String) val); + default: + throw new SQLException( + format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Boolean", val, columnType.getName())); + } + } else { + throw new SQLException("Unexpected column type [" + columnType.getName() + "]"); } } - private static Byte asByte(Object val, JDBCType columnType) throws SQLException { - switch (columnType) { - case BOOLEAN: - return Byte.valueOf(((Boolean) val).booleanValue() ? (byte) 1 : (byte) 0); - case TINYINT: - case SMALLINT: - case INTEGER: - case BIGINT: - return safeToByte(((Number) val).longValue()); - case REAL: - case FLOAT: - case DOUBLE: - return safeToByte(safeToLong(((Number) val).doubleValue())); - case VARCHAR: - try { - return Byte.valueOf((String) val); - } catch (NumberFormatException e) { - throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Byte", val), e); - } - default: + private static Byte asByte(Object val, SQLType columnType) throws SQLException { + if (columnType instanceof JDBCType) { + switch ((JDBCType) columnType) { + case BOOLEAN: + return Byte.valueOf(((Boolean) val).booleanValue() ? 
(byte) 1 : (byte) 0); + case TINYINT: + case SMALLINT: + case INTEGER: + case BIGINT: + return safeToByte(((Number) val).longValue()); + case REAL: + case FLOAT: + case DOUBLE: + return safeToByte(safeToLong(((Number) val).doubleValue())); + case VARCHAR: + try { + return Byte.valueOf((String) val); + } catch (NumberFormatException e) { + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Byte", val), e); + } + default: + } } throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Byte", val, columnType.getName())); } - private static Short asShort(Object val, JDBCType columnType) throws SQLException { - switch (columnType) { - case BOOLEAN: - return Short.valueOf(((Boolean) val).booleanValue() ? (short) 1 : (short) 0); - case TINYINT: - case SMALLINT: - case INTEGER: - case BIGINT: - return safeToShort(((Number) val).longValue()); - case REAL: - case FLOAT: - case DOUBLE: - return safeToShort(safeToLong(((Number) val).doubleValue())); - case VARCHAR: - try { - return Short.valueOf((String) val); - } catch (NumberFormatException e) { - throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Short", val), e); - } - default: + private static Short asShort(Object val, SQLType columnType) throws SQLException { + if (columnType instanceof JDBCType) { + switch ((JDBCType) columnType) { + case BOOLEAN: + return Short.valueOf(((Boolean) val).booleanValue() ? (short) 1 : (short) 0); + case TINYINT: + case SMALLINT: + case INTEGER: + case BIGINT: + return safeToShort(((Number) val).longValue()); + case REAL: + case FLOAT: + case DOUBLE: + return safeToShort(safeToLong(((Number) val).doubleValue())); + case VARCHAR: + try { + return Short.valueOf((String) val); + } catch (NumberFormatException e) { + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Short", val), + e); + } + default: + } } - throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Short", val, columnType.getName())); } - private static Integer asInteger(Object val, JDBCType columnType) throws SQLException { - switch (columnType) { - case BOOLEAN: - return Integer.valueOf(((Boolean) val).booleanValue() ? 1 : 0); - case TINYINT: - case SMALLINT: - case INTEGER: - case BIGINT: - return safeToInt(((Number) val).longValue()); - case REAL: - case FLOAT: - case DOUBLE: - return safeToInt(safeToLong(((Number) val).doubleValue())); - case VARCHAR: - try { - return Integer.valueOf((String) val); - } catch (NumberFormatException e) { - throw new SQLException( - format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to an Integer", val), e); - } - default: - } + private static Integer asInteger(Object val, SQLType columnType) throws SQLException { + if (columnType instanceof JDBCType) { + switch ((JDBCType) columnType) { + case BOOLEAN: + return Integer.valueOf(((Boolean) val).booleanValue() ? 
1 : 0); + case TINYINT: + case SMALLINT: + case INTEGER: + case BIGINT: + return safeToInt(((Number) val).longValue()); + case REAL: + case FLOAT: + case DOUBLE: + return safeToInt(safeToLong(((Number) val).doubleValue())); + case VARCHAR: + try { + return Integer.valueOf((String) val); + } catch (NumberFormatException e) { + throw new SQLException( + format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to an Integer", val), e); + } + default: + } + } throw new SQLException( format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to an Integer", val, columnType.getName())); } - private static Long asLong(Object val, JDBCType columnType) throws SQLException { - switch (columnType) { - case BOOLEAN: - return Long.valueOf(((Boolean) val).booleanValue() ? 1 : 0); - case TINYINT: - case SMALLINT: - case INTEGER: - case BIGINT: - return Long.valueOf(((Number) val).longValue()); - case REAL: - case FLOAT: - case DOUBLE: - return safeToLong(((Number) val).doubleValue()); - //TODO: should we support conversion to TIMESTAMP? - //The spec says that getLong() should support the following types conversions: - //TINYINT, SMALLINT, INTEGER, BIGINT, REAL, FLOAT, DOUBLE, DECIMAL, NUMERIC, BIT, BOOLEAN, CHAR, VARCHAR, LONGVARCHAR - //case TIMESTAMP: - // return ((Number) val).longValue(); - case VARCHAR: - try { - return Long.valueOf((String) val); - } catch (NumberFormatException e) { - throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Long", val), e); - } - default: + private static Long asLong(Object val, SQLType columnType) throws SQLException { + if (columnType instanceof JDBCType) { + switch ((JDBCType) columnType) { + case BOOLEAN: + return Long.valueOf(((Boolean) val).booleanValue() ? 1 : 0); + case TINYINT: + case SMALLINT: + case INTEGER: + case BIGINT: + return Long.valueOf(((Number) val).longValue()); + case REAL: + case FLOAT: + case DOUBLE: + return safeToLong(((Number) val).doubleValue()); + //TODO: should we support conversion to TIMESTAMP? + //The spec says that getLong() should support the following types conversions: + //TINYINT, SMALLINT, INTEGER, BIGINT, REAL, FLOAT, DOUBLE, DECIMAL, NUMERIC, BIT, BOOLEAN, CHAR, VARCHAR, LONGVARCHAR + //case TIMESTAMP: + // return ((Number) val).longValue(); + case VARCHAR: + try { + return Long.valueOf((String) val); + } catch (NumberFormatException e) { + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Long", val), e); + } + default: + } } - throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Long", val, columnType.getName())); } - private static Float asFloat(Object val, JDBCType columnType) throws SQLException { - switch (columnType) { - case BOOLEAN: - return Float.valueOf(((Boolean) val).booleanValue() ? 
1 : 0); - case TINYINT: - case SMALLINT: - case INTEGER: - case BIGINT: - return Float.valueOf(((Number) val).longValue()); - case REAL: - case FLOAT: - case DOUBLE: - return Float.valueOf((((float) ((Number) val).doubleValue()))); - case VARCHAR: - try { - return Float.valueOf((String) val); - } catch (NumberFormatException e) { - throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Float", val), e); - } - default: + private static Float asFloat(Object val, SQLType columnType) throws SQLException { + if (columnType instanceof JDBCType) { + switch ((JDBCType) columnType) { + case BOOLEAN: + return Float.valueOf(((Boolean) val).booleanValue() ? 1 : 0); + case TINYINT: + case SMALLINT: + case INTEGER: + case BIGINT: + return Float.valueOf(((Number) val).longValue()); + case REAL: + case FLOAT: + case DOUBLE: + return Float.valueOf((((float) ((Number) val).doubleValue()))); + case VARCHAR: + try { + return Float.valueOf((String) val); + } catch (NumberFormatException e) { + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Float", val), + e); + } + default: + } } - throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Float", val, columnType.getName())); } - private static Double asDouble(Object val, JDBCType columnType) throws SQLException { - switch (columnType) { - case BOOLEAN: - return Double.valueOf(((Boolean) val).booleanValue() ? 1 : 0); - case TINYINT: - case SMALLINT: - case INTEGER: - case BIGINT: - return Double.valueOf(((Number) val).longValue()); - case REAL: - case FLOAT: - case DOUBLE: + private static Double asDouble(Object val, SQLType columnType) throws SQLException { + if (columnType instanceof JDBCType) { + switch ((JDBCType) columnType) { + case BOOLEAN: + return Double.valueOf(((Boolean) val).booleanValue() ? 
1 : 0); + case TINYINT: + case SMALLINT: + case INTEGER: + case BIGINT: + return Double.valueOf(((Number) val).longValue()); + case REAL: + case FLOAT: + case DOUBLE: - return Double.valueOf(((Number) val).doubleValue()); - case VARCHAR: - try { - return Double.valueOf((String) val); - } catch (NumberFormatException e) { - throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Double", val), e); - } - default: + return Double.valueOf(((Number) val).doubleValue()); + case VARCHAR: + try { + return Double.valueOf((String) val); + } catch (NumberFormatException e) { + throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Double", val), + e); + } + default: + } } - throw new SQLException( format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Double", val, columnType.getName())); } - private static Date asDate(Object val, JDBCType columnType) throws SQLException { + private static Date asDate(Object val, SQLType columnType) throws SQLException { if (columnType == JDBCType.TIMESTAMP) { return new Date(utcMillisRemoveTime(((Number) val).longValue())); } throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Date", val, columnType.getName())); } - private static Time asTime(Object val, JDBCType columnType) throws SQLException { + private static Time asTime(Object val, SQLType columnType) throws SQLException { if (columnType == JDBCType.TIMESTAMP) { return new Time(utcMillisRemoveDate(((Number) val).longValue())); } throw new SQLException(format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Time", val, columnType.getName())); } - private static Timestamp asTimestamp(Object val, JDBCType columnType) throws SQLException { + private static Timestamp asTimestamp(Object val, SQLType columnType) throws SQLException { if (columnType == JDBCType.TIMESTAMP) { return new Timestamp(((Number) val).longValue()); } @@ -532,27 +552,27 @@ final class TypeConverter { format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Timestamp", val, columnType.getName())); } - private static byte[] asByteArray(Object val, JDBCType columnType) { + private static byte[] asByteArray(Object val, SQLType columnType) { throw new UnsupportedOperationException(); } - private static LocalDate asLocalDate(Object val, JDBCType columnType) { + private static LocalDate asLocalDate(Object val, SQLType columnType) { throw new UnsupportedOperationException(); } - private static LocalTime asLocalTime(Object val, JDBCType columnType) { + private static LocalTime asLocalTime(Object val, SQLType columnType) { throw new UnsupportedOperationException(); } - private static LocalDateTime asLocalDateTime(Object val, JDBCType columnType) { + private static LocalDateTime asLocalDateTime(Object val, SQLType columnType) { throw new UnsupportedOperationException(); } - private static OffsetTime asOffsetTime(Object val, JDBCType columnType) { + private static OffsetTime asOffsetTime(Object val, SQLType columnType) { throw new UnsupportedOperationException(); } - private static OffsetDateTime asOffsetDateTime(Object val, JDBCType columnType) { + private static OffsetDateTime asOffsetDateTime(Object val, SQLType columnType) { throw new UnsupportedOperationException(); } @@ -592,4 +612,4 @@ final class TypeConverter { } return Math.round(x); } -} +} \ No newline at end of file diff --git 
a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/net/protocol/ColumnInfo.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/net/protocol/ColumnInfo.java index 6e61d65ff53..b8582d8e9b3 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/net/protocol/ColumnInfo.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/net/protocol/ColumnInfo.java @@ -5,7 +5,7 @@ */ package org.elasticsearch.xpack.sql.jdbc.net.protocol; -import java.sql.JDBCType; +import java.sql.SQLType; import java.util.Objects; public class ColumnInfo { @@ -15,9 +15,9 @@ public class ColumnInfo { public final String label; public final String name; public final int displaySize; - public final JDBCType type; + public final SQLType type; - public ColumnInfo(String name, JDBCType type, String table, String catalog, String schema, String label, int displaySize) { + public ColumnInfo(String name, SQLType type, String table, String catalog, String schema, String label, int displaySize) { if (name == null) { throw new IllegalArgumentException("[name] must not be null"); } @@ -88,4 +88,4 @@ public class ColumnInfo { public int hashCode() { return Objects.hash(name, type, table, catalog, schema, label, displaySize); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatementTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatementTests.java index 35a3ec57487..229c7e8182c 100644 --- a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatementTests.java +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatementTests.java @@ -9,9 +9,9 @@ import org.elasticsearch.test.ESTestCase; import java.net.URL; import java.nio.charset.StandardCharsets; -import java.sql.JDBCType; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; +import java.sql.SQLType; import java.sql.Struct; import java.sql.Time; import java.sql.Timestamp; @@ -450,7 +450,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { someCalendar.setTimeInMillis(randomLong()); jps.setObject(1, someCalendar); - assertEquals(someCalendar.getTime(), (Date) value(jps)); + assertEquals(someCalendar.getTime(), value(jps)); assertEquals(TIMESTAMP, jdbcType(jps)); assertTrue(value(jps) instanceof java.util.Date); @@ -460,7 +460,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { Calendar nonDefaultCal = randomCalendar(); jps.setObject(1, nonDefaultCal); - assertEquals(nonDefaultCal.getTime(), (Date) value(jps)); + assertEquals(nonDefaultCal.getTime(), value(jps)); assertEquals(TIMESTAMP, jdbcType(jps)); } @@ -477,7 +477,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { Date someDate = new Date(randomLong()); jps.setObject(1, someDate); - assertEquals(someDate, (Date) value(jps)); + assertEquals(someDate, value(jps)); assertEquals(TIMESTAMP, jdbcType(jps)); assertTrue(value(jps) instanceof java.util.Date); @@ -530,7 +530,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { assertTrue(value(jps) instanceof byte[]); jps.setObject(1, buffer, Types.VARBINARY); - assertEquals((byte[]) value(jps), buffer); + assertEquals(value(jps), buffer); assertEquals(VARBINARY, jdbcType(jps)); SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, buffer, Types.VARCHAR)); @@ 
-555,7 +555,7 @@ public class JdbcPreparedStatementTests extends ESTestCase { return new JdbcPreparedStatement(null, JdbcConfiguration.create("jdbc:es://l:1", null, 0), "?"); } - private JDBCType jdbcType(JdbcPreparedStatement jps) throws SQLException { + private SQLType jdbcType(JdbcPreparedStatement jps) throws SQLException { return jps.query.getParam(1).type; } diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java index 1dc356f9fba..970be02e385 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java @@ -183,6 +183,7 @@ public class SqlQueryResponse extends ActionResponse implements ToXContentObject JDBCType jdbcType; int displaySize; if (in.readBoolean()) { + // FIXME: this needs changing to allow custom types jdbcType = JDBCType.valueOf(in.readVInt()); displaySize = in.readVInt(); } else { @@ -207,8 +208,12 @@ public class SqlQueryResponse extends ActionResponse implements ToXContentObject @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } SqlQueryResponse that = (SqlQueryResponse) o; return Objects.equals(cursor, that.cursor) && Objects.equals(columns, that.columns) && diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ColumnInfo.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ColumnInfo.java index dcd4f314005..28eb1b37fdc 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ColumnInfo.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ColumnInfo.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.sql.JDBCType; +import java.sql.SQLType; import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; @@ -51,10 +52,10 @@ public class ColumnInfo implements ToXContentObject { private final String name; private final String esType; @Nullable - private final JDBCType jdbcType; + private final SQLType jdbcType; private final int displaySize; - public ColumnInfo(String table, String name, String esType, JDBCType jdbcType, int displaySize) { + public ColumnInfo(String table, String name, String esType, SQLType jdbcType, int displaySize) { this.table = table; this.name = name; this.esType = esType; @@ -79,6 +80,10 @@ public class ColumnInfo implements ToXContentObject { builder.field("name", name); builder.field("type", esType); if (jdbcType != null) { + // FIXME: make this pluggable by saving the SQLType.getVendorName + if (!(jdbcType instanceof JDBCType)) { + throw new IOException("Unknown jdbc type " + jdbcType); + } builder.field("jdbc_type", jdbcType.getVendorTypeNumber()); builder.field("display_size", displaySize); } @@ -114,7 +119,7 @@ public class ColumnInfo implements ToXContentObject { /** * The type of the column as it would be returned by a JDBC driver. 
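// Editorial aside (illustrative sketch, not part of this patch): widening
// ColumnInfo from the JDBCType enum to the SQLType interface is what the
// FIXME above is preparing for: SQLType can be implemented by driver code,
// so vendor-specific types become possible. (The actual interface methods
// are getName(), getVendor() and getVendorTypeNumber().) A hypothetical
// custom type, with invented name, vendor string and type number:
enum EsType implements java.sql.SQLType {
    GEO_POINT(4001); // invented vendor type number

    private final int vendorTypeNumber;

    EsType(int vendorTypeNumber) {
        this.vendorTypeNumber = vendorTypeNumber;
    }

    @Override
    public String getName() {
        return name();
    }

    @Override
    public String getVendor() {
        return "org.elasticsearch"; // invented vendor identifier
    }

    @Override
    public Integer getVendorTypeNumber() {
        return vendorTypeNumber;
    }
}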
*/ - public JDBCType jdbcType() { + public SQLType jdbcType() { return jdbcType; } @@ -127,8 +132,12 @@ public class ColumnInfo implements ToXContentObject { @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } ColumnInfo that = (ColumnInfo) o; return displaySize == that.displaySize && Objects.equals(table, that.table) && diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java index 3f77bc2fc2e..05fb192f8d1 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.type; import java.sql.JDBCType; +import java.sql.SQLType; import java.sql.Timestamp; import java.util.Arrays; import java.util.Locale; @@ -43,7 +44,7 @@ public enum DataType { DATE( JDBCType.TIMESTAMP, Timestamp.class, Long.BYTES, 24, 24); // @formatter:on - private static final Map jdbcToEs; + private static final Map jdbcToEs; static { jdbcToEs = Arrays.stream(DataType.values()) @@ -59,7 +60,7 @@ public enum DataType { /** * Compatible JDBC type */ - public final JDBCType jdbcType; + public final SQLType jdbcType; /** * Size of the type in bytes @@ -102,7 +103,7 @@ public enum DataType { private final Class javaClass; - DataType(JDBCType jdbcType, Class javaClass, int size, int defaultPrecision, int displaySize, boolean isInteger, boolean isRational, + DataType(SQLType jdbcType, Class javaClass, int size, int defaultPrecision, int displaySize, boolean isInteger, boolean isRational, boolean defaultDocValues) { this.esType = name().toLowerCase(Locale.ROOT); this.javaClass = javaClass; @@ -115,7 +116,7 @@ public enum DataType { this.defaultDocValues = defaultDocValues; } - DataType(JDBCType jdbcType, Class javaClass, int size, int defaultPrecision, int displaySize) { + DataType(SQLType jdbcType, Class javaClass, int size, int defaultPrecision, int displaySize) { this(jdbcType, javaClass, size, defaultPrecision, displaySize, false, false, true); } @@ -147,14 +148,14 @@ public enum DataType { return this != OBJECT && this != NESTED; } - public static DataType fromJdbcType(JDBCType jdbcType) { + public static DataType fromJdbcType(SQLType jdbcType) { if (jdbcToEs.containsKey(jdbcType) == false) { throw new IllegalArgumentException("Unsupported JDBC type [" + jdbcType + "]"); } return jdbcToEs.get(jdbcType); } - public static Class fromJdbcTypeToJava(JDBCType jdbcType) { + public static Class fromJdbcTypeToJava(SQLType jdbcType) { if (jdbcToEs.containsKey(jdbcType) == false) { throw new IllegalArgumentException("Unsupported JDBC type [" + jdbcType + "]"); } diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 index ca6fdece281..75dd646d932 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 +++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 @@ -218,11 +218,9 @@ primaryExpression : castExpression #cast | extractExpression #extract | constant #constantDefault - | ASTERISK #star | (qualifiedName DOT)? 
ASTERISK #star | functionExpression #function | '(' query ')' #subqueryExpression - | identifier #columnReference | qualifiedName #dereference | '(' expression ')' #parenthesizedExpression ; @@ -309,8 +307,8 @@ unquoteIdentifier ; number - : (PLUS | MINUS)? DECIMAL_VALUE #decimalLiteral - | (PLUS | MINUS)? INTEGER_VALUE #integerLiteral + : DECIMAL_VALUE #decimalLiteral + | INTEGER_VALUE #integerLiteral ; string @@ -456,7 +454,7 @@ DIGIT_IDENTIFIER ; TABLE_IDENTIFIER - : (LETTER | DIGIT | '_' | '@' | ASTERISK)+ + : (LETTER | DIGIT | '_')+ ; QUOTED_IDENTIFIER diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java index f45368afc06..c13160c9335 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java @@ -29,7 +29,7 @@ import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.sql.expression.function.Functions; import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.Cast; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.ArithmeticFunction; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.ArithmeticOperation; import org.elasticsearch.xpack.sql.plan.TableIdentifier; import org.elasticsearch.xpack.sql.plan.logical.Aggregate; import org.elasticsearch.xpack.sql.plan.logical.EsRelation; @@ -112,10 +112,6 @@ public class Analyzer extends RuleExecutor<LogicalPlan> { new ResolveAggsInHaving() //new ImplicitCasting() ); - // TODO: this might be removed since the deduplication happens already in ResolveFunctions - Batch deduplication = new Batch("Deduplication", - new PruneDuplicateFunctions()); - return Arrays.asList(substitution, resolution); } @@ -196,7 +192,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> { .collect(toList()) ); } - + private static boolean hasStar(List<? extends Expression> exprs) { for (Expression expression : exprs) { if (expression instanceof UnresolvedStar) { @@ -775,9 +771,9 @@ public class Analyzer extends RuleExecutor<LogicalPlan> { return uf; } - String normalizedName = functionRegistry.concreteFunctionName(name); + String functionName = functionRegistry.resolveAlias(name); - List<Function> list = getList(seen, normalizedName); + List<Function> list = getList(seen, functionName); // first try to resolve from seen functions if (!list.isEmpty()) { for (Function seenFunction : list) { @@ -788,11 +784,11 @@ } // not seen before, use the registry - if (!functionRegistry.functionExists(name)) { - return uf.missing(normalizedName, functionRegistry.listFunctions()); + if (!functionRegistry.functionExists(functionName)) { + return uf.missing(functionName, functionRegistry.listFunctions()); } // TODO: look into Generator for significant terms, etc..
- FunctionDefinition def = functionRegistry.resolveFunction(normalizedName); + FunctionDefinition def = functionRegistry.resolveFunction(functionName); Function f = uf.buildResolved(timeZone, def); list.add(f); @@ -1011,8 +1007,8 @@ // BinaryOperations are ignored as they are pushed down to ES // and casting (and thus Aliasing when folding) gets in the way - if (e instanceof ArithmeticFunction) { - ArithmeticFunction f = (ArithmeticFunction) e; + if (e instanceof ArithmeticOperation) { + ArithmeticOperation f = (ArithmeticOperation) e; left = f.left(); right = f.right(); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java index 31d933f9f59..2a30db7e44d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java @@ -168,7 +168,7 @@ public class CompositeAggregationCursor implements Cursor { Map<String, Object> afterKey = composite.afterKey(); // a null after-key means done if (afterKey != null) { - AggregationBuilder aggBuilder = next.aggregations().getAggregatorFactories().get(0); + AggregationBuilder aggBuilder = next.aggregations().getAggregatorFactories().iterator().next(); // update after-key with the new value if (aggBuilder instanceof CompositeAggregationBuilder) { CompositeAggregationBuilder comp = (CompositeAggregationBuilder) aggBuilder; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index d0bff77a648..5be361179b3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -32,11 +32,11 @@ import org.elasticsearch.xpack.sql.execution.search.extractor.ConstantExtractor; import org.elasticsearch.xpack.sql.execution.search.extractor.FieldHitExtractor; import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor; import org.elasticsearch.xpack.sql.execution.search.extractor.MetricAggExtractor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.AggExtractorInput; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.AggPathInput; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.HitExtractorInput; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ReferenceInput; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.AggExtractorInput; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.AggPathInput; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.HitExtractorInput; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.ReferenceInput; import org.elasticsearch.xpack.sql.querydsl.agg.Aggs; import org.elasticsearch.xpack.sql.querydsl.container.ComputedRef; import org.elasticsearch.xpack.sql.querydsl.container.GlobalCountRef; @@ -275,7 +275,7 @@ public class Querier { } if (ref
instanceof ComputedRef) { - ProcessorDefinition proc = ((ComputedRef) ref).processor(); + Pipe proc = ((ComputedRef) ref).processor(); // wrap only agg inputs proc = proc.transformDown(l -> { @@ -351,7 +351,7 @@ public class Querier { } if (ref instanceof ComputedRef) { - ProcessorDefinition proc = ((ComputedRef) ref).processor(); + Pipe proc = ((ComputedRef) ref).processor(); // collect hitNames Set<String> hitNames = new LinkedHashSet<>(); proc = proc.transformDown(l -> { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractor.java index dded5adfcb8..92f81b6ac43 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractor.java @@ -9,8 +9,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.HitExtractorProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.HitExtractorProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; import java.util.Objects; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java index 6f3ea405fbe..fb3c0290f31 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java @@ -5,16 +5,18 @@ */ package org.elasticsearch.xpack.sql.expression; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.EsField; -import static java.util.Collections.singletonList; - import java.util.Collections; import java.util.List; +import static java.util.Collections.singletonList; + /** * An {@code Alias} is a {@code NamedExpression} that gets renamed to something else through the Alias.
* @@ -91,6 +93,11 @@ public class Alias extends NamedExpression { return lazyAttribute; } + @Override + public ScriptTemplate asScript() { + throw new SqlIllegalArgumentException("Encountered a bug; an alias should never be scripted"); + } + private Attribute createAttribute() { if (resolved()) { Expression c = child(); @@ -114,4 +121,4 @@ public class Alias extends NamedExpression { public String toString() { return child + " AS " + name() + "#" + id(); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Attribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Attribute.java index dd18363b2a8..3be4b027954 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Attribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Attribute.java @@ -5,7 +5,10 @@ */ package org.elasticsearch.xpack.sql.expression; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; import java.util.Objects; @@ -60,6 +63,11 @@ public abstract class Attribute extends NamedExpression { throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); } + @Override + public ScriptTemplate asScript() { + throw new SqlIllegalArgumentException("Encountered a bug - an attribute should never be scripted"); + } + public String qualifier() { return qualifier; } @@ -103,6 +111,11 @@ public abstract class Attribute extends NamedExpression { return id().hashCode(); } + @Override + protected NodeInfo<? extends Expression> info() { + return null; + } + @Override public boolean semanticEquals(Expression other) { return other instanceof Attribute ? id().equals(((Attribute) other).id()) : false; @@ -130,4 +143,4 @@ } protected abstract String label(); -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/BinaryExpression.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/BinaryExpression.java deleted file mode 100644 index fd6b8632f8e..00000000000 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/BinaryExpression.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License.
- */ -package org.elasticsearch.xpack.sql.expression; - -import org.elasticsearch.xpack.sql.tree.Location; - -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -public abstract class BinaryExpression extends Expression { - - private final Expression left, right; - - protected BinaryExpression(Location location, Expression left, Expression right) { - super(location, Arrays.asList(left, right)); - this.left = left; - this.right = right; - } - - @Override - public final BinaryExpression replaceChildren(List<Expression> newChildren) { - if (newChildren.size() != 2) { - throw new IllegalArgumentException("expected [2] children but received [" + newChildren.size() + "]"); - } - return replaceChildren(newChildren.get(0), newChildren.get(1)); - } - protected abstract BinaryExpression replaceChildren(Expression newLeft, Expression newRight); - - public Expression left() { - return left; - } - - public Expression right() { - return right; - } - - @Override - public boolean foldable() { - return left.foldable() && right.foldable(); - } - - @Override - public boolean nullable() { - return left.nullable() || right.nullable(); - } - - @Override - public int hashCode() { - return Objects.hash(left, right); - } - - @Override - public boolean equals(Object obj) { - if (!super.equals(obj)) { - return false; - } - - BinaryExpression other = (BinaryExpression) obj; - return Objects.equals(left, other.left) - && Objects.equals(right, other.right); - } - - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(left()); - sb.append(" "); - sb.append(symbol()); - sb.append(" "); - sb.append(right()); - return sb.toString(); - } - - public abstract String symbol(); - - public abstract BinaryExpression swapLeftAndRight(); -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expression.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expression.java index 846c06feb09..27291a9253e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expression.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expression.java @@ -132,4 +132,4 @@ public abstract class Expression extends Node<Expression> implements Resolvable public String toString() { return nodeName() + "[" + propertiesToString(false) + "]"; } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java index 1b326e0474f..dfaef60abd5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java @@ -5,7 +5,9 @@ */ package org.elasticsearch.xpack.sql.expression; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Expression.TypeResolution; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import java.util.ArrayList; import java.util.Collection; @@ -112,6 +114,13 @@ public abstract class Expressions { return true; } + public static Pipe pipe(Expression e) { + if (e instanceof NamedExpression) { + return ((NamedExpression) e).asPipe(); + } + throw new SqlIllegalArgumentException("Cannot create pipe for {}", e); + } + public static TypeResolution typeMustBe(Expression e, Predicate<Expression> predicate, String message) { return predicate.test(e) ?
TypeResolution.TYPE_RESOLVED : new TypeResolution(message); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Literal.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Literal.java index 4badfc7091c..3c334c233f9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Literal.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Literal.java @@ -6,6 +6,8 @@ package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.gen.script.Params; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -77,6 +79,11 @@ public class Literal extends NamedExpression { return new LiteralAttribute(location(), name(), null, false, id(), false, dataType, this); } + @Override + public ScriptTemplate asScript() { + return new ScriptTemplate(String.valueOf(value), Params.EMPTY, dataType); + } + @Override public Expression replaceChildren(List<Expression> newChildren) { throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); @@ -87,9 +94,15 @@ public class Literal extends NamedExpression { return AttributeSet.EMPTY; } + @Override + protected Expression canonicalize() { + String s = String.valueOf(value); + return name().equals(s) ? this : Literal.of(location(), value); + } + @Override public int hashCode() { - return Objects.hash(name(), value, dataType); + return Objects.hash(value, dataType); } @Override @@ -102,8 +115,7 @@ } Literal other = (Literal) obj; - return Objects.equals(name(), other.name()) - && Objects.equals(value, other.value) + return Objects.equals(value, other.value) && Objects.equals(dataType, other.dataType); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LiteralAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LiteralAttribute.java index a6483458a6b..ef3db576b66 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LiteralAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LiteralAttribute.java @@ -5,8 +5,7 @@ */ package org.elasticsearch.xpack.sql.expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ConstantInput; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -33,12 +32,13 @@ public class LiteralAttribute extends TypedAttribute { return new LiteralAttribute(location, name, qualifier, nullable, id, synthetic, dataType(), literal); } - public ProcessorDefinition asProcessorDefinition() { - return new ConstantInput(location(), literal, literal.value()); - } - @Override protected String label() { return "c"; } + + @Override + public Pipe asPipe() { + return literal.asPipe(); + } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/NamedExpression.java
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/NamedExpression.java index cf06ddcc09c..1cd4510079b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/NamedExpression.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/NamedExpression.java @@ -5,16 +5,26 @@ */ package org.elasticsearch.xpack.sql.expression; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.AttributeInput; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.ConstantInput; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import java.util.List; import java.util.Objects; +/** + * An expression that has a name. Named expressions can be used as a result + * (by converting to an attribute). + */ public abstract class NamedExpression extends Expression { private final String name; private final ExpressionId id; private final boolean synthetic; + private Pipe lazyPipe = null; + public NamedExpression(Location location, String name, List<Expression> children, ExpressionId id) { this(location, name, children, id, false); @@ -41,6 +51,20 @@ public abstract class NamedExpression extends Expression { public abstract Attribute toAttribute(); + public Pipe asPipe() { + if (lazyPipe == null) { + lazyPipe = foldable() ? new ConstantInput(location(), this, fold()) : makePipe(); + } + + return lazyPipe; + } + + protected Pipe makePipe() { + return new AttributeInput(location(), this, toAttribute()); + } + + public abstract ScriptTemplate asScript(); + @Override public int hashCode() { return Objects.hash(id, name, synthetic); @@ -67,4 +91,4 @@ && Objects.equals(name, other.name) && Objects.equals(children(), other.children()); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Order.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Order.java index 70e537527c8..79ffad82380 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Order.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Order.java @@ -7,30 +7,53 @@ package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.type.DataType; +import java.util.List; import java.util.Objects; -public class Order extends UnaryExpression { +import static java.util.Collections.singletonList; +
+public class Order extends Expression { public enum OrderDirection { ASC, DESC } + private final Expression child; private final OrderDirection direction; public Order(Location location, Expression child, OrderDirection direction) { - super(location, child); + super(location, singletonList(child)); + this.child = child; this.direction = direction; } @Override protected NodeInfo<Order> info() { - return NodeInfo.create(this, Order::new, child(), direction); + return NodeInfo.create(this, Order::new, child, direction); } @Override - protected UnaryExpression replaceChild(Expression newChild) { - return new Order(location(), newChild, direction); + public boolean nullable() { + return false; + } + + @Override + public DataType dataType() { + return child.dataType(); + } + + @Override + public Order replaceChildren(List<Expression> newChildren) { + if (newChildren.size()
!= 1) { + throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); + } + return new Order(location(), newChildren.get(0), direction); + } + + public Expression child() { + return child; } public OrderDirection direction() { @@ -44,7 +67,7 @@ public class Order extends UnaryExpression { @Override public int hashCode() { - return Objects.hash(child(), direction); + return Objects.hash(child, direction); } @Override @@ -59,6 +82,6 @@ public class Order extends UnaryExpression { Order other = (Order) obj; return Objects.equals(direction, other.direction) - && Objects.equals(child(), other.child()); + && Objects.equals(child, other.child); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/SubQueryExpression.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/SubQueryExpression.java index 33f2f3d0b07..1a046724a34 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/SubQueryExpression.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/SubQueryExpression.java @@ -5,13 +5,13 @@ */ package org.elasticsearch.xpack.sql.expression; +import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.sql.tree.Location; + import java.util.Collections; import java.util.List; import java.util.Objects; -import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.sql.tree.Location; - public abstract class SubQueryExpression extends Expression { private final LogicalPlan query; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnaryExpression.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnaryExpression.java index 710ee760328..c2e764522f4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnaryExpression.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnaryExpression.java @@ -5,21 +5,23 @@ */ package org.elasticsearch.xpack.sql.expression; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; +import java.util.List; import java.util.Objects; import static java.util.Collections.singletonList; -import java.util.List; - -public abstract class UnaryExpression extends Expression { +public abstract class UnaryExpression extends NamedExpression { private final Expression child; protected UnaryExpression(Location location, Expression child) { - super(location, singletonList(child)); + super(location, null, singletonList(child), null); this.child = child; } @@ -56,6 +58,21 @@ public abstract class UnaryExpression extends Expression { return child.dataType(); } + @Override + public Attribute toAttribute() { + throw new SqlIllegalArgumentException("Not supported yet"); + } + + @Override + public ScriptTemplate asScript() { + throw new SqlIllegalArgumentException("Not supported yet"); + } + + @Override + protected Pipe makePipe() { + throw new SqlIllegalArgumentException("Not supported yet"); + } + @Override public int hashCode() { return Objects.hash(child); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedNamedExpression.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedNamedExpression.java index ea35c382750..a36e534fe3f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedNamedExpression.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedNamedExpression.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.xpack.sql.capabilities.Unresolvable; import org.elasticsearch.xpack.sql.capabilities.UnresolvedException; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; @@ -42,4 +43,9 @@ abstract class UnresolvedNamedExpression extends NamedExpression implements Unre public Attribute toAttribute() { throw new UnresolvedException("attribute", this); } + + @Override + public ScriptTemplate asScript() { + throw new UnresolvedException("script", this); + } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java index 2daa90c7bda..caafd8294c6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.sql.expression.function.aggregate.StddevPop; import org.elasticsearch.xpack.sql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.sql.expression.function.aggregate.SumOfSquares; import org.elasticsearch.xpack.sql.expression.function.aggregate.VarPop; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Mod; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayName; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfMonth; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfWeek; @@ -61,6 +60,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.math.Sin; import org.elasticsearch.xpack.sql.expression.function.scalar.math.Sinh; import org.elasticsearch.xpack.sql.expression.function.scalar.math.Sqrt; import org.elasticsearch.xpack.sql.expression.function.scalar.math.Tan; +import org.elasticsearch.xpack.sql.expression.function.scalar.math.Truncate; import org.elasticsearch.xpack.sql.expression.function.scalar.string.Ascii; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BitLength; import org.elasticsearch.xpack.sql.expression.function.scalar.string.Char; @@ -80,6 +80,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.string.Right; import org.elasticsearch.xpack.sql.expression.function.scalar.string.Space; import org.elasticsearch.xpack.sql.expression.function.scalar.string.Substring; import org.elasticsearch.xpack.sql.expression.function.scalar.string.UCase; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mod; import org.elasticsearch.xpack.sql.parser.ParsingException; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -89,6 +90,7 @@ import java.util.Collection; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.TimeZone; import 
java.util.function.BiFunction; @@ -114,21 +116,21 @@ public class FunctionRegistry { def(SumOfSquares.class, SumOfSquares::new), def(Skewness.class, Skewness::new), def(Kurtosis.class, Kurtosis::new), - // Scalar functions + // Scalar functions // Date + def(DayName.class, DayName::new, "DAYNAME"), def(DayOfMonth.class, DayOfMonth::new, "DAYOFMONTH", "DAY", "DOM"), def(DayOfWeek.class, DayOfWeek::new, "DAYOFWEEK", "DOW"), def(DayOfYear.class, DayOfYear::new, "DAYOFYEAR", "DOY"), def(HourOfDay.class, HourOfDay::new, "HOUR"), def(MinuteOfDay.class, MinuteOfDay::new), def(MinuteOfHour.class, MinuteOfHour::new, "MINUTE"), - def(SecondOfMinute.class, SecondOfMinute::new, "SECOND"), + def(MonthName.class, MonthName::new, "MONTHNAME"), def(MonthOfYear.class, MonthOfYear::new, "MONTH"), + def(SecondOfMinute.class, SecondOfMinute::new, "SECOND"), + def(Quarter.class, Quarter::new), def(Year.class, Year::new), def(WeekOfYear.class, WeekOfYear::new, "WEEK"), - def(DayName.class, DayName::new, "DAYNAME"), - def(MonthName.class, MonthName::new, "MONTHNAME"), - def(Quarter.class, Quarter::new), // Math def(Abs.class, Abs::new), def(ACos.class, ACos::new), @@ -159,27 +161,28 @@ public class FunctionRegistry { def(Sinh.class, Sinh::new), def(Sqrt.class, Sqrt::new), def(Tan.class, Tan::new), + def(Truncate.class, Truncate::new), // String def(Ascii.class, Ascii::new), - def(Char.class, Char::new), def(BitLength.class, BitLength::new), + def(Char.class, Char::new), def(CharLength.class, CharLength::new, "CHARACTER_LENGTH"), - def(LCase.class, LCase::new), - def(Length.class, Length::new), - def(LTrim.class, LTrim::new), - def(RTrim.class, RTrim::new), - def(Space.class, Space::new), def(Concat.class, Concat::new), def(Insert.class, Insert::new), + def(LCase.class, LCase::new), def(Left.class, Left::new), + def(Length.class, Length::new), def(Locate.class, Locate::new), + def(LTrim.class, LTrim::new), def(Position.class, Position::new), def(Repeat.class, Repeat::new), def(Replace.class, Replace::new), def(Right.class, Right::new), + def(RTrim.class, RTrim::new), + def(Space.class, Space::new), def(Substring.class, Substring::new), def(UCase.class, UCase::new), - // Special + // Special def(Score.class, Score::new))); private final Map<String, FunctionDefinition> defs = new LinkedHashMap<>(); @@ -209,21 +212,23 @@ public class FunctionRegistry { } } - public FunctionDefinition resolveFunction(String name) { - FunctionDefinition def = defs.get(normalize(name)); + public FunctionDefinition resolveFunction(String functionName) { + FunctionDefinition def = defs.get(functionName); if (def == null) { - throw new SqlIllegalArgumentException("Cannot find function {}; this should have been caught during analysis", name); + throw new SqlIllegalArgumentException( + "Cannot find function {}; this should have been caught during analysis", + functionName); } return def; } - public String concreteFunctionName(String alias) { - String normalized = normalize(alias); - return aliases.getOrDefault(normalized, normalized); + public String resolveAlias(String alias) { + String upperCase = alias.toUpperCase(Locale.ROOT); + return aliases.getOrDefault(upperCase, upperCase); } - public boolean functionExists(String name) { - return defs.containsKey(normalize(name)); + public boolean functionExists(String functionName) { + return defs.containsKey(functionName); } public Collection<FunctionDefinition> listFunctions() { @@ -330,13 +335,17 @@ public class FunctionRegistry { static <T extends Function> FunctionDefinition def(Class<T> function, BinaryFunctionBuilder<T> ctorRef, String...
aliases) { FunctionBuilder builder = (location, children, distinct, tz) -> { - if (children.size() != 2) { + boolean isBinaryOptionalParamFunction = function.isAssignableFrom(Round.class) || function.isAssignableFrom(Truncate.class); + if (isBinaryOptionalParamFunction && (children.size() > 2 || children.size() < 1)) { + throw new IllegalArgumentException("expects one or two arguments"); + } else if (!isBinaryOptionalParamFunction && children.size() != 2) { throw new IllegalArgumentException("expects exactly two arguments"); } + if (distinct) { throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); } - return ctorRef.build(location, children.get(0), children.get(1)); + return ctorRef.build(location, children.get(0), children.size() == 2 ? children.get(1) : null); }; return def(function, builder, false, aliases); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/Score.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/Score.java index c4a4097102f..e165d4388a0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/Score.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/Score.java @@ -5,17 +5,19 @@ */ package org.elasticsearch.xpack.sql.expression.function; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.Function; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; -import static java.util.Collections.emptyList; - import java.util.List; +import static java.util.Collections.emptyList; + /** * Function referring to the {@code _score} in a search. 
Only available * in the search context, and only at the "root" so it can't be combined @@ -59,4 +61,14 @@ public class Score extends Function { public int hashCode() { return location().hashCode(); } + + @Override + protected Pipe makePipe() { + throw new SqlIllegalArgumentException("Scoring cannot be computed on the client"); + } + + @Override + public ScriptTemplate asScript() { + throw new SqlIllegalArgumentException("Scoring cannot be scripted"); + } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/ScoreAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/ScoreAttribute.java index c3f6ed8b26a..622f0755ef5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/ScoreAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/ScoreAttribute.java @@ -7,7 +7,8 @@ package org.elasticsearch.xpack.sql.expression.function; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.ExpressionId; -import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.ScorePipe; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -41,6 +42,11 @@ public class ScoreAttribute extends FunctionAttribute { return new ScoreAttribute(location, name, dataType(), qualifier, nullable, id, synthetic); } + @Override + protected Pipe makePipe() { + return new ScorePipe(location(), this); + } + @Override protected String label() { return "SCORE"; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunction.java index 338b926ce6f..e774963b22f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunction.java @@ -10,6 +10,7 @@ import org.elasticsearch.xpack.sql.capabilities.UnresolvedException; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Literal; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -143,6 +144,11 @@ public class UnresolvedFunction extends Function implements Unresolvable { throw new UnresolvedException("attribute", this); } + @Override + public ScriptTemplate asScript() { + throw new UnresolvedException("script", this); + } + @Override public String unresolvedMessage() { return unresolvedMsg; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java index 413ecf96464..2b558970df5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java @@ -5,8 +5,12 @@ */ package org.elasticsearch.xpack.sql.expression.function.aggregate; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.Function; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.AggNameInput; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.util.CollectionUtils; @@ -53,6 +57,17 @@ public abstract class AggregateFunction extends Function { return lazyAttribute; } + @Override + protected Pipe makePipe() { + // unresolved AggNameInput (should always get replaced by the folder) + return new AggNameInput(location(), this, name()); + } + + @Override + public ScriptTemplate asScript() { + throw new SqlIllegalArgumentException("Aggregate functions cannot be scripted"); + } + @Override public boolean equals(Object obj) { if (false == super.equals(obj)) { @@ -67,4 +82,4 @@ public abstract class AggregateFunction extends Function { public int hashCode() { return Objects.hash(field(), parameters()); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/BinaryScalarFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/BinaryScalarFunction.java index 4e2882d46c1..5b9196f462b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/BinaryScalarFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/BinaryScalarFunction.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import java.util.Arrays; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Cast.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Cast.java index ae94b0b9f83..a8dfe431749 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Cast.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Cast.java @@ -6,12 +6,9 @@ package org.elasticsearch.xpack.sql.expression.function.scalar; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.FieldAttribute; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import 
org.elasticsearch.xpack.sql.expression.gen.pipeline.UnaryPipe; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -74,18 +71,8 @@ public class Cast extends UnaryScalarFunction { } @Override - protected ScriptTemplate asScriptFrom(ScalarFunctionAttribute scalar) { - return scalar.script(); - } - - @Override - protected ScriptTemplate asScriptFrom(FieldAttribute field) { - return new ScriptTemplate(field.name(), Params.EMPTY, field.dataType()); - } - - @Override - protected ProcessorDefinition makeProcessorDefinition() { - return new UnaryProcessorDefinition(location(), this, ProcessorDefinitions.toProcessorDefinition(field()), + protected Pipe makePipe() { + return new UnaryPipe(location(), this, Expressions.pipe(field()), new CastProcessor(DataTypeConversion.conversionFor(from(), to()))); } @@ -118,4 +105,4 @@ public class Cast extends UnaryScalarFunction { sb.insert(sb.length() - 1, " AS " + to().sqlName()); return sb.toString(); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/CastProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/CastProcessor.java index f5fe541fb46..6d0173c23b8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/CastProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/CastProcessor.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.type.DataTypeConversion.Conversion; import java.io.IOException; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java index a62aadab467..ae35f9c760c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java @@ -7,18 +7,11 @@ package org.elasticsearch.xpack.sql.expression.function.scalar; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.UnaryArithmeticProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.BucketExtractorProcessor; -import 
org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ChainingProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ConstantProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.HitExtractorProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringStringProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.string.ConcatFunctionProcessor; @@ -27,6 +20,14 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.string.LocateFunct import org.elasticsearch.xpack.sql.expression.function.scalar.string.ReplaceFunctionProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.string.SubstringFunctionProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.BucketExtractorProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.ChainingProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.HitExtractorProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.UnaryArithmeticProcessor; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import java.util.ArrayList; import java.util.List; @@ -48,6 +49,9 @@ public final class Processors { entries.add(new Entry(Processor.class, CastProcessor.NAME, CastProcessor::new)); entries.add(new Entry(Processor.class, ChainingProcessor.NAME, ChainingProcessor::new)); + // comparators + entries.add(new Entry(Processor.class, BinaryComparisonProcessor.NAME, BinaryComparisonProcessor::new)); + // arithmetic entries.add(new Entry(Processor.class, BinaryArithmeticProcessor.NAME, BinaryArithmeticProcessor::new)); entries.add(new Entry(Processor.class, UnaryArithmeticProcessor.NAME, UnaryArithmeticProcessor::new)); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunction.java index e7b8529557f..0be9ca86c5f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunction.java @@ -5,23 +5,14 @@ */ package org.elasticsearch.xpack.sql.expression.function.scalar; -import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.Expressions; -import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.function.Function; -import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import 
org.elasticsearch.xpack.sql.expression.function.scalar.script.Params; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptWeaver; import org.elasticsearch.xpack.sql.tree.Location; import java.util.List; import static java.util.Collections.emptyList; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate; /** * A {@code ScalarFunction} is a {@code Function} that takes values from some @@ -29,11 +20,9 @@ import static org.elasticsearch.xpack.sql.expression.function.scalar.script.Scri * {@code ABS()}, which takes one value at a time, applies a function to the * value (abs) and returns a new value. */ -public abstract class ScalarFunction extends Function { +public abstract class ScalarFunction extends Function implements ScriptWeaver { private ScalarFunctionAttribute lazyAttribute = null; - private ProcessorDefinition lazyProcessor = null; - protected ScalarFunction(Location location) { super(location, emptyList()); @@ -47,74 +36,14 @@ public abstract class ScalarFunction extends Function { public final ScalarFunctionAttribute toAttribute() { if (lazyAttribute == null) { lazyAttribute = new ScalarFunctionAttribute(location(), name(), dataType(), id(), functionId(), asScript(), orderBy(), - asProcessorDefinition()); + asPipe()); } return lazyAttribute; } - public abstract ScriptTemplate asScript(); - - // utility methods for creating the actual scripts - protected ScriptTemplate asScript(Expression exp) { - if (exp.foldable()) { - return asScriptFromFoldable(exp); - } - - Attribute attr = Expressions.attribute(exp); - if (attr != null) { - if (attr instanceof ScalarFunctionAttribute) { - return asScriptFrom((ScalarFunctionAttribute) attr); - } - if (attr instanceof AggregateFunctionAttribute) { - return asScriptFrom((AggregateFunctionAttribute) attr); - } - if (attr instanceof FieldAttribute) { - return asScriptFrom((FieldAttribute) attr); - } - } - throw new SqlIllegalArgumentException("Cannot evaluate script for expression {}", exp); - } - - protected ScriptTemplate asScriptFrom(ScalarFunctionAttribute scalar) { - ScriptTemplate nested = scalar.script(); - Params p = paramsBuilder().script(nested.params()).build(); - return new ScriptTemplate(formatScript(nested.template()), p, dataType()); - } - - protected ScriptTemplate asScriptFromFoldable(Expression foldable) { - return new ScriptTemplate(formatScript("{}"), - paramsBuilder().variable(foldable.fold()).build(), - foldable.dataType()); - } - - protected ScriptTemplate asScriptFrom(FieldAttribute field) { - return new ScriptTemplate(formatScript("doc[{}].value"), - paramsBuilder().variable(field.name()).build(), - field.dataType()); - } - - protected ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate) { - return new ScriptTemplate(formatScript("{}"), - paramsBuilder().agg(aggregate).build(), - aggregate.dataType()); - } - - protected String formatScript(String scriptTemplate) { - return formatTemplate(scriptTemplate); - } - - public ProcessorDefinition asProcessorDefinition() { - if (lazyProcessor == null) { - lazyProcessor = makeProcessorDefinition(); - } - return lazyProcessor; - } - - protected abstract ProcessorDefinition makeProcessorDefinition(); - // used if the function is monotonic and thus does not have to be computed for ordering purposes // null means the script 
needs to be used; expression means the field/expression to be used instead public Expression orderBy() { return null; } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunctionAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunctionAttribute.java index 0e2870acd09..f2b0f48a18f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunctionAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunctionAttribute.java @@ -9,8 +9,8 @@ import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.ExpressionId; import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -21,28 +21,28 @@ public class ScalarFunctionAttribute extends FunctionAttribute { private final ScriptTemplate script; private final Expression orderBy; - private final ProcessorDefinition processorDef; + private final Pipe pipe; ScalarFunctionAttribute(Location location, String name, DataType dataType, ExpressionId id, - String functionId, ScriptTemplate script, Expression orderBy, ProcessorDefinition processorDef) { + String functionId, ScriptTemplate script, Expression orderBy, Pipe processorDef) { this(location, name, dataType, null, true, id, false, functionId, script, orderBy, processorDef); } public ScalarFunctionAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic, String functionId, ScriptTemplate script, - Expression orderBy, ProcessorDefinition processorDef) { + Expression orderBy, Pipe pipe) { super(location, name, dataType, qualifier, nullable, id, synthetic, functionId); this.script = script; this.orderBy = orderBy; - this.processorDef = processorDef; + this.pipe = pipe; } @Override protected NodeInfo<ScalarFunctionAttribute> info() { return NodeInfo.create(this, ScalarFunctionAttribute::new, name(), dataType(), qualifier(), nullable(), id(), synthetic(), - functionId(), script, orderBy, processorDef); + functionId(), script, orderBy, pipe); } public ScriptTemplate script() { @@ -53,34 +53,41 @@ public class ScalarFunctionAttribute extends FunctionAttribute { return orderBy; } - public ProcessorDefinition processorDef() { - return processorDef; + @Override + public Pipe asPipe() { + return pipe; } @Override protected Expression canonicalize() { return new ScalarFunctionAttribute(location(), "", dataType(), null, true, id(), false, - functionId(), script, orderBy, processorDef); + functionId(), script, orderBy, pipe); } @Override protected Attribute clone(Location location, String name, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) { return new ScalarFunctionAttribute(location, name, dataType(), qualifier, nullable, id, synthetic, - functionId(), script, orderBy, processorDef); + functionId(),
script, orderBy, pipe); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), orderBy); + return Objects.hash(super.hashCode(), script(), pipe, orderBy); } @Override public boolean equals(Object obj) { - return super.equals(obj) && Objects.equals(orderBy, ((ScalarFunctionAttribute) obj).orderBy()); + if (super.equals(obj)) { + ScalarFunctionAttribute other = (ScalarFunctionAttribute) obj; + return Objects.equals(script, other.script()) + && Objects.equals(pipe, other.asPipe()) + && Objects.equals(orderBy, other.orderBy()); + } + return false; } @Override protected String label() { return "s->" + functionId(); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/UnaryScalarFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/UnaryScalarFunction.java index e4a0953c115..54fe2e834db 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/UnaryScalarFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/UnaryScalarFunction.java @@ -6,13 +6,13 @@ package org.elasticsearch.xpack.sql.expression.function.scalar; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; -import static java.util.Collections.singletonList; - import java.util.List; +import static java.util.Collections.singletonList; + public abstract class UnaryScalarFunction extends ScalarFunction { private final Expression field; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/ArithmeticFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/ArithmeticFunction.java deleted file mode 100644 index e95fec86397..00000000000 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/ArithmeticFunction.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic; - -import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.Expressions; -import org.elasticsearch.xpack.sql.expression.Literal; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; -import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryNumericFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.type.DataType; -import org.elasticsearch.xpack.sql.type.DataTypeConversion; - -import java.util.Locale; - -import static java.lang.String.format; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; - -public abstract class ArithmeticFunction extends BinaryNumericFunction { - - private final BinaryArithmeticOperation operation; - - ArithmeticFunction(Location location, Expression left, Expression right, BinaryArithmeticOperation operation) { - super(location, left, right); - this.operation = operation; - } - - @Override - public BinaryArithmeticOperation operation() { - return operation; - } - - @Override - public DataType dataType() { - return DataTypeConversion.commonType(left().dataType(), right().dataType()); - } - - @Override - protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) { - String op = operation.symbol(); - // escape % - if (operation == BinaryArithmeticOperation.MOD) { - op = "%" + op; - } - return new ScriptTemplate(format(Locale.ROOT, "(%s) %s (%s)", leftScript.template(), op, rightScript.template()), - paramsBuilder() - .script(leftScript.params()).script(rightScript.params()) - .build(), dataType()); - } - - @Override - protected ProcessorDefinition makeProcessorDefinition() { - return new BinaryArithmeticProcessorDefinition(location(), this, - ProcessorDefinitions.toProcessorDefinition(left()), - ProcessorDefinitions.toProcessorDefinition(right()), - operation); - } - - @Override - public String name() { - StringBuilder sb = new StringBuilder(); - sb.append("("); - sb.append(Expressions.name(left())); - if (!(left() instanceof Literal)) { - sb.insert(1, "("); - sb.append(")"); - } - sb.append(" "); - sb.append(operation); - sb.append(" "); - int pos = sb.length(); - sb.append(Expressions.name(right())); - if (!(right() instanceof Literal)) { - sb.insert(pos, "("); - sb.append(")"); - } - sb.append(")"); - return sb.toString(); - } - - @Override - public String toString() { - return name() + "#" + functionId(); - } -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/BinaryArithmeticProcessorDefinition.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/BinaryArithmeticProcessorDefinition.java deleted file mode 100644 index b94a726290e..00000000000 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/BinaryArithmeticProcessorDefinition.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic; - -import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.BinaryProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.tree.NodeInfo; -import java.util.Objects; - -public class BinaryArithmeticProcessorDefinition extends BinaryProcessorDefinition { - - private final BinaryArithmeticOperation operation; - - public BinaryArithmeticProcessorDefinition(Location location, Expression expression, ProcessorDefinition left, - ProcessorDefinition right, BinaryArithmeticOperation operation) { - super(location, expression, left, right); - this.operation = operation; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, BinaryArithmeticProcessorDefinition::new, - expression(), left(), right(), operation); - } - - public BinaryArithmeticOperation operation() { - return operation; - } - - @Override - protected BinaryProcessorDefinition replaceChildren(ProcessorDefinition left, ProcessorDefinition right) { - return new BinaryArithmeticProcessorDefinition(location(), expression(), left, right, operation); - } - - @Override - public BinaryArithmeticProcessor asProcessor() { - return new BinaryArithmeticProcessor(left().asProcessor(), right().asProcessor(), operation); - } - - @Override - public int hashCode() { - return Objects.hash(left(), right(), operation); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - BinaryArithmeticProcessorDefinition other = (BinaryArithmeticProcessorDefinition) obj; - return Objects.equals(operation, other.operation) - && Objects.equals(left(), other.left()) - && Objects.equals(right(), other.right()); - } -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java index 2213fad8c8d..b3c621bc14a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java @@ -8,9 +8,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; -import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -62,9 +60,4 @@ abstract class BaseDateTimeFunction extends 
UnaryScalarFunction { public boolean foldable() { return field().foldable(); } - - @Override - protected ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate) { - throw new UnsupportedOperationException(); - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java index 95547ded222..c8f42704ac5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java @@ -9,7 +9,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.joda.time.ReadableInstant; import java.io.IOException; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java index d87e15084a4..2ecfc27376d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java @@ -6,13 +6,13 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.UnaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; import org.joda.time.DateTime; @@ -24,8 +24,7 @@ import java.time.temporal.ChronoField; import java.util.Objects; import java.util.TimeZone; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; public abstract class DateTimeFunction 
extends BaseDateTimeFunction { @@ -49,7 +48,7 @@ public abstract class DateTimeFunction extends BaseDateTimeFunction { } @Override - protected ScriptTemplate asScriptFrom(FieldAttribute field) { + public ScriptTemplate scriptWithField(FieldAttribute field) { ParamsBuilder params = paramsBuilder(); String template = null; @@ -67,9 +66,8 @@ public abstract class DateTimeFunction extends BaseDateTimeFunction { protected abstract ChronoField chronoField(); @Override - protected ProcessorDefinition makeProcessorDefinition() { - return new UnaryProcessorDefinition(location(), this, ProcessorDefinitions.toProcessorDefinition(field()), - new DateTimeProcessor(extractor(), timeZone())); + protected Pipe makePipe() { + return new UnaryPipe(location(), this, Expressions.pipe(field()), new DateTimeProcessor(extractor(), timeZone())); } protected abstract DateTimeExtractor extractor(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java index 2f5ba7eeaca..77cf4277704 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java @@ -16,7 +16,6 @@ import java.util.TimeZone; * Extract the day of the week from a datetime in text format (Monday, Tuesday etc.) */ public class DayName extends NamedDateTimeFunction { - protected static final String DAY_NAME_FORMAT = "EEEE"; public DayName(Location location, Expression field, TimeZone timeZone) { super(location, field, timeZone); @@ -32,18 +31,8 @@ public class DayName extends NamedDateTimeFunction { return new DayName(location(), newChild, timeZone()); } - @Override - protected String dateTimeFormat() { - return DAY_NAME_FORMAT; - } - @Override protected NameExtractor nameExtractor() { return NameExtractor.DAY_NAME; } - - @Override - public String extractName(long millis, String tzId) { - return nameExtractor().extract(millis, tzId); - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java index 170c80c10f9..75be9978406 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java @@ -16,7 +16,6 @@ import java.util.TimeZone; * Extract the month from a datetime in text format (January, February etc.) 
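As an illustration of what MonthName (and its sibling DayName) now delegates to, here is a stand-alone sketch, with a hypothetical class name, of the NameExtractor.MONTH_NAME computation; it mirrors the java.time-based formatting visible in the NamedDateTimeProcessor hunk later in this patch:

import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Locale;

// Hypothetical illustration, not part of the patch: deriving a month name
// from epoch millis and a time zone id, as NameExtractor.MONTH_NAME does.
class MonthNameSketch {
    static String monthName(long millis, String tzId) {
        ZonedDateTime time = ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneId.of(tzId));
        // "MMMM" yields the full month name, e.g. "September"
        return time.format(DateTimeFormatter.ofPattern("MMMM", Locale.ROOT));
    }
}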
*/ public class MonthName extends NamedDateTimeFunction { - protected static final String MONTH_NAME_FORMAT = "MMMM"; public MonthName(Location location, Expression field, TimeZone timeZone) { super(location, field, timeZone); @@ -32,16 +31,6 @@ public class MonthName extends NamedDateTimeFunction { return new MonthName(location(), newChild, timeZone()); } - @Override - protected String dateTimeFormat() { - return MONTH_NAME_FORMAT; - } - - @Override - public String extractName(long millis, String tzId) { - return nameExtractor().extract(millis, tzId); - } - @Override protected NameExtractor nameExtractor() { return NameExtractor.MONTH_NAME; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java index c3e10981ce1..935e517ee0f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java @@ -6,26 +6,26 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.UnaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.StringUtils; import org.joda.time.DateTime; +import java.util.Locale; import java.util.Objects; import java.util.TimeZone; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate; +import static java.lang.String.format; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; /* - * Base class for "naming" date/time functions like month_name and day_name + * Base class for "named" date/time functions like month_name and day_name */ abstract class NamedDateTimeFunction extends BaseDateTimeFunction { @@ -40,38 +40,28 @@ abstract class NamedDateTimeFunction extends BaseDateTimeFunction { return null; } - return extractName(folded.getMillis(), timeZone().getID()); - } - - public abstract String extractName(long millis, String tzId); - - @Override - protected ScriptTemplate asScriptFrom(FieldAttribute field) { - ParamsBuilder params = paramsBuilder(); - - String template = null; - template = 
formatTemplate(formatMethodName("{sql}.{method_name}(doc[{}].value.millis, {})")); - params.variable(field.name()) - .variable(timeZone().getID()); - - return new ScriptTemplate(template, params.build(), dataType()); - } - - private String formatMethodName(String template) { - // the Painless method name will be the enum's lower camelcase name - return template.replace("{method_name}", StringUtils.underscoreToLowerCamelCase(nameExtractor().toString())); + return nameExtractor().extract(folded.getMillis(), timeZone().getID()); } @Override - protected final ProcessorDefinition makeProcessorDefinition() { - return new UnaryProcessorDefinition(location(), this, ProcessorDefinitions.toProcessorDefinition(field()), + public ScriptTemplate scriptWithField(FieldAttribute field) { + return new ScriptTemplate( + formatTemplate(format(Locale.ROOT, "{sql}.%s(doc[{}].value.millis, {})", + StringUtils.underscoreToLowerCamelCase(nameExtractor().name()))), + paramsBuilder() + .variable(field.name()) + .variable(timeZone().getID()).build(), + dataType()); + } + + @Override + protected final Pipe makePipe() { + return new UnaryPipe(location(), this, Expressions.pipe(field()), new NamedDateTimeProcessor(nameExtractor(), timeZone())); } protected abstract NameExtractor nameExtractor(); - protected abstract String dateTimeFormat(); - @Override public DataType dataType() { return DataType.KEYWORD; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessor.java index 478ad8ee09f..43b3db07236 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessor.java @@ -24,11 +24,11 @@ public class NamedDateTimeProcessor extends BaseDateTimeProcessor { // for the moment we'll use no specific Locale, but we might consider introducing a Locale parameter, just like the timeZone one DAY_NAME((Long millis, String tzId) -> { ZonedDateTime time = ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneId.of(tzId)); - return time.format(DateTimeFormatter.ofPattern(DayName.DAY_NAME_FORMAT, Locale.ROOT)); + return time.format(DateTimeFormatter.ofPattern(DAY_NAME_FORMAT, Locale.ROOT)); }), MONTH_NAME((Long millis, String tzId) -> { ZonedDateTime time = ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneId.of(tzId)); - return time.format(DateTimeFormatter.ofPattern(MonthName.MONTH_NAME_FORMAT, Locale.ROOT)); + return time.format(DateTimeFormatter.ofPattern(MONTH_NAME_FORMAT, Locale.ROOT)); }); private final BiFunction apply; @@ -43,6 +43,8 @@ public class NamedDateTimeProcessor extends BaseDateTimeProcessor { } public static final String NAME = "ndt"; + private static final String MONTH_NAME_FORMAT = "MMMM"; + private static final String DAY_NAME_FORMAT = "EEEE"; private final NameExtractor extractor; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java index 22e368b0ec6..9cf15f637df 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java @@ -7,12 +7,11 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.FieldAttribute; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.UnaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import org.elasticsearch.xpack.sql.type.DataType; @@ -22,8 +21,7 @@ import java.util.Objects; import java.util.TimeZone; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor.quarter; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; public class Quarter extends BaseDateTimeFunction { @@ -44,15 +42,13 @@ public class Quarter extends BaseDateTimeFunction { } @Override - protected ScriptTemplate asScriptFrom(FieldAttribute field) { - ParamsBuilder params = paramsBuilder(); - - String template = null; - template = formatTemplate("{sql}.quarter(doc[{}].value.millis, {})"); - params.variable(field.name()) - .variable(timeZone().getID()); - - return new ScriptTemplate(template, params.build(), dataType()); + public ScriptTemplate scriptWithField(FieldAttribute field) { + return new ScriptTemplate(formatTemplate("{sql}.quarter(doc[{}].value.millis, {})"), + paramsBuilder() + .variable(field.name()) + .variable(timeZone().getID()) + .build(), + dataType()); } @Override @@ -66,9 +62,8 @@ public class Quarter extends BaseDateTimeFunction { } @Override - protected ProcessorDefinition makeProcessorDefinition() { - return new UnaryProcessorDefinition(location(), this, ProcessorDefinitions.toProcessorDefinition(field()), - new QuarterProcessor(timeZone())); + protected Pipe makePipe() { + return new UnaryPipe(location(), this, Expressions.pipe(field()), new QuarterProcessor(timeZone())); } @Override @@ -90,5 +85,4 @@ public class Quarter extends BaseDateTimeFunction { public int hashCode() { return Objects.hash(field(), timeZone()); } - -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ATan2.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ATan2.java index 24bbebd64c2..9ade5108406 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ATan2.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ATan2.java 
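The ATan2 hunk below (and the Power hunk later in this patch) applies one pattern: the subclass now hands its BinaryMathOperation to the BinaryNumericFunction constructor instead of overriding operation() and makeProcessorDefinition() itself. A minimal sketch of the enum-of-BiFunction shape this relies on (names hypothetical, not part of the patch):

import java.util.function.BiFunction;

// Hypothetical sketch: operations as enum constants wrapping a BiFunction,
// so folding and pipe construction can live once in a shared base class.
enum SketchMathOperation {
    ATAN2((l, r) -> Math.atan2(l.doubleValue(), r.doubleValue())),
    POWER((l, r) -> Math.pow(l.doubleValue(), r.doubleValue()));

    private final BiFunction<Number, Number, Number> process;

    SketchMathOperation(BiFunction<Number, Number, Number> process) {
        this.process = process;
    }

    Number apply(Number l, Number r) {
        return process.apply(l, r);
    }
}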
@@ -7,13 +7,9 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.math; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; -import java.util.function.BiFunction; - /** * Multi-valued inverse tangent * function. @@ -21,12 +17,7 @@ import java.util.function.BiFunction; public class ATan2 extends BinaryNumericFunction { public ATan2(Location location, Expression left, Expression right) { - super(location, left, right); - } - - @Override - protected BiFunction operation() { - return BinaryMathOperation.ATAN2; + super(location, left, right, BinaryMathOperation.ATAN2); } @Override @@ -38,12 +29,4 @@ public class ATan2 extends BinaryNumericFunction { protected ATan2 replaceChildren(Expression newLeft, Expression newRight) { return new ATan2(location(), newLeft, newRight); } - - @Override - protected ProcessorDefinition makeProcessorDefinition() { - return new BinaryMathProcessorDefinition(location(), this, - ProcessorDefinitions.toProcessorDefinition(left()), - ProcessorDefinitions.toProcessorDefinition(right()), - BinaryMathOperation.ATAN2); - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathPipe.java new file mode 100644 index 00000000000..0a598275c7f --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathPipe.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.math; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; + +import java.util.Objects; + +/** + * Math operation pipe requiring two arguments. 
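For orientation before the next hunk: the ROUND and TRUNCATE operations added to BinaryMathProcessor share the same scale arithmetic. A stand-alone sketch of it (class and method names hypothetical, mirroring the lambda bodies in that hunk), with worked values:

// Hypothetical illustration, not part of the patch.
class RoundTruncateSketch {
    static double round(double l, int r) {
        double tenAtScale = Math.pow(10d, r);
        double middleResult = l * tenAtScale;
        int sign = middleResult > 0 ? 1 : -1;
        return Math.round(Math.abs(middleResult)) / tenAtScale * sign;
    }

    static double truncate(double l, int r) {
        double tenAtScale = Math.pow(10d, r);
        double g = l * tenAtScale;
        return (l < 0 ? Math.ceil(g) : Math.floor(g)) / tenAtScale;
    }

    public static void main(String[] args) {
        System.out.println(round(1.25, 1));     // 1.3  (half up, away from zero via abs/sign)
        System.out.println(round(-1.25, 1));    // -1.3
        System.out.println(round(1250, -2));    // 1300.0 (negative scale rounds left of the point)
        System.out.println(truncate(1.25, 1));  // 1.2  (digits are dropped, not rounded)
        System.out.println(truncate(-1.25, 1)); // -1.2
    }
}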
+ */ +public class BinaryMathPipe extends BinaryPipe { + + private final BinaryMathOperation operation; + + public BinaryMathPipe(Location location, Expression expression, Pipe left, + Pipe right, BinaryMathOperation operation) { + super(location, expression, left, right); + this.operation = operation; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, BinaryMathPipe::new, expression(), left(), right(), operation); + } + + public BinaryMathOperation operation() { + return operation; + } + + @Override + protected BinaryPipe replaceChildren(Pipe left, Pipe right) { + return new BinaryMathPipe(location(), expression(), left, right, operation); + } + + @Override + public BinaryMathProcessor asProcessor() { + return new BinaryMathProcessor(left().asProcessor(), right().asProcessor(), operation); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), operation); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj)) { + BinaryMathPipe other = (BinaryMathPipe) obj; + return Objects.equals(operation, other.operation); + } + return false; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathProcessor.java index fca6aa5023d..3a4f6dd5014 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathProcessor.java @@ -7,9 +7,11 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.math; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Arithmetics; import java.io.IOException; import java.util.function.BiFunction; @@ -22,7 +24,39 @@ public class BinaryMathProcessor extends BinaryNumericProcessor { ATAN2((l, r) -> Math.atan2(l.doubleValue(), r.doubleValue())), - POWER((l, r) -> Math.pow(l.doubleValue(), r.doubleValue())); + MOD(Arithmetics::mod), + POWER((l, r) -> Math.pow(l.doubleValue(), r.doubleValue())), + ROUND((l, r) -> { + if (l == null) { + return null; + } + if (r == null) { + return l; + } + if (r instanceof Float || r instanceof Double) { + throw new SqlIllegalArgumentException("An integer number is required; received [{}] as second parameter", r); + } + + double tenAtScale = Math.pow(10., r.longValue()); + double middleResult = l.doubleValue() * tenAtScale; + int sign = middleResult > 0 ? 
1 : -1; + return Math.round(Math.abs(middleResult)) / tenAtScale * sign; + }), + TRUNCATE((l, r) -> { + if (l == null) { + return null; + } + if (r == null) { + return l; + } + if (r instanceof Float || r instanceof Double) { + throw new SqlIllegalArgumentException("An integer number is required; received [{}] as second parameter", r); + } + + double tenAtScale = Math.pow(10., r.longValue()); + double g = l.doubleValue() * tenAtScale; + return (((l.doubleValue() < 0) ? Math.ceil(g) : Math.floor(g)) / tenAtScale); + }); private final BiFunction process; @@ -55,4 +89,4 @@ public class BinaryMathProcessor extends BinaryNumericProcessor info() { - return NodeInfo.create(this, BinaryMathProcessorDefinition::new, expression(), left(), right(), operation); - } - - public BinaryMathOperation operation() { - return operation; - } - - @Override - protected BinaryProcessorDefinition replaceChildren(ProcessorDefinition left, ProcessorDefinition right) { - return new BinaryMathProcessorDefinition(location(), expression(), left, right, operation); - } - - @Override - public BinaryMathProcessor asProcessor() { - return new BinaryMathProcessor(left().asProcessor(), right().asProcessor(), operation); - } - - @Override - public int hashCode() { - return Objects.hash(left(), right(), operation); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - BinaryMathProcessorDefinition other = (BinaryMathProcessorDefinition) obj; - return Objects.equals(operation, other.operation) - && Objects.equals(left(), other.left()) - && Objects.equals(right(), other.right()); - } -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryNumericFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryNumericFunction.java index 14675270f9f..4de63345aab 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryNumericFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryNumericFunction.java @@ -6,25 +6,28 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.math; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Locale; import java.util.Objects; -import java.util.function.BiFunction; import static java.lang.String.format; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; public abstract class BinaryNumericFunction extends BinaryScalarFunction { - protected BinaryNumericFunction(Location location, Expression left, Expression right) { - super(location, left, right); - } + private final BinaryMathOperation operation; - protected 
abstract BiFunction operation(); + protected BinaryNumericFunction(Location location, Expression left, Expression right, BinaryMathOperation operation) { + super(location, left, right); + this.operation = operation; + } @Override public DataType dataType() { @@ -46,14 +49,19 @@ public abstract class BinaryNumericFunction extends BinaryScalarFunction { } protected TypeResolution resolveInputType(DataType inputType) { - return inputType.isNumeric() ? - TypeResolution.TYPE_RESOLVED : + return inputType.isNumeric() ? + TypeResolution.TYPE_RESOLVED : new TypeResolution("'%s' requires a numeric type, received %s", mathFunction(), inputType.esType); } @Override public Object fold() { - return operation().apply((Number) left().fold(), (Number) right().fold()); + return operation.apply((Number) left().fold(), (Number) right().fold()); + } + + @Override + protected Pipe makePipe() { + return new BinaryMathPipe(location(), this, Expressions.pipe(left()), Expressions.pipe(right()), operation); } @Override @@ -70,7 +78,7 @@ public abstract class BinaryNumericFunction extends BinaryScalarFunction { @Override public int hashCode() { - return Objects.hash(left(), right(), operation()); + return Objects.hash(left(), right(), operation); } @Override @@ -81,6 +89,6 @@ public abstract class BinaryNumericFunction extends BinaryScalarFunction { BinaryNumericFunction other = (BinaryNumericFunction) obj; return Objects.equals(other.left(), left()) && Objects.equals(other.right(), right()) - && Objects.equals(other.operation(), operation()); + && Objects.equals(other.operation, operation); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryNumericProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryNumericProcessor.java index 3acc1cabf2b..599db91673f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryNumericProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryNumericProcessor.java @@ -7,8 +7,8 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.math; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.BinaryProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.BinaryProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; import java.util.Locale; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cot.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cot.java index 5bb4e0630bb..5bf51fd7a0f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cot.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cot.java @@ -34,8 +34,9 @@ public class Cot extends MathFunction { } @Override - protected String formatScript(String template) { - return super.formatScript(format(Locale.ROOT, "1.0 / Math.tan(%s)", template)); + public String processScript(String template) { + // FIXME: needs to be null aware + return 
super.processScript(format(Locale.ROOT, "1.0 / Math.tan(%s)", template)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/E.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/E.java index a3fdfa654df..e9d5d4a37e8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/E.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/E.java @@ -9,8 +9,8 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.math; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.script.Params; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathFunction.java index c50b7243f10..d1193053221 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathFunction.java @@ -6,11 +6,11 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.math; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.UnaryPipe; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; @@ -40,8 +40,8 @@ public abstract class MathFunction extends UnaryScalarFunction { } @Override - protected String formatScript(String template) { - return super.formatScript(format(Locale.ROOT, "Math.%s(%s)", mathFunction(), template)); + public String processScript(String template) { + return super.processScript(format(Locale.ROOT, "Math.%s(%s)", mathFunction(), template)); } protected String mathFunction() { @@ -64,9 +64,8 @@ public abstract class MathFunction extends UnaryScalarFunction { } @Override - protected final ProcessorDefinition makeProcessorDefinition() { - return new UnaryProcessorDefinition(location(), this, - ProcessorDefinitions.toProcessorDefinition(field()), new MathProcessor(operation())); + protected final Pipe makePipe() { + return new UnaryPipe(location(), this, 
Expressions.pipe(field()), new MathProcessor(operation())); } protected abstract MathOperation operation(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathProcessor.java index b9bf56f33a4..07da9b2e3cc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathProcessor.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; import java.util.Random; @@ -52,7 +52,6 @@ public class MathProcessor implements Processor { RANDOM((Object l) -> l != null ? new Random(((Number) l).longValue()).nextDouble() : Randomness.get().nextDouble(), true), - ROUND((DoubleFunction) Math::round), SIGN((DoubleFunction) Math::signum), SIN(Math::sin), SINH(Math::sinh), diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Pi.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Pi.java index e57aa333f06..202d2763fc5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Pi.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Pi.java @@ -9,8 +9,8 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.math; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.script.Params; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Power.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Power.java index 4e362dbb8e5..03dbc41a348 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Power.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Power.java @@ -7,22 +7,13 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.math; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; import 
org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; -import java.util.function.BiFunction; - public class Power extends BinaryNumericFunction { public Power(Location location, Expression left, Expression right) { - super(location, left, right); - } - - @Override - protected BiFunction operation() { - return BinaryMathOperation.POWER; + super(location, left, right, BinaryMathOperation.POWER); } @Override @@ -35,14 +26,6 @@ public class Power extends BinaryNumericFunction { return new Power(location(), newLeft, newRight); } - @Override - protected ProcessorDefinition makeProcessorDefinition() { - return new BinaryMathProcessorDefinition(location(), this, - ProcessorDefinitions.toProcessorDefinition(left()), - ProcessorDefinitions.toProcessorDefinition(right()), - BinaryMathOperation.POWER); - } - @Override protected String mathFunction() { return "pow"; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Random.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Random.java index 4e078ed2126..c3ee2201fdd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Random.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Random.java @@ -34,9 +34,9 @@ public class Random extends MathFunction { } @Override - protected String formatScript(String template) { + public String processScript(String template) { //TODO: Painless script uses Random since Randomness is not whitelisted - return super.formatScript( + return super.processScript( format(Locale.ROOT, "%s != null ? new Random((long) %s).nextDouble() : Math.random()", template, template)); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Round.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Round.java index 52d7bc5aeca..d2e87daa0e2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Round.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Round.java @@ -6,41 +6,53 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.math; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; +import org.elasticsearch.xpack.sql.expression.Literal; +import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; -import org.elasticsearch.xpack.sql.type.DataTypeConversion; + +import java.util.Locale; + +import static java.lang.String.format; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; /** - * Round - * function. - * - * Note that this uses {@link Math#round(double)} which uses "half up" rounding - * for `ROUND(-1.5)` rounds to `-1`. + * Function that takes two parameters: one is the field/value itself, the other is a non-floating point numeric + * which indicates how the rounding should behave. If positive, it will round the number till that parameter + * count digits after the decimal point. 
If negative, it will round the number till that parameter count + * digits before the decimal point, starting at the decimal point. */ -public class Round extends MathFunction { - public Round(Location location, Expression field) { - super(location, field); +public class Round extends BinaryNumericFunction { + + public Round(Location location, Expression left, Expression right) { + super(location, left, right == null ? Literal.of(left.location(), 0) : right, BinaryMathOperation.ROUND); } @Override protected NodeInfo<Round> info() { - return NodeInfo.create(this, Round::new, field()); + return NodeInfo.create(this, Round::new, left(), right()); } @Override - protected Round replaceChild(Expression newChild) { - return new Round(location(), newChild); + protected Round replaceChildren(Expression newLeft, Expression newRight) { + return new Round(location(), newLeft, newRight); } @Override - protected MathOperation operation() { - return MathOperation.ROUND; + protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) { + return new ScriptTemplate(format(Locale.ROOT, formatTemplate("{sql}.%s(%s,%s)"), + mathFunction(), + leftScript.template(), + rightScript.template()), + paramsBuilder() + .script(leftScript.params()).script(rightScript.params()) + .build(), dataType()); } - + @Override public DataType dataType() { - return DataTypeConversion.asInteger(field().dataType()); + return left().dataType(); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Truncate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Truncate.java new file mode 100644 index 00000000000..58fdede9087 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Truncate.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.math; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Literal; +import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.type.DataType; + +import java.util.Locale; + +import static java.lang.String.format; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; + +/** + * Function that takes two parameters: one is the field/value itself, the other is a non-floating point numeric + * which indicates how the truncation should behave. If positive, it will truncate the number till that + * parameter count digits after the decimal point. If negative, it will truncate the number till that parameter + * count digits before the decimal point, starting at the decimal point. + */ +public class Truncate extends BinaryNumericFunction { + + public Truncate(Location location, Expression left, Expression right) { + super(location, left, right == null ? 
Literal.of(left.location(), 0) : right, BinaryMathOperation.TRUNCATE); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Truncate::new, left(), right()); + } + + @Override + protected Truncate replaceChildren(Expression newLeft, Expression newRight) { + return new Truncate(location(), newLeft, newRight); + } + + @Override + protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) { + return new ScriptTemplate(format(Locale.ROOT, formatTemplate("{sql}.%s(%s,%s)"), + mathFunction(), + leftScript.template(), + rightScript.template()), + paramsBuilder() + .script(leftScript.params()).script(rightScript.params()) + .build(), dataType()); + } + + @Override + public DataType dataType() { + return left().dataType(); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/ProcessorDefinitions.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/ProcessorDefinitions.java deleted file mode 100644 index e525b37b4dd..00000000000 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/ProcessorDefinitions.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; - -import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.NamedExpression; -import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; - -public abstract class ProcessorDefinitions { - - public static ProcessorDefinition toProcessorDefinition(Expression ex) { - if (ex.foldable()) { - return new ConstantInput(ex.location(), ex, ex.fold()); - } - if (ex instanceof ScalarFunction) { - return ((ScalarFunction) ex).asProcessorDefinition(); - } - if (ex instanceof AggregateFunction) { - // unresolved AggNameInput (should always get replaced by the folder) - return new AggNameInput(ex.location(), ex, ((AggregateFunction) ex).name()); - } - if (ex instanceof NamedExpression) { - return new AttributeInput(ex.location(), ex, ((NamedExpression) ex).toAttribute()); - } - throw new SqlIllegalArgumentException("Cannot extract processor from {}", ex); - } -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringFunction.java index 2ee241c8a0f..093d6c95e78 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringFunction.java @@ -8,18 +8,16 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction; -import 
org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; -import org.elasticsearch.xpack.sql.util.StringUtils; import java.util.Locale; import java.util.Objects; import java.util.function.BiFunction; import static java.lang.String.format; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; /** * Base class for binary functions that have the first parameter a string, the second parameter a number @@ -61,11 +59,11 @@ public abstract class BinaryStringFunction extends BinaryScalarFunction { } @Override - protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) { + public ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) { // basically, transform the script to InternalSqlScriptUtils.[function_name](function_or_field1, function_or_field2) - return new ScriptTemplate(format(Locale.ROOT, formatTemplate("{sql}.%s(%s,%s)"), - StringUtils.underscoreToLowerCamelCase(operation().toString()), - leftScript.template(), + return new ScriptTemplate(format(Locale.ROOT, formatTemplate("{sql}.%s(%s,%s)"), + operation().toString().toLowerCase(Locale.ROOT), + leftScript.template(), rightScript.template()), paramsBuilder() .script(leftScript.params()).script(rightScript.params()) @@ -73,8 +71,8 @@ public abstract class BinaryStringFunction extends BinaryScalarFunction { } @Override - protected ScriptTemplate asScriptFrom(FieldAttribute field) { - return new ScriptTemplate(formatScript("doc[{}].value"), + public ScriptTemplate scriptWithField(FieldAttribute field) { + return new ScriptTemplate(processScript("doc[{}].value"), paramsBuilder().variable(field.isInexact() ? 
field.exactAttribute().name() : field.name()).build(), dataType()); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericFunction.java index 3e3637d57df..eaddf4bc70f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericFunction.java @@ -6,6 +6,9 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor.BinaryStringNumericOperation; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; @@ -18,13 +21,21 @@ public abstract class BinaryStringNumericFunction extends BinaryStringFunction info() { + return NodeInfo.create(this, BinaryStringNumericPipe::new, expression(), left(), right(), operation()); + } + + public BinaryStringNumericOperation operation() { + return operation; + } + + @Override + protected BinaryPipe replaceChildren(Pipe newLeft, Pipe newRight) { + return new BinaryStringNumericPipe(location(), expression(), newLeft, newRight, operation()); + } + + @Override + public BinaryStringNumericProcessor asProcessor() { + return new BinaryStringNumericProcessor(left().asProcessor(), right().asProcessor(), operation()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), operation); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj)) { + BinaryStringNumericPipe other = (BinaryStringNumericPipe) obj; + return Objects.equals(operation, other.operation); + } + return false; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessor.java index 683bcba5c3f..164330dc9e6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessor.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor.BinaryStringNumericOperation; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; import java.util.function.BiFunction; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorDefinition.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorDefinition.java deleted file mode 100644 index b0adcdf1ff2..00000000000 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorDefinition.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.expression.function.scalar.string; - -import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.BinaryProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor.BinaryStringNumericOperation; -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.tree.NodeInfo; - -import java.util.Objects; - -/** - * Processor definition for String operations requiring one string and one numeric argument. - */ -public class BinaryStringNumericProcessorDefinition extends BinaryProcessorDefinition { - - private final BinaryStringNumericOperation operation; - - public BinaryStringNumericProcessorDefinition(Location location, Expression expression, ProcessorDefinition left, - ProcessorDefinition right, BinaryStringNumericOperation operation) { - super(location, expression, left, right); - this.operation = operation; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, BinaryStringNumericProcessorDefinition::new, expression(), left(), right(), operation()); - } - - public BinaryStringNumericOperation operation() { - return operation; - } - - @Override - protected BinaryProcessorDefinition replaceChildren(ProcessorDefinition newLeft, ProcessorDefinition newRight) { - return new BinaryStringNumericProcessorDefinition(location(), expression(), newLeft, newRight, operation()); - } - - @Override - public BinaryStringNumericProcessor asProcessor() { - return new BinaryStringNumericProcessor(left().asProcessor(), right().asProcessor(), operation()); - } - - @Override - public int hashCode() { - return Objects.hash(left(), right(), operation); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - BinaryStringNumericProcessorDefinition other = (BinaryStringNumericProcessorDefinition) obj; - return Objects.equals(operation, other.operation) - && Objects.equals(left(), other.left()) - && Objects.equals(right(), other.right()); - } -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringProcessor.java index 0303d15adad..b19a441b320 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringProcessor.java @@ -6,8 +6,8 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; 
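// Reviewer note on the renamed abstractions (applies to every file in this refactor):
// a Pipe is the tree-side representation, mirroring the expression tree (hence its
// replaceChildren/resolveAttributes methods), and it folds into the runtime Processor
// chain via asProcessor(). A minimal sketch, assuming an already-resolved, foldable
// expression `e` (the helper name is hypothetical; the class and method names are the
// ones introduced in this diff):
//
//     static Object fold(Expression e) {
//         Pipe pipe = Expressions.pipe(e);       // e.g. Left -> BinaryStringNumericPipe
//         Processor proc = pipe.asProcessor();   // -> BinaryStringNumericProcessor
//         return proc.process(null);             // LEFT('foobar', 3) folds to "foo"
//     }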
import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.BinaryProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.BinaryProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; import java.util.Objects; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringPipe.java new file mode 100644 index 00000000000..ec1ab78c42d --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringPipe.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.string; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringStringProcessor.BinaryStringStringOperation; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; + +import java.util.Objects; + +/** + * String operations pipe requiring two string arguments. + */ +public class BinaryStringStringPipe extends BinaryPipe { + + private final BinaryStringStringOperation operation; + + public BinaryStringStringPipe(Location location, Expression expression, Pipe left, + Pipe right, BinaryStringStringOperation operation) { + super(location, expression, left, right); + this.operation = operation; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, BinaryStringStringPipe::new, expression(), left(), right(), operation); + } + + public BinaryStringStringOperation operation() { + return operation; + } + + @Override + protected BinaryPipe replaceChildren(Pipe left, Pipe right) { + return new BinaryStringStringPipe(location(), expression(), left, right, operation); + } + + @Override + public BinaryStringStringProcessor asProcessor() { + return new BinaryStringStringProcessor(left().asProcessor(), right().asProcessor(), operation); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), operation); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj)) { + BinaryStringStringPipe other = (BinaryStringStringPipe) obj; + return Objects.equals(operation, other.operation); + } + return false; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessor.java index 8f3425bdb4e..c10232d22d6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessor.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessor.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringStringProcessor.BinaryStringStringOperation; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; import java.util.function.BiFunction; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessorDefinition.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessorDefinition.java deleted file mode 100644 index 33692845bf9..00000000000 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessorDefinition.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.expression.function.scalar.string; - -import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.BinaryProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringStringProcessor.BinaryStringStringOperation; -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.tree.NodeInfo; - -import java.util.Objects; - -/** - * Processor definition for String operations requiring two string arguments. 
- */ -public class BinaryStringStringProcessorDefinition extends BinaryProcessorDefinition { - - private final BinaryStringStringOperation operation; - - public BinaryStringStringProcessorDefinition(Location location, Expression expression, ProcessorDefinition left, - ProcessorDefinition right, BinaryStringStringOperation operation) { - super(location, expression, left, right); - this.operation = operation; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, BinaryStringStringProcessorDefinition::new, expression(), left(), right(), operation); - } - - public BinaryStringStringOperation operation() { - return operation; - } - - @Override - protected BinaryProcessorDefinition replaceChildren(ProcessorDefinition left, ProcessorDefinition right) { - return new BinaryStringStringProcessorDefinition(location(), expression(), left, right, operation); - } - - @Override - public BinaryStringStringProcessor asProcessor() { - return new BinaryStringStringProcessor(left().asProcessor(), right().asProcessor(), operation); - } - - @Override - public int hashCode() { - return Objects.hash(left(), right(), operation); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - BinaryStringStringProcessorDefinition other = (BinaryStringStringProcessorDefinition) obj; - return Objects.equals(operation, other.operation) - && Objects.equals(left(), other.left()) - && Objects.equals(right(), other.right()); - } -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Concat.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Concat.java index a5dc8fc9ac3..f9964026946 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Concat.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Concat.java @@ -6,11 +6,11 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -18,9 +18,8 @@ import org.elasticsearch.xpack.sql.type.DataType; import java.util.Locale; import static java.lang.String.format; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate; import static org.elasticsearch.xpack.sql.expression.function.scalar.string.ConcatFunctionProcessor.doProcessInScripts; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; /** * 
Returns a string that is the result of concatenating the two strings received as parameters. @@ -33,6 +32,7 @@ public class Concat extends BinaryScalarFunction { super(location, source1, source2); } + @Override protected TypeResolution resolveType() { if (!childrenResolved()) { return new TypeResolution("Unresolved children"); @@ -47,16 +47,13 @@ public class Concat extends BinaryScalarFunction { } @Override - protected ProcessorDefinition makeProcessorDefinition() { - return new ConcatFunctionProcessorDefinition(location(), this, - ProcessorDefinitions.toProcessorDefinition(left()), - ProcessorDefinitions.toProcessorDefinition(right())); + protected Pipe makePipe() { + return new ConcatFunctionPipe(location(), this, Expressions.pipe(left()), Expressions.pipe(right())); } @Override public boolean foldable() { - return left().foldable() - && right().foldable(); + return left().foldable() && right().foldable(); } @Override @@ -74,20 +71,12 @@ public class Concat extends BinaryScalarFunction { return NodeInfo.create(this, Concat::new, left(), right()); } - @Override - public ScriptTemplate asScript() { - ScriptTemplate sourceScript1 = asScript(left()); - ScriptTemplate sourceScript2 = asScript(right()); - - return asScriptFrom(sourceScript1, sourceScript2); - } - @Override protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) { // basically, transform the script to InternalSqlScriptUtils.[function_name](function_or_field1, function_or_field2) - return new ScriptTemplate(format(Locale.ROOT, formatTemplate("{sql}.%s(%s,%s)"), - "concat", - leftScript.template(), + return new ScriptTemplate(format(Locale.ROOT, formatTemplate("{sql}.%s(%s,%s)"), + "concat", + leftScript.template(), rightScript.template()), paramsBuilder() .script(leftScript.params()).script(rightScript.params()) @@ -95,8 +84,8 @@ public class Concat extends BinaryScalarFunction { } @Override - protected ScriptTemplate asScriptFrom(FieldAttribute field) { - return new ScriptTemplate(formatScript("doc[{}].value"), + public ScriptTemplate scriptWithField(FieldAttribute field) { + return new ScriptTemplate(processScript("doc[{}].value"), paramsBuilder().variable(field.isInexact() ? 
field.exactAttribute().name() : field.name()).build(), dataType()); } @@ -105,5 +94,4 @@ public class Concat extends BinaryScalarFunction { public DataType dataType() { return DataType.KEYWORD; } - -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionProcessorDefinition.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionPipe.java similarity index 56% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionProcessorDefinition.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionPipe.java index f7f86419be1..82e65f88f1d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionProcessorDefinition.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionPipe.java @@ -6,28 +6,28 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.BinaryProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.Objects; -public class ConcatFunctionProcessorDefinition extends BinaryProcessorDefinition { +public class ConcatFunctionPipe extends BinaryPipe { - public ConcatFunctionProcessorDefinition(Location location, Expression expression, ProcessorDefinition left, - ProcessorDefinition right) { + public ConcatFunctionPipe(Location location, Expression expression, Pipe left, + Pipe right) { super(location, expression, left, right); } @Override - protected NodeInfo info() { - return NodeInfo.create(this, ConcatFunctionProcessorDefinition::new, expression(), left(), right()); + protected NodeInfo info() { + return NodeInfo.create(this, ConcatFunctionPipe::new, expression(), left(), right()); } @Override - protected BinaryProcessorDefinition replaceChildren(ProcessorDefinition left, ProcessorDefinition right) { - return new ConcatFunctionProcessorDefinition(location(), expression(), left, right); + protected BinaryPipe replaceChildren(Pipe left, Pipe right) { + return new ConcatFunctionPipe(location(), expression(), left, right); } @Override @@ -50,7 +50,7 @@ public class ConcatFunctionProcessorDefinition extends BinaryProcessorDefinition return false; } - ConcatFunctionProcessorDefinition other = (ConcatFunctionProcessorDefinition) obj; + ConcatFunctionPipe other = (ConcatFunctionPipe) obj; return Objects.equals(left(), other.left()) && Objects.equals(right(), other.right()); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionProcessor.java index 3a5e9bbee24..675851032ba 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionProcessor.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionProcessor.java @@ -8,15 +8,15 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.BinaryProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.BinaryProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; import java.util.Objects; public class ConcatFunctionProcessor extends BinaryProcessor { - public static final String NAME = "cb"; + public static final String NAME = "scon"; public ConcatFunctionProcessor(Processor source1, Processor source2) { super(source1, source2); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Insert.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Insert.java index 9d635b7cdcb..990cd2921dc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Insert.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Insert.java @@ -6,11 +6,11 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -20,9 +20,8 @@ import java.util.List; import java.util.Locale; import static java.lang.String.format; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate; import static org.elasticsearch.xpack.sql.expression.function.scalar.string.InsertFunctionProcessor.doProcess; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; /** * Returns a character string where length characters have been deleted from the source string, beginning at start, @@ -40,6 +39,7 @@ public class Insert extends ScalarFunction { this.replacement = replacement; } + @Override protected TypeResolution resolveType() { if (!childrenResolved()) { return new TypeResolution("Unresolved children"); @@ -65,7 +65,7 @@ public class Insert extends ScalarFunction { @Override public boolean foldable() { - return source.foldable() + return source.foldable() && start.foldable() && length.foldable() && replacement.foldable(); @@ -77,12 
+77,12 @@ public class Insert extends ScalarFunction { } @Override - protected ProcessorDefinition makeProcessorDefinition() { - return new InsertFunctionProcessorDefinition(location(), this, - ProcessorDefinitions.toProcessorDefinition(source), - ProcessorDefinitions.toProcessorDefinition(start), - ProcessorDefinitions.toProcessorDefinition(length), - ProcessorDefinitions.toProcessorDefinition(replacement)); + protected Pipe makePipe() { + return new InsertFunctionPipe(location(), this, + Expressions.pipe(source), + Expressions.pipe(start), + Expressions.pipe(length), + Expressions.pipe(replacement)); } @Override @@ -100,13 +100,12 @@ public class Insert extends ScalarFunction { return asScriptFrom(sourceScript, startScript, lengthScript, replacementScript); } - protected ScriptTemplate asScriptFrom(ScriptTemplate sourceScript, ScriptTemplate startScript, - ScriptTemplate lengthScript, ScriptTemplate replacementScript) - { + private ScriptTemplate asScriptFrom(ScriptTemplate sourceScript, ScriptTemplate startScript, + ScriptTemplate lengthScript, ScriptTemplate replacementScript) { // basically, transform the script to InternalSqlScriptUtils.[function_name](function_or_field1, function_or_field2,...) - return new ScriptTemplate(format(Locale.ROOT, formatTemplate("{sql}.%s(%s,%s,%s,%s)"), - "insert", - sourceScript.template(), + return new ScriptTemplate(format(Locale.ROOT, formatTemplate("{sql}.%s(%s,%s,%s,%s)"), + "insert", + sourceScript.template(), startScript.template(), lengthScript.template(), replacementScript.template()), @@ -117,8 +116,8 @@ public class Insert extends ScalarFunction { } @Override - protected ScriptTemplate asScriptFrom(FieldAttribute field) { - return new ScriptTemplate(formatScript("doc[{}].value"), + public ScriptTemplate scriptWithField(FieldAttribute field) { + return new ScriptTemplate(processScript("doc[{}].value"), paramsBuilder().variable(field.isInexact() ? 
field.exactAttribute().name() : field.name()).build(), dataType()); } @@ -136,4 +135,4 @@ public class Insert extends ScalarFunction { return new Insert(location(), newChildren.get(0), newChildren.get(1), newChildren.get(2), newChildren.get(3)); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionProcessorDefinition.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionPipe.java similarity index 63% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionProcessorDefinition.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionPipe.java index ab82622788d..7c147bca93c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionProcessorDefinition.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionPipe.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; @@ -15,13 +15,13 @@ import java.util.Arrays; import java.util.List; import java.util.Objects; -public class InsertFunctionProcessorDefinition extends ProcessorDefinition { +public class InsertFunctionPipe extends Pipe { - private final ProcessorDefinition source, start, length, replacement; + private final Pipe source, start, length, replacement; - public InsertFunctionProcessorDefinition(Location location, Expression expression, - ProcessorDefinition source, ProcessorDefinition start, - ProcessorDefinition length, ProcessorDefinition replacement) { + public InsertFunctionPipe(Location location, Expression expression, + Pipe source, Pipe start, + Pipe length, Pipe replacement) { super(location, expression, Arrays.asList(source, start, length, replacement)); this.source = source; this.start = start; @@ -30,7 +30,7 @@ public class InsertFunctionProcessorDefinition extends ProcessorDefinition { } @Override - public final ProcessorDefinition replaceChildren(List newChildren) { + public final Pipe replaceChildren(List newChildren) { if (newChildren.size() != 4) { throw new IllegalArgumentException("expected [4] children but received [" + newChildren.size() + "]"); } @@ -38,11 +38,11 @@ public class InsertFunctionProcessorDefinition extends ProcessorDefinition { } @Override - public final ProcessorDefinition resolveAttributes(AttributeResolver resolver) { - ProcessorDefinition newSource = source.resolveAttributes(resolver); - ProcessorDefinition newStart = start.resolveAttributes(resolver); - ProcessorDefinition newLength = length.resolveAttributes(resolver); - ProcessorDefinition newReplacement = replacement.resolveAttributes(resolver); + public final Pipe resolveAttributes(AttributeResolver resolver) { + Pipe newSource = source.resolveAttributes(resolver); + Pipe newStart = start.resolveAttributes(resolver); + Pipe newLength = length.resolveAttributes(resolver); + Pipe newReplacement = 
replacement.resolveAttributes(resolver); if (newSource == source && newStart == start && newLength == length @@ -65,11 +65,11 @@ public class InsertFunctionProcessorDefinition extends ProcessorDefinition { return source.resolved() && start.resolved() && length.resolved() && replacement.resolved(); } - protected ProcessorDefinition replaceChildren(ProcessorDefinition newSource, - ProcessorDefinition newStart, - ProcessorDefinition newLength, - ProcessorDefinition newReplacement) { - return new InsertFunctionProcessorDefinition(location(), expression(), newSource, newStart, newLength, newReplacement); + protected Pipe replaceChildren(Pipe newSource, + Pipe newStart, + Pipe newLength, + Pipe newReplacement) { + return new InsertFunctionPipe(location(), expression(), newSource, newStart, newLength, newReplacement); } @Override @@ -81,8 +81,8 @@ public class InsertFunctionProcessorDefinition extends ProcessorDefinition { } @Override - protected NodeInfo info() { - return NodeInfo.create(this, InsertFunctionProcessorDefinition::new, expression(), source, start, length, replacement); + protected NodeInfo info() { + return NodeInfo.create(this, InsertFunctionPipe::new, expression(), source, start, length, replacement); } @Override @@ -90,19 +90,19 @@ public class InsertFunctionProcessorDefinition extends ProcessorDefinition { return new InsertFunctionProcessor(source.asProcessor(), start.asProcessor(), length.asProcessor(), replacement.asProcessor()); } - public ProcessorDefinition source() { + public Pipe source() { return source; } - public ProcessorDefinition start() { + public Pipe start() { return start; } - public ProcessorDefinition length() { + public Pipe length() { return length; } - public ProcessorDefinition replacement() { + public Pipe replacement() { return replacement; } @@ -121,7 +121,7 @@ public class InsertFunctionProcessorDefinition extends ProcessorDefinition { return false; } - InsertFunctionProcessorDefinition other = (InsertFunctionProcessorDefinition) obj; + InsertFunctionPipe other = (InsertFunctionPipe) obj; return Objects.equals(source, other.source) && Objects.equals(start, other.start) && Objects.equals(length, other.length) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionProcessor.java index 4649925614f..5202ab64065 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionProcessor.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; import java.util.Objects; @@ -16,7 +16,7 @@ import java.util.Objects; public class InsertFunctionProcessor implements Processor { private final Processor source, start, length, replacement; - public static final String NAME = "ins"; + public static final String NAME = "si"; public InsertFunctionProcessor(Processor source, Processor start, Processor length, 
Processor replacement) { this.source = source; @@ -81,8 +81,8 @@ public class InsertFunctionProcessor implements Processor { StringBuilder sb = new StringBuilder(source.toString()); String replString = (replacement.toString()); - return sb.replace(realStart, - realStart + ((Number) length).intValue(), + return sb.replace(realStart, + realStart + ((Number) length).intValue(), replString).toString(); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Left.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Left.java index 2e31ecf3e7b..728c38e8f62 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Left.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Left.java @@ -6,14 +6,10 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor.BinaryStringNumericOperation; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; -import java.util.function.BiFunction; - /** * Returns the leftmost count characters of a string. */ @@ -24,7 +20,7 @@ public class Left extends BinaryStringNumericFunction { } @Override - protected BiFunction operation() { + protected BinaryStringNumericOperation operation() { return BinaryStringNumericOperation.LEFT; } @@ -33,14 +29,6 @@ public class Left extends BinaryStringNumericFunction { return new Left(location(), newLeft, newRight); } - @Override - protected ProcessorDefinition makeProcessorDefinition() { - return new BinaryStringNumericProcessorDefinition(location(), this, - ProcessorDefinitions.toProcessorDefinition(left()), - ProcessorDefinitions.toProcessorDefinition(right()), - BinaryStringNumericOperation.LEFT); - } - @Override protected NodeInfo info() { return NodeInfo.create(this, Left::new, left(), right()); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Locate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Locate.java index e33511cfbfd..53f73c170c6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Locate.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Locate.java @@ -6,11 +6,11 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import 
org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -20,14 +20,13 @@ import java.util.List; import java.util.Locale; import static java.lang.String.format; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate; import static org.elasticsearch.xpack.sql.expression.function.scalar.string.LocateFunctionProcessor.doProcess; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; /** * Returns the starting position of the first occurrence of the pattern within the source string. * The search for the first occurrence of the pattern begins with the first character position in the source string - * unless the optional argument, start, is specified. If start is specified, the search begins with the character + * unless the optional argument, start, is specified. If start is specified, the search begins with the character * position indicated by the value of start. The first character position in the source string is indicated by the value 1. * If the pattern is not found within the source string, the value 0 is returned. */ @@ -42,6 +41,7 @@ public class Locate extends ScalarFunction { this.start = start; } + @Override protected TypeResolution resolveType() { if (!childrenResolved()) { return new TypeResolution("Unresolved children"); @@ -61,11 +61,11 @@ public class Locate extends ScalarFunction { } @Override - protected ProcessorDefinition makeProcessorDefinition() { - return new LocateFunctionProcessorDefinition(location(), this, - ProcessorDefinitions.toProcessorDefinition(pattern), - ProcessorDefinitions.toProcessorDefinition(source), - start == null ? null : ProcessorDefinitions.toProcessorDefinition(start)); + protected Pipe makePipe() { + return new LocateFunctionPipe(location(), this, + Expressions.pipe(pattern), + Expressions.pipe(source), + start == null ? null : Expressions.pipe(start)); } @Override @@ -75,7 +75,7 @@ public class Locate extends ScalarFunction { @Override public boolean foldable() { - return pattern.foldable() + return pattern.foldable() && source.foldable() && (start == null? true : start.foldable()); } @@ -94,22 +94,20 @@ public class Locate extends ScalarFunction { return asScriptFrom(patternScript, sourceScript, startScript); } - protected ScriptTemplate asScriptFrom(ScriptTemplate patternScript, ScriptTemplate sourceScript, - ScriptTemplate startScript) - { + private ScriptTemplate asScriptFrom(ScriptTemplate patternScript, ScriptTemplate sourceScript, ScriptTemplate startScript) { if (start == null) { - return new ScriptTemplate(format(Locale.ROOT, formatTemplate("{sql}.%s(%s,%s)"), - "locate", - patternScript.template(), + return new ScriptTemplate(format(Locale.ROOT, formatTemplate("{sql}.%s(%s,%s)"), + "locate", + patternScript.template(), sourceScript.template()), paramsBuilder() .script(patternScript.params()).script(sourceScript.params()) .build(), dataType()); } // basically, transform the script to InternalSqlScriptUtils.[function_name](function_or_field1, function_or_field2,...) 
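// To make the transformation above concrete: for LOCATE(pattern, source, start) the
// three-argument branch renders something like (placeholder names hypothetical)
//
//     String.format(Locale.ROOT, "{sql}.%s(%s,%s,%s)",
//             "locate", "params.v0", "doc[params.v1].value", "params.v2")
//     // -> "{sql}.locate(params.v0,doc[params.v1].value,params.v2)"
//
// where formatTemplate(...) expands "{sql}" to the InternalSqlScriptUtils binding
// (in the real code it runs on the pattern before format). The fold path in
// LocateFunctionProcessor mirrors these semantics with 1 + indexOf(...), so a miss
// yields 1 + (-1) = 0, matching the "value 0 is returned" contract in the javadoc
// above; the (Integer) cast removed there was redundant, as the expression is
// already an int.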
- return new ScriptTemplate(format(Locale.ROOT, formatTemplate("{sql}.%s(%s,%s,%s)"), - "locate", - patternScript.template(), + return new ScriptTemplate(format(Locale.ROOT, formatTemplate("{sql}.%s(%s,%s,%s)"), + "locate", + patternScript.template(), sourceScript.template(), startScript.template()), paramsBuilder() @@ -119,8 +117,8 @@ public class Locate extends ScalarFunction { } @Override - protected ScriptTemplate asScriptFrom(FieldAttribute field) { - return new ScriptTemplate(formatScript("doc[{}].value"), + public ScriptTemplate scriptWithField(FieldAttribute field) { + return new ScriptTemplate(processScript("doc[{}].value"), paramsBuilder().variable(field.isInexact() ? field.exactAttribute().name() : field.name()).build(), dataType()); } @@ -138,4 +136,4 @@ public class Locate extends ScalarFunction { return new Locate(location(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinition.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionPipe.java similarity index 65% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinition.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionPipe.java index 19020511852..8477ff34277 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinition.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionPipe.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; @@ -15,12 +15,12 @@ import java.util.Arrays; import java.util.List; import java.util.Objects; -public class LocateFunctionProcessorDefinition extends ProcessorDefinition { +public class LocateFunctionPipe extends Pipe { - private final ProcessorDefinition pattern, source, start; + private final Pipe pattern, source, start; - public LocateFunctionProcessorDefinition(Location location, Expression expression, ProcessorDefinition pattern, - ProcessorDefinition source, ProcessorDefinition start) { + public LocateFunctionPipe(Location location, Expression expression, Pipe pattern, + Pipe source, Pipe start) { super(location, expression, start == null ? 
Arrays.asList(pattern, source) : Arrays.asList(pattern, source, start)); this.pattern = pattern; this.source = source; @@ -28,7 +28,7 @@ public class LocateFunctionProcessorDefinition extends ProcessorDefinition { } @Override - public final ProcessorDefinition replaceChildren(List newChildren) { + public final Pipe replaceChildren(List newChildren) { int childrenSize = newChildren.size(); if (childrenSize > 3 || childrenSize < 2) { throw new IllegalArgumentException("expected [2 or 3] children but received [" + newChildren.size() + "]"); @@ -37,10 +37,10 @@ public class LocateFunctionProcessorDefinition extends ProcessorDefinition { } @Override - public final ProcessorDefinition resolveAttributes(AttributeResolver resolver) { - ProcessorDefinition newPattern = pattern.resolveAttributes(resolver); - ProcessorDefinition newSource = source.resolveAttributes(resolver); - ProcessorDefinition newStart = start == null ? start : start.resolveAttributes(resolver); + public final Pipe resolveAttributes(AttributeResolver resolver) { + Pipe newPattern = pattern.resolveAttributes(resolver); + Pipe newSource = source.resolveAttributes(resolver); + Pipe newStart = start == null ? start : start.resolveAttributes(resolver); if (newPattern == pattern && newSource == source && newStart == start) { return this; } @@ -58,9 +58,9 @@ public class LocateFunctionProcessorDefinition extends ProcessorDefinition { return pattern.resolved() && source.resolved() && (start == null || start.resolved()); } - protected ProcessorDefinition replaceChildren(ProcessorDefinition newPattern, ProcessorDefinition newSource, - ProcessorDefinition newStart) { - return new LocateFunctionProcessorDefinition(location(), expression(), newPattern, newSource, newStart); + protected Pipe replaceChildren(Pipe newPattern, Pipe newSource, + Pipe newStart) { + return new LocateFunctionPipe(location(), expression(), newPattern, newSource, newStart); } @Override @@ -73,8 +73,8 @@ public class LocateFunctionProcessorDefinition extends ProcessorDefinition { } @Override - protected NodeInfo info() { - return NodeInfo.create(this, LocateFunctionProcessorDefinition::new, expression(), pattern, source, start); + protected NodeInfo info() { + return NodeInfo.create(this, LocateFunctionPipe::new, expression(), pattern, source, start); } @Override @@ -82,15 +82,15 @@ public class LocateFunctionProcessorDefinition extends ProcessorDefinition { return new LocateFunctionProcessor(pattern.asProcessor(), source.asProcessor(), start == null ? 
null : start.asProcessor()); } - public ProcessorDefinition source() { + public Pipe source() { return source; } - public ProcessorDefinition start() { + public Pipe start() { return start; } - public ProcessorDefinition pattern() { + public Pipe pattern() { return pattern; } @@ -109,7 +109,7 @@ public class LocateFunctionProcessorDefinition extends ProcessorDefinition { return false; } - LocateFunctionProcessorDefinition other = (LocateFunctionProcessorDefinition) obj; + LocateFunctionPipe other = (LocateFunctionPipe) obj; return Objects.equals(pattern, other.pattern) && Objects.equals(source, other.source) && Objects.equals(start, other.start); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessor.java index 7831fc44df2..dc26a4869c0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessor.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; import java.util.Objects; @@ -16,7 +16,7 @@ import java.util.Objects; public class LocateFunctionProcessor implements Processor { private final Processor pattern, source, start; - public static final String NAME = "lc"; + public static final String NAME = "sloc"; public LocateFunctionProcessor(Processor pattern, Processor source, Processor start) { this.pattern = pattern; @@ -63,9 +63,9 @@ public class LocateFunctionProcessor implements Processor { String stringSource = source instanceof Character ? source.toString() : (String) source; String stringPattern = pattern instanceof Character ? pattern.toString() : (String) pattern; - return (Integer) (1 + (start != null ? + return 1 + (start != null ? 
stringSource.indexOf(stringPattern, ((Number) start).intValue() - 1) - : stringSource.indexOf(stringPattern))); + : stringSource.indexOf(stringPattern)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Position.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Position.java index 9dfd3e094bb..68549474e3a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Position.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Position.java @@ -6,9 +6,9 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringStringProcessor.BinaryStringStringOperation; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; @@ -34,10 +34,10 @@ public class Position extends BinaryStringStringFunction { } @Override - protected ProcessorDefinition makeProcessorDefinition() { - return new BinaryStringStringProcessorDefinition(location(), this, - ProcessorDefinitions.toProcessorDefinition(left()), - ProcessorDefinitions.toProcessorDefinition(right()), + protected Pipe makePipe() { + return new BinaryStringStringPipe(location(), this, + Expressions.pipe(left()), + Expressions.pipe(right()), BinaryStringStringOperation.POSITION); } @@ -45,5 +45,4 @@ public class Position extends BinaryStringStringFunction { protected NodeInfo info() { return NodeInfo.create(this, Position::new, left(), right()); } - } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Repeat.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Repeat.java index 89ea7664e40..05ebcfdc759 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Repeat.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Repeat.java @@ -6,14 +6,10 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor.BinaryStringNumericOperation; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; -import java.util.function.BiFunction; - /** * Creates a string composed of a string repeated count times. 
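* e.g. REPEAT('la', 3) produces 'lalala'.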
*/ @@ -24,7 +20,7 @@ public class Repeat extends BinaryStringNumericFunction { } @Override - protected BiFunction operation() { + protected BinaryStringNumericOperation operation() { return BinaryStringNumericOperation.REPEAT; } @@ -33,17 +29,8 @@ public class Repeat extends BinaryStringNumericFunction { return new Repeat(location(), newLeft, newRight); } - @Override - protected ProcessorDefinition makeProcessorDefinition() { - return new BinaryStringNumericProcessorDefinition(location(), this, - ProcessorDefinitions.toProcessorDefinition(left()), - ProcessorDefinitions.toProcessorDefinition(right()), - BinaryStringNumericOperation.REPEAT); - } - @Override protected NodeInfo info() { return NodeInfo.create(this, Repeat::new, left(), right()); } - } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Replace.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Replace.java index 3834b16ff1e..9c8e856dbad 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Replace.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Replace.java @@ -6,11 +6,11 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -20,9 +20,8 @@ import java.util.List; import java.util.Locale; import static java.lang.String.format; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate; import static org.elasticsearch.xpack.sql.expression.function.scalar.string.ReplaceFunctionProcessor.doProcess; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; /** * Search the source string for occurrences of the pattern, and replace with the replacement string. 
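As a semantics note for reviewers: REPLACE substitutes every occurrence of the pattern, not just the first, and the fold path delegates to the same `Strings.replace` helper the processor further down keeps calling. A minimal sketch under that assumption (the literal inputs are of course made up):

    import org.elasticsearch.common.Strings;

    class ReplaceFoldSketch {
        public static void main(String[] args) {
            // ReplaceFunctionProcessor first normalizes Character arguments to
            // one-char Strings, then replaces all occurrences of the pattern
            System.out.println(Strings.replace("aa bb aa", "aa", "cc")); // cc bb cc
        }
    }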
@@ -38,6 +37,7 @@ public class Replace extends ScalarFunction { this.replacement = replacement; } + @Override protected TypeResolution resolveType() { if (!childrenResolved()) { return new TypeResolution("Unresolved children"); @@ -57,11 +57,11 @@ public class Replace extends ScalarFunction { } @Override - protected ProcessorDefinition makeProcessorDefinition() { - return new ReplaceFunctionProcessorDefinition(location(), this, - ProcessorDefinitions.toProcessorDefinition(source), - ProcessorDefinitions.toProcessorDefinition(pattern), - ProcessorDefinitions.toProcessorDefinition(replacement)); + protected Pipe makePipe() { + return new ReplaceFunctionPipe(location(), this, + Expressions.pipe(source), + Expressions.pipe(pattern), + Expressions.pipe(replacement)); } @Override @@ -71,7 +71,7 @@ public class Replace extends ScalarFunction { @Override public boolean foldable() { - return source.foldable() + return source.foldable() && pattern.foldable() && replacement.foldable(); } @@ -90,13 +90,11 @@ public class Replace extends ScalarFunction { return asScriptFrom(sourceScript, patternScript, replacementScript); } - protected ScriptTemplate asScriptFrom(ScriptTemplate sourceScript, ScriptTemplate patternScript, - ScriptTemplate replacementScript) - { + private ScriptTemplate asScriptFrom(ScriptTemplate sourceScript, ScriptTemplate patternScript, ScriptTemplate replacementScript) { // basically, transform the script to InternalSqlScriptUtils.[function_name](function_or_field1, function_or_field2,...) - return new ScriptTemplate(format(Locale.ROOT, formatTemplate("{sql}.%s(%s,%s,%s)"), - "replace", - sourceScript.template(), + return new ScriptTemplate(format(Locale.ROOT, formatTemplate("{sql}.%s(%s,%s,%s)"), + "replace", + sourceScript.template(), patternScript.template(), replacementScript.template()), paramsBuilder() @@ -106,8 +104,8 @@ public class Replace extends ScalarFunction { } @Override - protected ScriptTemplate asScriptFrom(FieldAttribute field) { - return new ScriptTemplate(formatScript("doc[{}].value"), + public ScriptTemplate scriptWithField(FieldAttribute field) { + return new ScriptTemplate(processScript("doc[{}].value"), paramsBuilder().variable(field.isInexact() ? 
field.exactAttribute().name() : field.name()).build(), dataType()); } @@ -125,4 +123,4 @@ public class Replace extends ScalarFunction { return new Replace(location(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionProcessorDefinition.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionPipe.java similarity index 63% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionProcessorDefinition.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionPipe.java index 14e7e929e35..173ea3e9c0c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionProcessorDefinition.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionPipe.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; @@ -15,12 +15,12 @@ import java.util.Arrays; import java.util.List; import java.util.Objects; -public class ReplaceFunctionProcessorDefinition extends ProcessorDefinition { +public class ReplaceFunctionPipe extends Pipe { - private final ProcessorDefinition source, pattern, replacement; + private final Pipe source, pattern, replacement; - public ReplaceFunctionProcessorDefinition(Location location, Expression expression, ProcessorDefinition source, - ProcessorDefinition pattern, ProcessorDefinition replacement) { + public ReplaceFunctionPipe(Location location, Expression expression, Pipe source, + Pipe pattern, Pipe replacement) { super(location, expression, Arrays.asList(source, pattern, replacement)); this.source = source; this.pattern = pattern; @@ -28,7 +28,7 @@ public class ReplaceFunctionProcessorDefinition extends ProcessorDefinition { } @Override - public final ProcessorDefinition replaceChildren(List newChildren) { + public final Pipe replaceChildren(List newChildren) { if (newChildren.size() != 3) { throw new IllegalArgumentException("expected [3] children but received [" + newChildren.size() + "]"); } @@ -36,10 +36,10 @@ public class ReplaceFunctionProcessorDefinition extends ProcessorDefinition { } @Override - public final ProcessorDefinition resolveAttributes(AttributeResolver resolver) { - ProcessorDefinition newSource = source.resolveAttributes(resolver); - ProcessorDefinition newPattern = pattern.resolveAttributes(resolver); - ProcessorDefinition newReplacement = replacement.resolveAttributes(resolver); + public final Pipe resolveAttributes(AttributeResolver resolver) { + Pipe newSource = source.resolveAttributes(resolver); + Pipe newPattern = pattern.resolveAttributes(resolver); + Pipe newReplacement = replacement.resolveAttributes(resolver); if (newSource == source && newPattern == pattern && newReplacement == replacement) { return this; } @@ -56,9 +56,9 @@ public class 
ReplaceFunctionProcessorDefinition extends ProcessorDefinition { return source.resolved() && pattern.resolved() && replacement.resolved(); } - protected ProcessorDefinition replaceChildren(ProcessorDefinition newSource, ProcessorDefinition newPattern, - ProcessorDefinition newReplacement) { - return new ReplaceFunctionProcessorDefinition(location(), expression(), newSource, newPattern, newReplacement); + protected Pipe replaceChildren(Pipe newSource, Pipe newPattern, + Pipe newReplacement) { + return new ReplaceFunctionPipe(location(), expression(), newSource, newPattern, newReplacement); } @Override @@ -69,8 +69,8 @@ public class ReplaceFunctionProcessorDefinition extends ProcessorDefinition { } @Override - protected NodeInfo info() { - return NodeInfo.create(this, ReplaceFunctionProcessorDefinition::new, expression(), source, pattern, replacement); + protected NodeInfo info() { + return NodeInfo.create(this, ReplaceFunctionPipe::new, expression(), source, pattern, replacement); } @Override @@ -78,15 +78,15 @@ public class ReplaceFunctionProcessorDefinition extends ProcessorDefinition { return new ReplaceFunctionProcessor(source.asProcessor(), pattern.asProcessor(), replacement.asProcessor()); } - public ProcessorDefinition source() { + public Pipe source() { return source; } - public ProcessorDefinition pattern() { + public Pipe pattern() { return pattern; } - public ProcessorDefinition replacement() { + public Pipe replacement() { return replacement; } @@ -105,7 +105,7 @@ public class ReplaceFunctionProcessorDefinition extends ProcessorDefinition { return false; } - ReplaceFunctionProcessorDefinition other = (ReplaceFunctionProcessorDefinition) obj; + ReplaceFunctionPipe other = (ReplaceFunctionPipe) obj; return Objects.equals(source, other.source) && Objects.equals(pattern, other.pattern) && Objects.equals(replacement, other.replacement); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionProcessor.java index 529ceb408aa..1745874469f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionProcessor.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; import java.util.Objects; @@ -17,7 +17,7 @@ import java.util.Objects; public class ReplaceFunctionProcessor implements Processor { private final Processor source, pattern, replacement; - public static final String NAME = "r"; + public static final String NAME = "srep"; public ReplaceFunctionProcessor(Processor source, Processor pattern, Processor replacement) { this.source = source; @@ -60,8 +60,8 @@ public class ReplaceFunctionProcessor implements Processor { throw new SqlIllegalArgumentException("A string/char is required; received [{}]", replacement); } - return Strings.replace(source instanceof Character ? source.toString() : (String)source, - pattern instanceof Character ? 
pattern.toString() : (String) pattern, + return Strings.replace(source instanceof Character ? source.toString() : (String)source, + pattern instanceof Character ? pattern.toString() : (String) pattern, replacement instanceof Character ? replacement.toString() : (String) replacement); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Right.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Right.java index 3250dcc0a6e..39d220e2d65 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Right.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Right.java @@ -6,14 +6,10 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor.BinaryStringNumericOperation; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; -import java.util.function.BiFunction; - /** * Returns the rightmost count characters of a string. */ @@ -24,7 +20,7 @@ public class Right extends BinaryStringNumericFunction { } @Override - protected BiFunction operation() { + protected BinaryStringNumericOperation operation() { return BinaryStringNumericOperation.RIGHT; } @@ -33,14 +29,6 @@ public class Right extends BinaryStringNumericFunction { return new Right(location(), newLeft, newRight); } - @Override - protected ProcessorDefinition makeProcessorDefinition() { - return new BinaryStringNumericProcessorDefinition(location(), this, - ProcessorDefinitions.toProcessorDefinition(left()), - ProcessorDefinitions.toProcessorDefinition(right()), - BinaryStringNumericOperation.RIGHT); - } - @Override protected NodeInfo info() { return NodeInfo.create(this, Right::new, left(), right()); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringProcessor.java index 2a1ba3a10cf..56ef820a4d3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringProcessor.java @@ -9,7 +9,7 @@ import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; import java.util.Arrays; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Substring.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Substring.java index 199cda01b9b..e1475665110 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Substring.java @@ -6,11 +6,11 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -20,12 +20,11 @@ import java.util.List; import java.util.Locale; import static java.lang.String.format; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate; import static org.elasticsearch.xpack.sql.expression.function.scalar.string.SubstringFunctionProcessor.doProcess; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; /** - * Returns a character string that is derived from the source string, beginning at the character position specified by start + * Returns a character string that is derived from the source string, beginning at the character position specified by start * for length characters. */ public class Substring extends ScalarFunction { @@ -39,6 +38,7 @@ public class Substring extends ScalarFunction { this.length = length; } + @Override protected TypeResolution resolveType() { if (!childrenResolved()) { return new TypeResolution("Unresolved children"); @@ -58,18 +58,16 @@ public class Substring extends ScalarFunction { } @Override - protected ProcessorDefinition makeProcessorDefinition() { - return new SubstringFunctionProcessorDefinition(location(), this, - ProcessorDefinitions.toProcessorDefinition(source), - ProcessorDefinitions.toProcessorDefinition(start), - ProcessorDefinitions.toProcessorDefinition(length)); + protected Pipe makePipe() { + return new SubstringFunctionPipe(location(), this, + Expressions.pipe(source), + Expressions.pipe(start), + Expressions.pipe(length)); } @Override public boolean foldable() { - return source.foldable() - && start.foldable() - && length.foldable(); + return source.foldable() && start.foldable() && length.foldable(); } @Override @@ -92,12 +90,11 @@ public class Substring extends ScalarFunction { } protected ScriptTemplate asScriptFrom(ScriptTemplate sourceScript, ScriptTemplate startScript, - ScriptTemplate lengthScript) - { + ScriptTemplate lengthScript) { // basically, transform the script to InternalSqlScriptUtils.[function_name](function_or_field1, function_or_field2,...) 
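The "transform the script to InternalSqlScriptUtils.[function_name](...)" comment above is easiest to see with a concrete expansion. Below is a minimal, self-contained sketch of the `{sql}.%s(%s,%s,%s)` template at work; it assumes the `{sql}` placeholder resolves to the whitelisted `InternalSqlScriptUtils` class (consistent with the whitelist changes later in this patch), and the argument strings are invented for illustration.

----------------------------------------
import java.util.Locale;

public class SubstringTemplateSketch {
    public static void main(String[] args) {
        // Assumption: formatTemplate resolves the {sql} placeholder to the
        // whitelisted InternalSqlScriptUtils class; arguments below are made up.
        String resolved = "InternalSqlScriptUtils.%s(%s,%s,%s)";
        String script = String.format(Locale.ROOT, resolved,
                "substring",                   // function name
                "doc[\"name.keyword\"].value", // sourceScript.template() stand-in
                "params.v0",                   // startScript.template() stand-in
                "params.v1");                  // lengthScript.template() stand-in
        System.out.println(script);
        // InternalSqlScriptUtils.substring(doc["name.keyword"].value,params.v0,params.v1)
    }
}
----------------------------------------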
- return new ScriptTemplate(format(Locale.ROOT, formatTemplate("{sql}.%s(%s,%s,%s)"), - "substring", - sourceScript.template(), + return new ScriptTemplate(format(Locale.ROOT, formatTemplate("{sql}.%s(%s,%s,%s)"), + "substring", + sourceScript.template(), startScript.template(), lengthScript.template()), paramsBuilder() @@ -107,8 +104,8 @@ public class Substring extends ScalarFunction { } @Override - protected ScriptTemplate asScriptFrom(FieldAttribute field) { - return new ScriptTemplate(formatScript("doc[{}].value"), + public ScriptTemplate scriptWithField(FieldAttribute field) { + return new ScriptTemplate(processScript("doc[{}].value"), paramsBuilder().variable(field.isInexact() ? field.exactAttribute().name() : field.name()).build(), dataType()); } @@ -126,4 +123,4 @@ public class Substring extends ScalarFunction { return new Substring(location(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionProcessorDefinition.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionPipe.java similarity index 62% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionProcessorDefinition.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionPipe.java index 653aac24f98..078defec40b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionProcessorDefinition.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionPipe.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; @@ -15,12 +15,12 @@ import java.util.Arrays; import java.util.List; import java.util.Objects; -public class SubstringFunctionProcessorDefinition extends ProcessorDefinition { +public class SubstringFunctionPipe extends Pipe { - private final ProcessorDefinition source, start, length; + private final Pipe source, start, length; - public SubstringFunctionProcessorDefinition(Location location, Expression expression, ProcessorDefinition source, - ProcessorDefinition start, ProcessorDefinition length) { + public SubstringFunctionPipe(Location location, Expression expression, Pipe source, + Pipe start, Pipe length) { super(location, expression, Arrays.asList(source, start, length)); this.source = source; this.start = start; @@ -28,7 +28,7 @@ public class SubstringFunctionProcessorDefinition extends ProcessorDefinition { } @Override - public final ProcessorDefinition replaceChildren(List newChildren) { + public final Pipe replaceChildren(List newChildren) { if (newChildren.size() != 3) { throw new IllegalArgumentException("expected [3] children but received [" + newChildren.size() + "]"); } @@ -36,10 +36,10 @@ public class SubstringFunctionProcessorDefinition extends ProcessorDefinition { } @Override - public final 
ProcessorDefinition resolveAttributes(AttributeResolver resolver) { - ProcessorDefinition newSource = source.resolveAttributes(resolver); - ProcessorDefinition newStart = start.resolveAttributes(resolver); - ProcessorDefinition newLength = length.resolveAttributes(resolver); + public final Pipe resolveAttributes(AttributeResolver resolver) { + Pipe newSource = source.resolveAttributes(resolver); + Pipe newStart = start.resolveAttributes(resolver); + Pipe newLength = length.resolveAttributes(resolver); if (newSource == source && newStart == start && newLength == length) { return this; } @@ -56,9 +56,9 @@ public class SubstringFunctionProcessorDefinition extends ProcessorDefinition { return source.resolved() && start.resolved() && length.resolved(); } - protected ProcessorDefinition replaceChildren(ProcessorDefinition newSource, ProcessorDefinition newStart, - ProcessorDefinition newLength) { - return new SubstringFunctionProcessorDefinition(location(), expression(), newSource, newStart, newLength); + protected Pipe replaceChildren(Pipe newSource, Pipe newStart, + Pipe newLength) { + return new SubstringFunctionPipe(location(), expression(), newSource, newStart, newLength); } @Override @@ -69,8 +69,8 @@ public class SubstringFunctionProcessorDefinition extends ProcessorDefinition { } @Override - protected NodeInfo info() { - return NodeInfo.create(this, SubstringFunctionProcessorDefinition::new, expression(), source, start, length); + protected NodeInfo info() { + return NodeInfo.create(this, SubstringFunctionPipe::new, expression(), source, start, length); } @Override @@ -78,15 +78,15 @@ public class SubstringFunctionProcessorDefinition extends ProcessorDefinition { return new SubstringFunctionProcessor(source.asProcessor(), start.asProcessor(), length.asProcessor()); } - public ProcessorDefinition source() { + public Pipe source() { return source; } - public ProcessorDefinition start() { + public Pipe start() { return start; } - public ProcessorDefinition length() { + public Pipe length() { return length; } @@ -105,7 +105,7 @@ public class SubstringFunctionProcessorDefinition extends ProcessorDefinition { return false; } - SubstringFunctionProcessorDefinition other = (SubstringFunctionProcessorDefinition) obj; + SubstringFunctionPipe other = (SubstringFunctionPipe) obj; return Objects.equals(source, other.source) && Objects.equals(start, other.start) && Objects.equals(length, other.length); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionProcessor.java index 28f251bf0db..ca30a7804c4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionProcessor.java @@ -8,15 +8,16 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; import java.util.Objects; public class SubstringFunctionProcessor implements Processor { + 
public static final String NAME = "ssub"; + private final Processor source, start, length; - public static final String NAME = "sb"; public SubstringFunctionProcessor(Processor source, Processor start, Processor length) { this.source = source; @@ -62,7 +63,7 @@ public class SubstringFunctionProcessor implements Processor { throw new SqlIllegalArgumentException("A positive number is required for [length]; received [{}]", length); } - return StringFunctionUtils.substring(source instanceof Character ? source.toString() : (String) source, + return StringFunctionUtils.substring(source instanceof Character ? source.toString() : (String) source, ((Number) start).intValue() - 1, // SQL is 1-based when it comes to string manipulation ((Number) length).intValue()); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringFunction.java index a0cfd50422c..d387fe7e4a1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringFunction.java @@ -6,13 +6,13 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.UnaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -20,7 +20,7 @@ import java.util.Locale; import java.util.Objects; import static java.lang.String.format; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; public abstract class UnaryStringFunction extends UnaryScalarFunction { @@ -49,27 +49,25 @@ public abstract class UnaryStringFunction extends UnaryScalarFunction { } @Override - protected final ProcessorDefinition makeProcessorDefinition() { - return new UnaryProcessorDefinition(location(), this, ProcessorDefinitions.toProcessorDefinition(field()), - new StringProcessor(operation())); + protected final Pipe makePipe() { + return new UnaryPipe(location(), this, Expressions.pipe(field()), new StringProcessor(operation())); } protected abstract StringOperation operation(); @Override - protected ScriptTemplate asScriptFrom(FieldAttribute field) { + public ScriptTemplate scriptWithField(FieldAttribute field) { //TODO change this to use _source instead of 
the exact form (aka field.keyword for text fields) - return new ScriptTemplate(formatScript("doc[{}].value"), + return new ScriptTemplate(processScript("doc[{}].value"), paramsBuilder().variable(field.isInexact() ? field.exactAttribute().name() : field.name()).build(), dataType()); } @Override - protected String formatScript(String template) { - // basically, transform the script to InternalSqlScriptUtils.[function_name](other_function_or_field_name) - return super.formatScript( - format(Locale.ROOT, "{sql}.%s(%s)", - StringUtils.underscoreToLowerCamelCase(operation().toString()), + public String processScript(String template) { + return formatTemplate( + format(Locale.ROOT, "{sql}.%s(%s)", + StringUtils.underscoreToLowerCamelCase(operation().name()), template)); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringIntFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringIntFunction.java index 7e963eb9db7..613b37dd7e8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringIntFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringIntFunction.java @@ -6,21 +6,20 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.UnaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.util.StringUtils; import java.util.Locale; import java.util.Objects; import static java.lang.String.format; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; /** * Base unary function for text manipulating SQL functions that receive as parameter a number @@ -52,25 +51,24 @@ public abstract class UnaryStringIntFunction extends UnaryScalarFunction { } @Override - protected final ProcessorDefinition makeProcessorDefinition() { - return new UnaryProcessorDefinition(location(), this, ProcessorDefinitions.toProcessorDefinition(field()), - new StringProcessor(operation())); + protected final Pipe makePipe() { + return new UnaryPipe(location(), this, Expressions.pipe(field()), new StringProcessor(operation())); } protected abstract StringOperation operation(); @Override - protected ScriptTemplate asScriptFrom(FieldAttribute field) { - return new ScriptTemplate(formatScript("doc[{}].value"), + public 
ScriptTemplate scriptWithField(FieldAttribute field) { + return new ScriptTemplate(processScript("doc[{}].value"), paramsBuilder().variable(field.name()).build(), dataType()); } @Override - protected String formatScript(String template) { - return super.formatScript( - format(Locale.ROOT, "{sql}.%s(%s)", - StringUtils.underscoreToLowerCamelCase(operation().toString()), + public String processScript(String template) { + return super.processScript( + format(Locale.ROOT, "{sql}.%s(%s)", + operation().toString().toLowerCase(Locale.ROOT), template)); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java index f0a79f15e36..782b2cc66cb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.whitelist; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor.BinaryStringNumericOperation; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringStringProcessor.BinaryStringStringOperation; import org.elasticsearch.xpack.sql.expression.function.scalar.string.ConcatFunctionProcessor; @@ -121,4 +122,12 @@ public final class InternalSqlScriptUtils { public static Integer locate(String s1, String s2) { return locate(s1, s2, null); } -} + + public static Number round(Number v, Number s) { + return BinaryMathOperation.ROUND.apply(v, s); + } + + public static Number truncate(Number v, Number s) { + return BinaryMathOperation.TRUNCATE.apply(v, s); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/AggExtractorInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggExtractorInput.java similarity index 76% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/AggExtractorInput.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggExtractorInput.java index 22a7ab2fa3e..15aff1e4155 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/AggExtractorInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggExtractorInput.java @@ -3,14 +3,14 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
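The `round`/`truncate` statics added to `InternalSqlScriptUtils` just above follow the same delegation pattern as the string functions: the whitelisted entry point stays a one-liner and forwards to a shared operation enum, so Painless scripts and local `Pipe` execution run one code path. A minimal sketch of that shape; the enum body here is a simplified assumption, not the real `BinaryMathOperation` implementation.

----------------------------------------
enum MathOpSketch {
    ROUND {
        @Override
        Number apply(Number v, Number s) {
            // simplified rounding to s decimal places (assumption, for illustration)
            double factor = Math.pow(10, s.intValue());
            return Math.round(v.doubleValue() * factor) / factor;
        }
    };

    abstract Number apply(Number v, Number s);
}

final class ScriptUtilsSketch {
    private ScriptUtilsSketch() {}

    // Same shape as the new InternalSqlScriptUtils.round(Number, Number) above.
    static Number round(Number v, Number s) {
        return MathOpSketch.ROUND.apply(v, s);
    }
}
----------------------------------------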
*/ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; +package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.execution.search.extractor.BucketExtractor; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.BucketExtractorProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ChainingProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.BucketExtractorProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.ChainingProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; @@ -40,7 +40,7 @@ public class AggExtractorInput extends LeafInput { } @Override - public ProcessorDefinition resolveAttributes(AttributeResolver resolver) { + public Pipe resolveAttributes(AttributeResolver resolver) { return this; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/AggNameInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggNameInput.java similarity index 91% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/AggNameInput.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggNameInput.java index 43da886de49..9e4a1bc857c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/AggNameInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggNameInput.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; +package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.tree.Location; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/AggPathInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggPathInput.java similarity index 92% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/AggPathInput.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggPathInput.java index 957a13c2f3a..8e78f0e2ad4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/AggPathInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggPathInput.java @@ -3,11 +3,11 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
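AggExtractorInput above wires a bucket extractor into the processor chain; the essential mechanic is ChainingProcessor composing two processors. A toy model of that composition, using a simplified Processor interface (the real one also extends NamedWriteable for transport serialization):

----------------------------------------
public class ChainingSketch {
    interface Processor {
        Object process(Object input);
    }

    static final class ChainingProcessor implements Processor {
        private final Processor first, processor;
        ChainingProcessor(Processor first, Processor processor) {
            this.first = first;
            this.processor = processor;
        }
        @Override
        public Object process(Object input) {
            // run the upstream (extractor) step, then the wrapping transformation
            return processor.process(first.process(input));
        }
    }

    public static void main(String[] args) {
        Processor extract = bucket -> 42L; // BucketExtractorProcessor stand-in
        Processor abs = v -> Math.abs(((Number) v).longValue());
        System.out.println(new ChainingProcessor(extract, abs).process(new Object())); // 42
    }
}
----------------------------------------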
*/ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; +package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.xpack.sql.execution.search.AggRef; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/AttributeInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AttributeInput.java similarity index 89% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/AttributeInput.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AttributeInput.java index ba97f0e6494..ed1823e75a6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/AttributeInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AttributeInput.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; +package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Attribute; @@ -31,7 +31,7 @@ public class AttributeInput extends NonExecutableInput { } @Override - public ProcessorDefinition resolveAttributes(AttributeResolver resolver) { + public Pipe resolveAttributes(AttributeResolver resolver) { return new ReferenceInput(location(), expression(), resolver.resolve(context())); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/BinaryProcessorDefinition.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/BinaryPipe.java similarity index 59% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/BinaryProcessorDefinition.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/BinaryPipe.java index e758b104f28..285470ad8fd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/BinaryProcessorDefinition.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/BinaryPipe.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
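A pattern worth calling out, since it repeats in AttributeInput above and in BinaryPipe, ReplaceFunctionPipe, and SubstringFunctionPipe elsewhere in this patch: resolveAttributes is copy-on-change. Each node resolves its children, returns `this` when nothing changed (an identity comparison), and only otherwise rebuilds itself. A condensed sketch with stand-in types:

----------------------------------------
public class ResolveSketch {
    interface Resolver {
        Pipe resolve(Pipe leaf);
    }

    abstract static class Pipe {
        abstract Pipe resolveAttributes(Resolver resolver);
    }

    static final class AttributeLeaf extends Pipe {
        @Override
        Pipe resolveAttributes(Resolver resolver) {
            return resolver.resolve(this); // leaves hand themselves to the resolver
        }
    }

    static final class BinaryPipe extends Pipe {
        final Pipe left, right;
        BinaryPipe(Pipe left, Pipe right) {
            this.left = left;
            this.right = right;
        }
        @Override
        Pipe resolveAttributes(Resolver resolver) {
            Pipe newLeft = left.resolveAttributes(resolver);
            Pipe newRight = right.resolveAttributes(resolver);
            if (newLeft == left && newRight == right) {
                return this; // unchanged subtree: reuse the existing node
            }
            return new BinaryPipe(newLeft, newRight);
        }
    }
}
----------------------------------------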
*/ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; +package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Expression; @@ -11,30 +11,31 @@ import org.elasticsearch.xpack.sql.tree.Location; import java.util.Arrays; import java.util.List; +import java.util.Objects; -public abstract class BinaryProcessorDefinition extends ProcessorDefinition { +public abstract class BinaryPipe extends Pipe { - private final ProcessorDefinition left, right; + private final Pipe left, right; - public BinaryProcessorDefinition(Location location, Expression expression, ProcessorDefinition left, ProcessorDefinition right) { + public BinaryPipe(Location location, Expression expression, Pipe left, Pipe right) { super(location, expression, Arrays.asList(left, right)); this.left = left; this.right = right; } @Override - public final ProcessorDefinition replaceChildren(List newChildren) { + public final Pipe replaceChildren(List newChildren) { if (newChildren.size() != 2) { throw new IllegalArgumentException("expected [2] children but received [" + newChildren.size() + "]"); } return replaceChildren(newChildren.get(0), newChildren.get(1)); } - public ProcessorDefinition left() { + public Pipe left() { return left; } - public ProcessorDefinition right() { + public Pipe right() { return right; } @@ -44,9 +45,9 @@ public abstract class BinaryProcessorDefinition extends ProcessorDefinition { } @Override - public final ProcessorDefinition resolveAttributes(AttributeResolver resolver) { - ProcessorDefinition newLeft = left.resolveAttributes(resolver); - ProcessorDefinition newRight = right.resolveAttributes(resolver); + public final Pipe resolveAttributes(AttributeResolver resolver) { + Pipe newLeft = left.resolveAttributes(resolver); + Pipe newRight = right.resolveAttributes(resolver); if (newLeft == left && newRight == right) { return this; } @@ -57,7 +58,7 @@ public abstract class BinaryProcessorDefinition extends ProcessorDefinition { * Build a copy of this object with new left and right children. Used by * {@link #resolveAttributes(AttributeResolver)}. 
*/ - protected abstract BinaryProcessorDefinition replaceChildren(ProcessorDefinition left, ProcessorDefinition right); + protected abstract BinaryPipe replaceChildren(Pipe left, Pipe right); @Override public boolean resolved() { @@ -69,4 +70,24 @@ public abstract class BinaryProcessorDefinition extends ProcessorDefinition { left.collectFields(sourceBuilder); right.collectFields(sourceBuilder); } -} + + @Override + public int hashCode() { + return Objects.hash(left(), right()); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + BinaryPipe other = (BinaryPipe) obj; + return Objects.equals(left(), other.left()) + && Objects.equals(right(), other.right()); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/CommonNonExecutableInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/CommonNonExecutableInput.java similarity index 81% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/CommonNonExecutableInput.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/CommonNonExecutableInput.java index c202f215457..666b4c093d0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/CommonNonExecutableInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/CommonNonExecutableInput.java @@ -3,12 +3,12 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; +package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.tree.Location; /** @@ -26,7 +26,7 @@ abstract class CommonNonExecutableInput extends NonExecutableInput { } @Override - public final ProcessorDefinition resolveAttributes(AttributeResolver resolver) { + public final Pipe resolveAttributes(AttributeResolver resolver) { return this; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/ConstantInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ConstantInput.java similarity index 76% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/ConstantInput.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ConstantInput.java index 63db9d9a4c5..ddab153cc4f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/ConstantInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ConstantInput.java @@ -3,12 +3,12 @@ * or more contributor license agreements. 
Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; +package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ConstantProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; @@ -34,7 +34,7 @@ public class ConstantInput extends LeafInput { } @Override - public ProcessorDefinition resolveAttributes(AttributeResolver resolver) { + public Pipe resolveAttributes(AttributeResolver resolver) { return this; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/HitExtractorInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/HitExtractorInput.java similarity index 78% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/HitExtractorInput.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/HitExtractorInput.java index 50a00880e9d..750b948a48b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/HitExtractorInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/HitExtractorInput.java @@ -3,13 +3,13 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
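ConstantInput above is the degenerate leaf: it needs nothing from the search response, so its processor can ignore its runtime input entirely. In miniature (the real ConstantProcessor also implements the wire protocol):

----------------------------------------
public class ConstantSketch {
    static final class ConstantProcessor {
        private final Object constant;
        ConstantProcessor(Object constant) {
            this.constant = constant;
        }
        Object process(Object ignored) {
            return constant; // never touches the input
        }
    }

    public static void main(String[] args) {
        System.out.println(new ConstantProcessor("SQL").process(null)); // SQL
    }
}
----------------------------------------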
*/ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; +package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.HitExtractorProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.HitExtractorProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; @@ -35,7 +35,7 @@ public class HitExtractorInput extends LeafInput { } @Override - public ProcessorDefinition resolveAttributes(AttributeResolver resolver) { + public Pipe resolveAttributes(AttributeResolver resolver) { return this; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/LeafInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/LeafInput.java similarity index 85% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/LeafInput.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/LeafInput.java index 5b7468faa31..b2a15ab4471 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/LeafInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/LeafInput.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
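A note on the scriptWithField(...) bodies that recur earlier in this patch (Replace, Substring, UnaryStringFunction): they read the column via doc values and, for analyzed text fields, swap in the exact (keyword) sub-field, because the analyzed field has no doc values to read. A sketch of that selection logic with a stand-in FieldAttribute, not the real class:

----------------------------------------
public class ExactFieldSketch {
    static final class FieldAttribute {
        final String name;
        final String exactName; // e.g. "name.keyword", or null when none exists
        FieldAttribute(String name, String exactName) {
            this.name = name;
            this.exactName = exactName;
        }
        boolean isInexact() {
            return exactName != null && !exactName.equals(name);
        }
    }

    // mirrors: field.isInexact() ? field.exactAttribute().name() : field.name()
    static String docValueVariable(FieldAttribute field) {
        return field.isInexact() ? field.exactName : field.name;
    }

    public static void main(String[] args) {
        System.out.println(docValueVariable(new FieldAttribute("name", "name.keyword"))); // name.keyword
        System.out.println(docValueVariable(new FieldAttribute("age", null)));            // age
    }
}
----------------------------------------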
*/ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; +package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.tree.Location; @@ -13,7 +13,7 @@ import java.util.Objects; import static java.util.Collections.emptyList; -public abstract class LeafInput extends ProcessorDefinition { +public abstract class LeafInput extends Pipe { private T context; @@ -23,7 +23,7 @@ public abstract class LeafInput extends ProcessorDefinition { } @Override - public final ProcessorDefinition replaceChildren(List newChildren) { + public final Pipe replaceChildren(List newChildren) { throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/NonExecutableInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/NonExecutableInput.java similarity index 82% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/NonExecutableInput.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/NonExecutableInput.java index 2161f09d46a..be3eded19b2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/NonExecutableInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/NonExecutableInput.java @@ -3,11 +3,11 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; +package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.tree.Location; public abstract class NonExecutableInput extends LeafInput { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/ProcessorDefinition.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/Pipe.java similarity index 60% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/ProcessorDefinition.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/Pipe.java index 929367fca94..4d1604ff535 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/ProcessorDefinition.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/Pipe.java @@ -3,30 +3,32 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; +package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.xpack.sql.execution.search.FieldExtraction; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.Node; import java.util.List; /** - * Contains the tree for processing a function, so for example, the {@code ProcessorDefinition} of: + * Processing pipe for an expression (tree). Used for local execution of expressions + * on the invoking node. + * For example, the {@code Pipe} of: * * ABS(MAX(foo)) + CAST(bar) * - * Is an {@code Add} Function with left {@code ABS} over an aggregate (MAX), and + * Is an {@code Add} operator with left {@code ABS} over an aggregate (MAX), and * right being a {@code CAST} function. */ -public abstract class ProcessorDefinition extends Node implements FieldExtraction { +public abstract class Pipe extends Node implements FieldExtraction { private final Expression expression; - public ProcessorDefinition(Location location, Expression expression, List children) { + public Pipe(Location location, Expression expression, List children) { super(location, children); this.expression = expression; } @@ -41,12 +43,13 @@ public abstract class ProcessorDefinition extends Node impl /** * Resolve {@link Attribute}s which are unprocessable into - * {@link FieldExtraction}s which are processable. + * {@link Pipe}s that are. * * @return {@code this} if the resolution doesn't change the - * definition, a new {@link ProcessorDefinition} otherwise + * definition, a new {@link Pipe} otherwise */ - public abstract ProcessorDefinition resolveAttributes(AttributeResolver resolver); + public abstract Pipe resolveAttributes(AttributeResolver resolver); + public interface AttributeResolver { FieldExtraction resolve(Attribute attribute); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/ReferenceInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ReferenceInput.java similarity index 83% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/ReferenceInput.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ReferenceInput.java index 59c001093be..ec4850a3e18 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/ReferenceInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ReferenceInput.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
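The javadoc example above, ABS(MAX(foo)) + CAST(bar), is easier to picture as an actual tree. The sketch below builds it from throwaway stand-ins named after the Pipe subclasses in this patch (UnaryPipe, AggPathInput, AttributeInput); these are shapes only, not the real constructors:

----------------------------------------
import java.util.Arrays;
import java.util.List;

public class PipeTreeSketch {
    static class Node {
        final String name;
        final List<Node> children;
        Node(String name, Node... children) {
            this.name = name;
            this.children = Arrays.asList(children);
        }
        @Override
        public String toString() {
            return children.isEmpty() ? name : name + children;
        }
    }

    public static void main(String[] args) {
        Node maxFoo = new Node("AggPathInput:MAX(foo)"); // aggregate read from the agg tree
        Node bar = new Node("AttributeInput:bar");       // plain field reference
        Node add = new Node("Add",
                new Node("UnaryPipe:ABS", maxFoo),
                new Node("UnaryPipe:CAST", bar));
        System.out.println(add);
        // Add[UnaryPipe:ABS[AggPathInput:MAX(foo)], UnaryPipe:CAST[AttributeInput:bar]]
    }
}
----------------------------------------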
*/ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; +package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.xpack.sql.execution.search.FieldExtraction; import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; @@ -17,7 +17,7 @@ public class ReferenceInput extends NonExecutableInput { } @Override - protected NodeInfo info() { + protected NodeInfo info() { return NodeInfo.create(this, ReferenceInput::new, expression(), context()); } @@ -27,7 +27,7 @@ public class ReferenceInput extends NonExecutableInput { } @Override - public ProcessorDefinition resolveAttributes(AttributeResolver resolver) { + public Pipe resolveAttributes(AttributeResolver resolver) { return this; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/ScoreProcessorDefinition.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ScorePipe.java similarity index 63% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/ScoreProcessorDefinition.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ScorePipe.java index 5617fa01617..c65070405a8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/ScoreProcessorDefinition.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ScorePipe.java @@ -3,13 +3,13 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; +package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.execution.search.extractor.ScoreExtractor; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.HitExtractorProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.HitExtractorProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; @@ -17,18 +17,18 @@ import java.util.List; import static java.util.Collections.emptyList; -public class ScoreProcessorDefinition extends ProcessorDefinition { - public ScoreProcessorDefinition(Location location, Expression expression) { +public class ScorePipe extends Pipe { + public ScorePipe(Location location, Expression expression) { super(location, expression, emptyList()); } @Override - protected NodeInfo info() { - return NodeInfo.create(this, ScoreProcessorDefinition::new, expression()); + protected NodeInfo info() { + return NodeInfo.create(this, ScorePipe::new, expression()); } @Override - public final ProcessorDefinition replaceChildren(List newChildren) { + public final Pipe replaceChildren(List newChildren) { throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); } @@ -48,7 +48,7 @@ public class ScoreProcessorDefinition extends ProcessorDefinition { } @Override - public ProcessorDefinition resolveAttributes(AttributeResolver 
resolver) { + public Pipe resolveAttributes(AttributeResolver resolver) { return this; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/UnaryProcessorDefinition.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/UnaryPipe.java similarity index 63% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/UnaryProcessorDefinition.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/UnaryPipe.java index fe8a4099ec3..8e36f448929 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/UnaryProcessorDefinition.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/UnaryPipe.java @@ -3,12 +3,12 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; +package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ChainingProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.ChainingProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; @@ -17,31 +17,31 @@ import java.util.Objects; import static java.util.Collections.singletonList; -public final class UnaryProcessorDefinition extends ProcessorDefinition { +public final class UnaryPipe extends Pipe { - private final ProcessorDefinition child; + private final Pipe child; private final Processor action; - public UnaryProcessorDefinition(Location location, Expression expression, ProcessorDefinition child, Processor action) { + public UnaryPipe(Location location, Expression expression, Pipe child, Processor action) { super(location, expression, singletonList(child)); this.child = child; this.action = action; } @Override - protected NodeInfo info() { - return NodeInfo.create(this, UnaryProcessorDefinition::new, expression(), child, action); + protected NodeInfo info() { + return NodeInfo.create(this, UnaryPipe::new, expression(), child, action); } @Override - public ProcessorDefinition replaceChildren(List newChildren) { + public Pipe replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new UnaryProcessorDefinition(location(), expression(), newChildren.get(0), action); + return new UnaryPipe(location(), expression(), newChildren.get(0), action); } - public ProcessorDefinition child() { + public Pipe child() { return child; } @@ -65,12 +65,12 @@ public final class UnaryProcessorDefinition extends ProcessorDefinition { } @Override - public ProcessorDefinition resolveAttributes(AttributeResolver resolver) { - ProcessorDefinition newChild = child.resolveAttributes(resolver); + public Pipe resolveAttributes(AttributeResolver resolver) { + Pipe newChild = child.resolveAttributes(resolver); if 
(newChild == child) { return this; } - return new UnaryProcessorDefinition(location(), expression(), newChild, action); + return new UnaryPipe(location(), expression(), newChild, action); } @Override @@ -93,7 +93,7 @@ public final class UnaryProcessorDefinition extends ProcessorDefinition { return false; } - UnaryProcessorDefinition other = (UnaryProcessorDefinition) obj; + UnaryPipe other = (UnaryPipe) obj; return Objects.equals(action, other.action) && Objects.equals(child, other.child) && Objects.equals(expression(), other.expression()); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/BinaryProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/BinaryProcessor.java similarity index 94% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/BinaryProcessor.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/BinaryProcessor.java index 81795923915..c3222e2fa03 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/BinaryProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/BinaryProcessor.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime; +package org.elasticsearch.xpack.sql.expression.gen.processor; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/BucketExtractorProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/BucketExtractorProcessor.java similarity index 96% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/BucketExtractorProcessor.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/BucketExtractorProcessor.java index 0a5a2b1f1e0..e3e8a2c9c83 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/BucketExtractorProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/BucketExtractorProcessor.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime; +package org.elasticsearch.xpack.sql.expression.gen.processor; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/ChainingProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/ChainingProcessor.java similarity index 95% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/ChainingProcessor.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/ChainingProcessor.java index 9be7de637e3..e9c13858183 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/ChainingProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/ChainingProcessor.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime; +package org.elasticsearch.xpack.sql.expression.gen.processor; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/ConstantProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/ConstantProcessor.java similarity index 94% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/ConstantProcessor.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/ConstantProcessor.java index cc419f3c7b7..21cb72f2dab 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/ConstantProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/ConstantProcessor.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
 */
-package org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime;
+package org.elasticsearch.xpack.sql.expression.gen.processor;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/HitExtractorProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/HitExtractorProcessor.java
similarity index 96%
rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/HitExtractorProcessor.java
rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/HitExtractorProcessor.java
index 5960b8cfca0..9f58b008989 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/HitExtractorProcessor.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/HitExtractorProcessor.java
@@ -3,7 +3,7 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
  */
-package org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime;
+package org.elasticsearch.xpack.sql.expression.gen.processor;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/Processor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/Processor.java
similarity index 72%
rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/Processor.java
rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/Processor.java
index 9fb67fb51a1..2b8433191e3 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/Processor.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/Processor.java
@@ -3,12 +3,12 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
  */
-package org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime;
+package org.elasticsearch.xpack.sql.expression.gen.processor;
 
 import org.elasticsearch.common.io.stream.NamedWriteable;
 
 /**
- * For a scalar function, a {@code Processor} is how we convert the value to convert one value to another value. For instance, ABS(foo).
+ * A {@code Processor} evaluates an expression locally. For instance, ABS(foo).
  * Aggregate functions are handled by ES but scalars are not.
  *
  * This is an opaque class, the computed/compiled result gets saved on the client during scrolling.
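The Processor contract described above is the heart of client-side evaluation: a compiled chain of processors is shipped with the cursor and turns each extracted value into a final result, one value at a time. A minimal plain-Java sketch of that idea (the names below are illustrative stand-ins, not classes from this patch; the real interface also extends NamedWriteable so the chain can be serialized):

--------------------------------------------------
// Toy model of the gen.processor contract: transform one value into another,
// and compose transformations the way ChainingProcessor does for UnaryPipe.
public class ProcessorSketch {

    // Hypothetical stand-in for the real Processor interface
    interface Processor {
        Object process(Object input);
    }

    // Mirrors the ChainingProcessor idea: run 'first', then 'second' on its output
    static Processor chain(Processor first, Processor second) {
        return input -> second.process(first.process(input));
    }

    public static void main(String[] args) {
        Processor abs = v -> Math.abs(((Number) v).longValue());   // ABS(foo)
        Processor negate = v -> -((Number) v).longValue();         // -x
        Processor negAbs = chain(abs, negate);                     // -ABS(foo)

        System.out.println(abs.process(-5L));    // 5
        System.out.println(negAbs.process(-5L)); // -5
    }
}
--------------------------------------------------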
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/UnaryProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/UnaryProcessor.java similarity index 95% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/UnaryProcessor.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/UnaryProcessor.java index 613e2632283..7c1a5dd7d35 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/UnaryProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/processor/UnaryProcessor.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime; +package org.elasticsearch.xpack.sql.expression.gen.processor; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/Agg.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Agg.java similarity index 90% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/Agg.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Agg.java index 3b75b7f98b5..55bba713062 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/Agg.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Agg.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.script; +package org.elasticsearch.xpack.sql.expression.gen.script; import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/Param.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Param.java similarity index 89% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/Param.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Param.java index ff2e3322ae0..579eacd036b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/Param.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Param.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.sql.expression.function.scalar.script; +package org.elasticsearch.xpack.sql.expression.gen.script; import java.util.Locale; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/Params.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Params.java similarity index 98% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/Params.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Params.java index c7c331e3b58..0fc85b3241f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/Params.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Params.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.script; +package org.elasticsearch.xpack.sql.expression.gen.script; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/ParamsBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ParamsBuilder.java similarity index 93% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/ParamsBuilder.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ParamsBuilder.java index 8f99f29b9c1..6719776c84a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/ParamsBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ParamsBuilder.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.script; +package org.elasticsearch.xpack.sql.expression.gen.script; import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/Script.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Script.java similarity index 85% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/Script.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Script.java index ceabac9c499..85c436cf69e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/Script.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Script.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.sql.expression.function.scalar.script; +package org.elasticsearch.xpack.sql.expression.gen.script; class Script extends Param { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/ScriptTemplate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptTemplate.java similarity index 87% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/ScriptTemplate.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptTemplate.java index 35b7680dcca..9279cdcc1b8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/ScriptTemplate.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptTemplate.java @@ -3,11 +3,10 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.script; +package org.elasticsearch.xpack.sql.expression.gen.script; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; -import org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalSqlScriptUtils; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -91,8 +90,4 @@ public class ScriptTemplate { public String toString() { return bindTemplate(); } - - public static String formatTemplate(String template) { - return template.replace("{sql}", InternalSqlScriptUtils.class.getSimpleName()).replace("{}", "params.%s"); - } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java new file mode 100644 index 00000000000..f171c03b1e4 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.gen.script; + +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.Attribute; +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; +import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute; +import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute; +import org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalSqlScriptUtils; +import org.elasticsearch.xpack.sql.type.DataType; + +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; + +/** + * Mixin-like interface for customizing the default script generation. 
+ */
+public interface ScriptWeaver {
+
+    default ScriptTemplate asScript(Expression exp) {
+        if (exp.foldable()) {
+            return scriptWithFoldable(exp);
+        }
+
+        Attribute attr = Expressions.attribute(exp);
+        if (attr != null) {
+            if (attr instanceof ScalarFunctionAttribute) {
+                return scriptWithScalar((ScalarFunctionAttribute) attr);
+            }
+            if (attr instanceof AggregateFunctionAttribute) {
+                return scriptWithAggregate((AggregateFunctionAttribute) attr);
+            }
+            if (attr instanceof FieldAttribute) {
+                return scriptWithField((FieldAttribute) attr);
+            }
+        }
+        throw new SqlIllegalArgumentException("Cannot evaluate script for expression {}", exp);
+    }
+
+    DataType dataType();
+
+    default ScriptTemplate scriptWithFoldable(Expression foldable) {
+        return new ScriptTemplate(processScript("{}"),
+                paramsBuilder().variable(foldable.fold()).build(),
+                dataType());
+    }
+
+    default ScriptTemplate scriptWithScalar(ScalarFunctionAttribute scalar) {
+        ScriptTemplate nested = scalar.script();
+        return new ScriptTemplate(processScript(nested.template()),
+                paramsBuilder().script(nested.params()).build(),
+                dataType());
+    }
+
+    default ScriptTemplate scriptWithAggregate(AggregateFunctionAttribute aggregate) {
+        return new ScriptTemplate(processScript("{}"),
+                paramsBuilder().agg(aggregate).build(),
+                dataType());
+    }
+
+    default ScriptTemplate scriptWithField(FieldAttribute field) {
+        return new ScriptTemplate(processScript("doc[{}].value"),
+                paramsBuilder().variable(field.name()).build(),
+                dataType());
+    }
+
+    default String processScript(String script) {
+        return formatTemplate(script);
+    }
+
+    default String formatTemplate(String template) {
+        return template.replace("{sql}", InternalSqlScriptUtils.class.getSimpleName()).replace("{}", "params.%s");
+    }
+}
\ No newline at end of file
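The ScriptWeaver interface just added centralizes how expressions become script templates: foldables are bound as parameters, scalar functions nest their already-generated script, aggregates become agg references, and plain fields become doc-values lookups. To make the template shapes visible, here is the formatTemplate substitution replayed standalone; the whitelist class name is inlined as a plain string only because this sketch cannot reference the real InternalSqlScriptUtils:

--------------------------------------------------
// Replays ScriptWeaver#formatTemplate: {sql} becomes the whitelist class name
// and every {} becomes a params.%s slot for a later format pass.
public class TemplateSketch {

    static String formatTemplate(String template) {
        return template.replace("{sql}", "InternalSqlScriptUtils").replace("{}", "params.%s");
    }

    public static void main(String[] args) {
        // scriptWithField weaves a doc-values access:
        System.out.println(formatTemplate("doc[{}].value")); // doc[params.%s].value
        // scriptWithFoldable weaves a constant:
        System.out.println(formatTemplate("{}"));            // params.%s
    }
}
--------------------------------------------------

The %s slots are filled later with generated parameter names, while the actual values travel in the script's params map (built through paramsBuilder()).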
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/Var.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Var.java
similarity index 84%
rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/Var.java
rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Var.java
index 96bda8eabe6..f22e510fdc4 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/script/Var.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Var.java
@@ -3,7 +3,7 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
-package org.elasticsearch.xpack.sql.expression.function.scalar.script;
+package org.elasticsearch.xpack.sql.expression.gen.script;
 
 class Var extends Param {
 
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/And.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/And.java
index 0cabf065f8f..c7f5b7dacec 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/And.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/And.java
@@ -5,10 +5,8 @@
 */
 package org.elasticsearch.xpack.sql.expression.predicate;
 
-import org.elasticsearch.xpack.sql.expression.BinaryLogic;
-import org.elasticsearch.xpack.sql.expression.BinaryOperator;
-import org.elasticsearch.xpack.sql.expression.BinaryOperator.Negateable;
 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator.Negateable;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
@@ -17,7 +15,7 @@ import java.util.Objects;
 public class And extends BinaryLogic implements Negateable {
 
     public And(Location location, Expression left, Expression right) {
-        super(location, left, right);
+        super(location, left, right, "&&");
     }
 
     @Override
@@ -30,6 +28,7 @@ public class And extends BinaryLogic implements Negateable {
         return new And(location(), newLeft, newRight);
     }
 
+    @Override
     public Object fold() {
         return Objects.equals(left().fold(), Boolean.TRUE) && Objects.equals(right().fold(), Boolean.TRUE);
     }
@@ -43,9 +42,4 @@ public class And extends BinaryLogic implements Negateable {
     public And swapLeftAndRight() {
         return new And(location(), right(), left());
     }
-
-    @Override
-    public String symbol() {
-        return "&&";
-    }
 }
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/BinaryLogic.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryLogic.java
similarity index 54%
rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/BinaryLogic.java
rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryLogic.java
index 8cb51a6ea0c..01fefb2c8e7 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/BinaryLogic.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryLogic.java
@@ -3,15 +3,19 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
-package org.elasticsearch.xpack.sql.expression;
+package org.elasticsearch.xpack.sql.expression.predicate;
 
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.gen.pipeline.AggNameInput;
+import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
+import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.type.DataType;
 
 public abstract class BinaryLogic extends BinaryOperator {
 
-    protected BinaryLogic(Location location, Expression left, Expression right) {
-        super(location, left, right);
+    protected BinaryLogic(Location location, Expression left, Expression right, String symbol) {
+        super(location, left, right, symbol);
     }
 
     @Override
@@ -24,4 +28,14 @@ public abstract class BinaryLogic extends BinaryOperator {
         return DataType.BOOLEAN == inputType ? TypeResolution.TYPE_RESOLVED : new TypeResolution(
                 "'%s' requires type %s not %s", symbol(), DataType.BOOLEAN.sqlName(), inputType.sqlName());
     }
+
+    @Override
+    protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) {
+        return new ScriptTemplate("");
+    }
 }
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/BinaryOperator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryOperator.java
similarity index 65%
rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/BinaryOperator.java
rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryOperator.java
index bfa4358d240..27e979bc30a 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/BinaryOperator.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryOperator.java
@@ -3,24 +3,30 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
-package org.elasticsearch.xpack.sql.expression;
+package org.elasticsearch.xpack.sql.expression.predicate;
 
+import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.type.DataType;
 
-//Binary expression that requires both input expressions to have the same type
-//Compatible types should be handled by the analyzer (by using the narrowest type)
-public abstract class BinaryOperator extends BinaryExpression {
+
+/**
+ * Operator is a specialized binary predicate where both sides have compatible types
+ * (it's up to the analyzer to do any conversion if needed).
+ */
+public abstract class BinaryOperator extends BinaryPredicate {
 
     public interface Negateable {
-        BinaryExpression negate();
+        BinaryOperator negate();
     }
 
-    protected BinaryOperator(Location location, Expression left, Expression right) {
-        super(location, left, right);
+    protected BinaryOperator(Location location, Expression left, Expression right, String symbol) {
+        super(location, left, right, symbol);
     }
 
     protected abstract TypeResolution resolveInputType(DataType inputType);
 
+    public abstract BinaryOperator swapLeftAndRight();
+
     @Override
     protected TypeResolution resolveType() {
         if (!childrenResolved()) {
@@ -36,4 +42,4 @@ public abstract class BinaryOperator extends BinaryExpression {
         }
         return resolution;
     }
-}
+}
\ No newline at end of file
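The new BinaryPredicate base class in the next file carries the naming scheme for operators: operands are rendered through Expressions.name(), literals stay bare, and anything else is parenthesized around the symbol. A worked example, sketched with plain strings standing in for expression names (assumed inputs, not this patch's API):

--------------------------------------------------
public class PredicateNameSketch {

    // Same shape as BinaryPredicate#name(): wrap non-literal operands in
    // parentheses on either side of the operator symbol.
    static String name(String left, boolean leftIsLiteral, String symbol,
                       String right, boolean rightIsLiteral) {
        StringBuilder sb = new StringBuilder();
        sb.append(left);
        if (!leftIsLiteral) {
            sb.insert(0, "(");
            sb.append(")");
        }
        sb.append(" ").append(symbol).append(" ");
        int pos = sb.length();
        sb.append(right);
        if (!rightIsLiteral) {
            sb.insert(pos, "(");
            sb.append(")");
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(name("a", false, "+", "10", true));  // (a) + 10
        System.out.println(name("a", false, "&&", "b", false)); // (a) && (b)
    }
}
--------------------------------------------------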
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryPredicate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryPredicate.java
new file mode 100644
index 00000000000..ead90c13e88
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryPredicate.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.expression.predicate;
+
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.Expressions;
+import org.elasticsearch.xpack.sql.expression.Literal;
+import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction;
+import org.elasticsearch.xpack.sql.tree.Location;
+
+import java.util.Objects;
+
+/**
+ * Binary operator. Operators act as _special_ functions in that they have a symbol
+ * instead of a name and do not use parentheses.
+ * Furthermore, they are not registered like the rest of the functions, as they are implicit
+ * to the language.
+ */
+public abstract class BinaryPredicate extends BinaryScalarFunction {
+
+    private final String symbol;
+    private final String name;
+
+    protected BinaryPredicate(Location location, Expression left, Expression right, String symbol) {
+        super(location, left, right);
+        this.name = name(left, right, symbol);
+        this.symbol = symbol;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(left(), right());
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        // NB: the id and name are being ignored for binary expressions as most of them
+        // are operators
+
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+
+        BinaryPredicate other = (BinaryPredicate) obj;
+
+        return Objects.equals(symbol, other.symbol)
+                && Objects.equals(left(), other.left())
+                && Objects.equals(right(), other.right());
+    }
+
+    private static String name(Expression left, Expression right, String symbol) {
+        StringBuilder sb = new StringBuilder();
+        sb.append(Expressions.name(left));
+        if (!(left instanceof Literal)) {
+            sb.insert(0, "(");
+            sb.append(")");
+        }
+        sb.append(" ");
+        sb.append(symbol);
+        sb.append(" ");
+        int pos = sb.length();
+        sb.append(Expressions.name(right));
+        if (!(right instanceof Literal)) {
+            sb.insert(pos, "(");
+            sb.append(")");
+        }
+        return sb.toString();
+    }
+
+    @Override
+    public String name() {
+        return name;
+    }
+
+    public final String symbol() {
+        return symbol;
+    }
+}
\ No newline at end of file
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java
index 5793f46c465..fb04f6d438a 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java
@@ -5,23 +5,27 @@
 */
 package org.elasticsearch.xpack.sql.expression.predicate;
 
-import java.util.List;
-import java.util.Objects;
-
+import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
+import org.elasticsearch.xpack.sql.expression.Attribute;
 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.NamedExpression;
+import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
 import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.util.CollectionUtils;
 
-public class In extends Expression {
+import java.util.List;
+import java.util.Objects;
+
+public class In extends NamedExpression {
 
     private final Expression value;
     private final List<Expression> list;
     private final boolean nullable, foldable;
 
     public In(Location location, Expression value, List<Expression> list) {
-        super(location, CollectionUtils.combine(list, value));
+        super(location, null, CollectionUtils.combine(list, value), null);
 
         this.value = value;
         this.list = list;
@@ -65,6 +69,16 @@
         return foldable;
     }
 
+    @Override
+    public Attribute toAttribute() {
+        throw new SqlIllegalArgumentException("not implemented yet");
+    }
+
+    @Override
+    public ScriptTemplate asScript() {
+        throw new SqlIllegalArgumentException("not implemented yet");
+    }
+
     @Override
     public int hashCode() {
         return Objects.hash(value, list);
@@ -75,8 +89,7 @@
         if (this == obj) {
             return true;
         }
-
-        if (!super.equals(obj) || getClass() !=
obj.getClass()) { + if (obj == null || getClass() != obj.getClass()) { return false; } @@ -84,4 +97,4 @@ public class In extends Expression { return Objects.equals(value, other.value) && Objects.equals(list, other.list); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/IsNotNull.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/IsNotNull.java index 42140510245..cabca2aaf2d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/IsNotNull.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/IsNotNull.java @@ -5,8 +5,11 @@ */ package org.elasticsearch.xpack.sql.expression.predicate; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.UnaryExpression; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -28,10 +31,21 @@ public class IsNotNull extends UnaryExpression { return new IsNotNull(location(), newChild); } + @Override public Object fold() { return child().fold() != null && !DataTypes.isNull(child().dataType()); } + @Override + protected Pipe makePipe() { + throw new SqlIllegalArgumentException("Not supported yet"); + } + + @Override + public ScriptTemplate asScript() { + throw new SqlIllegalArgumentException("Not supported yet"); + } + @Override public boolean nullable() { return false; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Not.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Not.java index 71ce42ba8aa..4b6e7ba4b46 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Not.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Not.java @@ -5,17 +5,20 @@ */ package org.elasticsearch.xpack.sql.expression.predicate; -import org.elasticsearch.xpack.sql.expression.BinaryOperator.Negateable; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; -import org.elasticsearch.xpack.sql.expression.UnaryExpression; +import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator.Negateable; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Objects; -public class Not extends UnaryExpression { +public class Not extends UnaryScalarFunction { public Not(Location location, Expression child) { super(location, child); @@ -23,7 +26,7 @@ public class Not extends UnaryExpression { @Override protected NodeInfo info() { - return NodeInfo.create(this, Not::new, child()); + return NodeInfo.create(this, Not::new, field()); } @Override @@ -31,22 +34,33 @@ public class Not extends UnaryExpression { return new Not(location(), newChild); } + 
@Override protected TypeResolution resolveType() { - if (DataType.BOOLEAN == child().dataType()) { + if (DataType.BOOLEAN == field().dataType()) { return TypeResolution.TYPE_RESOLVED; } - return new TypeResolution("Cannot negate expression ([" + Expressions.name(child()) + "] of type [" - + child().dataType().esType + "])"); + return new TypeResolution("Cannot negate expression ([" + Expressions.name(field()) + "] of type [" + + field().dataType().esType + "])"); } @Override public Object fold() { - return Objects.equals(child().fold(), Boolean.TRUE) ? Boolean.FALSE : Boolean.TRUE; + return Objects.equals(field().fold(), Boolean.TRUE) ? Boolean.FALSE : Boolean.TRUE; + } + + @Override + protected Pipe makePipe() { + throw new SqlIllegalArgumentException("Not supported yet"); + } + + @Override + public ScriptTemplate asScript() { + throw new SqlIllegalArgumentException("Not supported yet"); } @Override protected Expression canonicalize() { - Expression canonicalChild = child().canonical(); + Expression canonicalChild = field().canonical(); if (canonicalChild instanceof Negateable) { return ((Negateable) canonicalChild).negate(); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Or.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Or.java index 49bd40b2846..614ef94adaa 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Or.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Or.java @@ -5,10 +5,8 @@ */ package org.elasticsearch.xpack.sql.expression.predicate; -import org.elasticsearch.xpack.sql.expression.BinaryLogic; -import org.elasticsearch.xpack.sql.expression.BinaryOperator; -import org.elasticsearch.xpack.sql.expression.BinaryOperator.Negateable; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator.Negateable; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; @@ -17,7 +15,7 @@ import java.util.Objects; public class Or extends BinaryLogic implements Negateable { public Or(Location location, Expression left, Expression right) { - super(location, left, right); + super(location, left, right, "||"); } @Override @@ -44,9 +42,4 @@ public class Or extends BinaryLogic implements Negateable { public And negate() { return new And(location(), new Not(location(), left()), new Not(location(), right())); } - - @Override - public String symbol() { - return "||"; - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Range.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Range.java index c17e9634492..4395e263e06 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Range.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Range.java @@ -5,23 +5,45 @@ */ package org.elasticsearch.xpack.sql.expression.predicate; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; +import org.elasticsearch.xpack.sql.expression.Foldables; +import org.elasticsearch.xpack.sql.expression.NamedExpression; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import 
org.elasticsearch.xpack.sql.expression.gen.script.Params; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptWeaver; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.EsField; import java.util.Arrays; import java.util.List; +import java.util.Locale; import java.util.Objects; +import static java.lang.String.format; +import static java.util.Collections.emptyMap; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; + // BETWEEN or range - is a mix of gt(e) AND lt(e) -public class Range extends Expression { +public class Range extends NamedExpression implements ScriptWeaver { private final Expression value, lower, upper; private final boolean includeLower, includeUpper; public Range(Location location, Expression value, Expression lower, boolean includeLower, Expression upper, boolean includeUpper) { - super(location, Arrays.asList(value, lower, upper)); + this(location, null, value, lower, includeLower, upper, includeUpper); + } + + public Range(Location location, String name, Expression value, Expression lower, boolean includeLower, Expression upper, + boolean includeUpper) { + super(location, name == null ? defaultName(value, lower, upper, includeLower, includeUpper) : name, + Arrays.asList(value, lower, upper), null); this.value = value; this.lower = lower; @@ -32,7 +54,7 @@ public class Range extends Expression { @Override protected NodeInfo info() { - return NodeInfo.create(this, Range::new, value, lower, includeLower, upper, includeUpper); + return NodeInfo.create(this, Range::new, name(), value, lower, includeLower, upper, includeUpper); } @Override @@ -106,6 +128,36 @@ public class Range extends Expression { return DataType.BOOLEAN; } + @Override + public ScriptTemplate asScript() { + ScriptTemplate scriptTemplate = asScript(value); + + String template = formatTemplate(format(Locale.ROOT, "({} %s %s) && (%s %s {})", + includeLower() ? "<=" : "<", + scriptTemplate.template(), + scriptTemplate.template(), + includeUpper() ? "<=" : "<")); + + Params params = paramsBuilder().variable(Foldables.valueOf(lower)) + .script(scriptTemplate.params()) + .script(scriptTemplate.params()) + .variable(Foldables.valueOf(upper)) + .build(); + + return new ScriptTemplate(template, params, DataType.BOOLEAN); + } + + @Override + protected Pipe makePipe() { + throw new SqlIllegalArgumentException("Not supported yet"); + } + + @Override + public Attribute toAttribute() { + return new FieldAttribute(location(), "not yet implemented", + new EsField("not yet implemented", DataType.UNSUPPORTED, emptyMap(), false)); + } + @Override public int hashCode() { return Objects.hash(includeLower, includeUpper, value, lower, upper); @@ -129,8 +181,7 @@ public class Range extends Expression { && Objects.equals(upper, other.upper); } - @Override - public String toString() { + private static String defaultName(Expression value, Expression lower, Expression upper, boolean includeLower, boolean includeUpper) { StringBuilder sb = new StringBuilder(); sb.append(lower); sb.append(includeLower ? 
" <= " : " < "); @@ -139,4 +190,9 @@ public class Range extends Expression { sb.append(upper); return sb.toString(); } -} + + @Override + public String toString() { + return name(); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextPredicate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextPredicate.java index 0ec11351320..07f284c90ca 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextPredicate.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextPredicate.java @@ -84,4 +84,4 @@ public abstract class FullTextPredicate extends Expression { return Objects.equals(query, other.query) && Objects.equals(options, other.options); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Add.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Add.java similarity index 77% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Add.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Add.java index 92d6e5218ac..36becf9f17b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Add.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Add.java @@ -3,17 +3,17 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic; +package org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** * Addition function ({@code a + b}). */ -public class Add extends ArithmeticFunction { +public class Add extends ArithmeticOperation { public Add(Location location, Expression left, Expression right) { super(location, left, right, BinaryArithmeticOperation.ADD); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/ArithmeticOperation.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/ArithmeticOperation.java new file mode 100644 index 00000000000..24a7281beb9 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/ArithmeticOperation.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+package org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic;
+
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.Expressions;
+import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
+import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate;
+import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator;
+import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
+import org.elasticsearch.xpack.sql.tree.Location;
+import org.elasticsearch.xpack.sql.type.DataType;
+import org.elasticsearch.xpack.sql.type.DataTypeConversion;
+
+import java.util.Locale;
+
+import static java.lang.String.format;
+import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder;
+
+public abstract class ArithmeticOperation extends BinaryOperator {
+
+    private final BinaryArithmeticOperation operation;
+
+    ArithmeticOperation(Location location, Expression left, Expression right, BinaryArithmeticOperation operation) {
+        super(location, left, right, operation.symbol());
+        this.operation = operation;
+    }
+
+    @Override
+    protected TypeResolution resolveInputType(DataType inputType) {
+        return inputType.isNumeric() ?
+                TypeResolution.TYPE_RESOLVED :
+                new TypeResolution("'%s' requires a numeric type, received %s", symbol(), inputType.esType);
+    }
+
+    @Override
+    public ArithmeticOperation swapLeftAndRight() {
+        return this;
+    }
+
+    @Override
+    public DataType dataType() {
+        return DataTypeConversion.commonType(left().dataType(), right().dataType());
+    }
+
+    @Override
+    public Object fold() {
+        return operation.apply((Number) left().fold(), (Number) right().fold());
+    }
+
+    @Override
+    protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) {
+        String op = operation.symbol();
+        // escape %
+        if (operation == BinaryArithmeticOperation.MOD) {
+            op = "%" + op;
+        }
+        return new ScriptTemplate(format(Locale.ROOT, "(%s) %s (%s)", leftScript.template(), op, rightScript.template()),
+                paramsBuilder()
+                    .script(leftScript.params()).script(rightScript.params())
+                    .build(), dataType());
+    }
+
+    @Override
+    protected Pipe makePipe() {
+        return new BinaryArithmeticPipe(location(), this, Expressions.pipe(left()), Expressions.pipe(right()), operation);
+    }
+}
\ No newline at end of file
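The % escaping in asScriptFrom above deserves a standalone illustration: the assembled template later goes through a String.format-style pass (each {} placeholder has already become params.%s), so MOD's own symbol has to be doubled to survive formatting. A sketch of just the assembly step, with the operand templates passed in as plain strings (assumed inputs for illustration):

--------------------------------------------------
import java.util.Locale;

public class ArithmeticTemplateSketch {

    // Mirrors ArithmeticOperation#asScriptFrom: parenthesize both woven operand
    // templates and double '%' so the later format pass does not consume it.
    static String combine(String leftTemplate, String symbol, String rightTemplate) {
        String op = symbol;
        if ("%".equals(op)) {
            op = "%" + op; // % -> %%
        }
        return String.format(Locale.ROOT, "(%s) %s (%s)", leftTemplate, op, rightTemplate);
    }

    public static void main(String[] args) {
        System.out.println(combine("doc[{}].value", "+", "{}")); // (doc[{}].value) + ({})
        System.out.println(combine("{}", "%", "{}"));            // ({}) %% ({})
    }
}
--------------------------------------------------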
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Arithmetics.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Arithmetics.java
similarity index 96%
rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Arithmetics.java
rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Arithmetics.java
index 51cccb85066..07fcef39168 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Arithmetics.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Arithmetics.java
@@ -3,13 +3,13 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
-package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;
+package org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic;
 
 /**
  * Arithmetic operation using the type widening rules of the JLS 5.6.2 namely
  * widen to double or float or long or int in this order.
  */
-abstract class Arithmetics {
+public abstract class Arithmetics {
 
     static Number add(Number l, Number r) {
         if (l == null || r == null) {
             return null;
         }
@@ -83,7 +83,7 @@
         return l.intValue() / r.intValue();
     }
 
-    static Number mod(Number l, Number r) {
+    public static Number mod(Number l, Number r) {
         if (l == null || r == null) {
             return null;
         }
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticPipe.java
new file mode 100644
index 00000000000..06bed035141
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticPipe.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic;
+
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe;
+import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
+import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
+import org.elasticsearch.xpack.sql.tree.Location;
+import org.elasticsearch.xpack.sql.tree.NodeInfo;
+
+import java.util.Objects;
+
+public class BinaryArithmeticPipe extends BinaryPipe {
+
+    private final BinaryArithmeticOperation operation;
+
+    public BinaryArithmeticPipe(Location location, Expression expression, Pipe left,
+            Pipe right, BinaryArithmeticOperation operation) {
+        super(location, expression, left, right);
+        this.operation = operation;
+    }
+
+    @Override
+    protected NodeInfo<BinaryArithmeticPipe> info() {
+        return NodeInfo.create(this, BinaryArithmeticPipe::new,
+                expression(), left(), right(), operation);
+    }
+
+    @Override
+    protected BinaryPipe replaceChildren(Pipe left, Pipe right) {
+        return new BinaryArithmeticPipe(location(), expression(), left, right, operation);
+    }
+
+    @Override
+    public BinaryArithmeticProcessor asProcessor() {
+        return new BinaryArithmeticProcessor(left().asProcessor(), right().asProcessor(), operation);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(super.hashCode(), operation);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (super.equals(obj)) {
+            BinaryArithmeticPipe other = (BinaryArithmeticPipe) obj;
+            return Objects.equals(operation, other.operation);
+        }
+        return false;
+    }
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/BinaryArithmeticProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java
similarity index 87%
rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/BinaryArithmeticProcessor.java
rename to
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java index 3f54004c1b0..f7811ba7cba 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/BinaryArithmeticProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java @@ -3,13 +3,13 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic; +package org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryNumericProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; import java.io.IOException; import java.util.function.BiFunction; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Div.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Div.java similarity index 71% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Div.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Div.java index fa3a82f3113..643286b1ae4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Div.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Div.java @@ -3,11 +3,10 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic; +package org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -16,7 +15,7 @@ import org.elasticsearch.xpack.sql.type.DataTypeConversion; /** * Division function ({@code a / b}). 
 */
-public class Div extends ArithmeticFunction {
+public class Div extends ArithmeticOperation {
 
     public Div(Location location, Expression left, Expression right) {
         super(location, left, right, BinaryArithmeticOperation.DIV);
@@ -28,7 +27,7 @@
     }
 
     @Override
-    protected BinaryScalarFunction replaceChildren(Expression newLeft, Expression newRight) {
+    protected Div replaceChildren(Expression newLeft, Expression newRight) {
        return new Div(location(), newLeft, newRight);
     }
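With Arithmetics.mod made public in the hunk above and Mod exposed both as the % operator and as the MOD() function, the JLS 5.6.2 widening these helpers rely on is easy to show in isolation. A sketch that assumes the same ladder visible in the div code of this patch (double, then float, then long, then int, with nulls propagating):

--------------------------------------------------
public class WideningSketch {

    // JLS 5.6.2-style widening as in Arithmetics: the widest operand type wins;
    // nulls propagate instead of throwing.
    static Number mod(Number l, Number r) {
        if (l == null || r == null) {
            return null;
        }
        if (l instanceof Double || r instanceof Double) {
            return l.doubleValue() % r.doubleValue();
        }
        if (l instanceof Float || r instanceof Float) {
            return l.floatValue() % r.floatValue();
        }
        if (l instanceof Long || r instanceof Long) {
            return l.longValue() % r.longValue();
        }
        return l.intValue() % r.intValue();
    }

    public static void main(String[] args) {
        System.out.println(mod(7, 3));    // 1
        System.out.println(mod(7.5, 3));  // 1.5 (widened to double)
        System.out.println(mod(null, 3)); // null
    }
}
--------------------------------------------------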
*/ -public class Mod extends ArithmeticFunction { +public class Mod extends ArithmeticOperation { public Mod(Location location, Expression left, Expression right) { super(location, left, right, BinaryArithmeticOperation.MOD); @@ -27,7 +28,7 @@ public class Mod extends ArithmeticFunction { } @Override - protected BinaryScalarFunction replaceChildren(Expression newLeft, Expression newRight) { + protected Mod replaceChildren(Expression newLeft, Expression newRight) { return new Mod(location(), newLeft, newRight); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Mul.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java similarity index 65% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Mul.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java index 87d2574c262..edfea25d5c0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Mul.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java @@ -3,18 +3,17 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic; +package org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** * Multiplication function ({@code a * b}). */ -public class Mul extends ArithmeticFunction { +public class Mul extends ArithmeticOperation { public Mul(Location location, Expression left, Expression right) { super(location, left, right, BinaryArithmeticOperation.MUL); @@ -26,7 +25,7 @@ public class Mul extends ArithmeticFunction { } @Override - protected BinaryScalarFunction replaceChildren(Expression newLeft, Expression newRight) { + protected Mul replaceChildren(Expression newLeft, Expression newRight) { return new Mul(location(), newLeft, newRight); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Neg.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Neg.java similarity index 58% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Neg.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Neg.java index 44cd51522b1..c5758b787f0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Neg.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Neg.java @@ -3,16 +3,16 @@ * or more contributor license agreements. 
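The hunks above also tighten each override's signature (`Mod replaceChildren(...)` rather than `BinaryScalarFunction replaceChildren(...)`), which is plain Java covariant return typing. A minimal sketch of the idiom:

--------------------------------------------------
// The override declares a narrower return type than the parent, so callers
// holding the concrete node type keep it across child replacement.
abstract class BinaryNode {
    abstract BinaryNode replaceChildren(BinaryNode newLeft, BinaryNode newRight);
}

final class MulNode extends BinaryNode {
    final BinaryNode left;
    final BinaryNode right;

    MulNode(BinaryNode left, BinaryNode right) {
        this.left = left;
        this.right = right;
    }

    @Override
    MulNode replaceChildren(BinaryNode newLeft, BinaryNode newRight) {
        return new MulNode(newLeft, newRight);
    }
}
--------------------------------------------------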
Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic; +package org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.NamedExpression; import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.UnaryArithmeticProcessor.UnaryArithmeticOperation; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.UnaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptWeaver; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.UnaryArithmeticProcessor.UnaryArithmeticOperation; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -20,7 +20,8 @@ import org.elasticsearch.xpack.sql.type.DataType; /** * Negation function (@{code -x}). */ -public class Neg extends UnaryScalarFunction { +public class Neg extends UnaryScalarFunction implements ScriptWeaver { + public Neg(Location location, Expression field) { super(location, field); } @@ -31,7 +32,7 @@ public class Neg extends UnaryScalarFunction { } @Override - protected UnaryScalarFunction replaceChild(Expression newChild) { + protected Neg replaceChild(Expression newChild) { return new Neg(location(), newChild); } @@ -56,14 +57,12 @@ public class Neg extends UnaryScalarFunction { } @Override - protected String formatScript(String template) { - // Painless supports negating (and hopefully its corner cases) - return super.formatScript("-" + template); + public String processScript(String template) { + return super.processScript("-" + template); } @Override - protected ProcessorDefinition makeProcessorDefinition() { - return new UnaryProcessorDefinition(location(), this, ProcessorDefinitions.toProcessorDefinition(field()), - new UnaryArithmeticProcessor(UnaryArithmeticOperation.NEGATE)); + protected Pipe makePipe() { + return new UnaryPipe(location(), this, Expressions.pipe(field()), new UnaryArithmeticProcessor(UnaryArithmeticOperation.NEGATE)); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Sub.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java similarity index 65% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Sub.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java index bd36a8dd843..25c66572d20 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/Sub.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java @@ -3,18 +3,17 @@ * or more 
contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic; +package org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** * Subtraction function ({@code a - b}). */ -public class Sub extends ArithmeticFunction { +public class Sub extends ArithmeticOperation { public Sub(Location location, Expression left, Expression right) { super(location, left, right, BinaryArithmeticOperation.SUB); @@ -26,7 +25,7 @@ public class Sub extends ArithmeticFunction { } @Override - protected BinaryScalarFunction replaceChildren(Expression newLeft, Expression newRight) { + protected Sub replaceChildren(Expression newLeft, Expression newRight) { return new Sub(location(), newLeft, newRight); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/UnaryArithmeticProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/UnaryArithmeticProcessor.java similarity index 92% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/UnaryArithmeticProcessor.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/UnaryArithmeticProcessor.java index d0da0f9e719..e75ca47b7f4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/UnaryArithmeticProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/UnaryArithmeticProcessor.java @@ -3,12 +3,12 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic; +package org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; import java.util.function.Function; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparison.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparison.java new file mode 100644 index 00000000000..2ebfd57a4d2 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparison.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.type.DataType; + +import java.util.Locale; + +import static java.lang.String.format; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; + +// marker class to indicate operations that rely on values +public abstract class BinaryComparison extends BinaryOperator { + + private final BinaryComparisonOperation operation; + + public BinaryComparison(Location location, Expression left, Expression right, BinaryComparisonOperation operation) { + super(location, left, right, operation.symbol()); + this.operation = operation; + } + + @Override + protected TypeResolution resolveInputType(DataType inputType) { + return TypeResolution.TYPE_RESOLVED; + } + + @Override + protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) { + String op = operation.symbol(); + return new ScriptTemplate(format(Locale.ROOT, "(%s) %s (%s)", leftScript.template(), op, rightScript.template()), + paramsBuilder() + .script(leftScript.params()).script(rightScript.params()) + .build(), dataType()); + } + + @Override + protected Pipe makePipe() { + return new BinaryComparisonPipe(location(), this, Expressions.pipe(left()), Expressions.pipe(right()), operation); + } + + @Override + public Object fold() { + return operation.apply(left().fold(), right().fold()); + } + + @Override + protected Expression canonicalize() { + return left().hashCode() > right().hashCode() ? swapLeftAndRight() : this; + } + + @Override + public DataType dataType() { + return DataType.BOOLEAN; + } + + public static Integer compare(Object left, Object right) { + return Comparisons.compare(left, right); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(left()); + sb.append(" "); + sb.append(symbol()); + sb.append(" "); + sb.append(right()); + return sb.toString(); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonPipe.java new file mode 100644 index 00000000000..a7ca4a2dea3 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonPipe.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
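`asScriptFrom` above splices the operand scripts into a `(%s) %s (%s)` template. A runnable illustration of why the parentheses matter once fragments nest; the doc-value and parameter names are invented for the example:

--------------------------------------------------
import java.util.Locale;

// Compose binary script fragments; the outer parentheses preserve operand
// precedence no matter what the inner templates contain.
final class ScriptCompose {
    static String binary(String left, String symbol, String right) {
        return String.format(Locale.ROOT, "(%s) %s (%s)", left, symbol, right);
    }

    public static void main(String[] args) {
        String sum = binary("doc['a'].value", "+", "params.v0");
        System.out.println(binary(sum, ">", "params.v1"));
        // prints: ((doc['a'].value) + (params.v0)) > (params.v1)
    }
}
--------------------------------------------------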
+ */ +package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; + +import java.util.Objects; + +public class BinaryComparisonPipe extends BinaryPipe { + + private final BinaryComparisonOperation operation; + + public BinaryComparisonPipe(Location location, Expression expression, Pipe left, + Pipe right, BinaryComparisonOperation operation) { + super(location, expression, left, right); + this.operation = operation; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, BinaryComparisonPipe::new, expression(), left(), right(), operation); + } + + @Override + protected BinaryPipe replaceChildren(Pipe left, Pipe right) { + return new BinaryComparisonPipe(location(), expression(), left, right, operation); + } + + @Override + public BinaryComparisonProcessor asProcessor() { + return new BinaryComparisonProcessor(left().asProcessor(), right().asProcessor(), operation); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), operation); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj)) { + BinaryComparisonPipe other = (BinaryComparisonPipe) obj; + return Objects.equals(operation, other.operation); + } + return false; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonProcessor.java new file mode 100644 index 00000000000..25856f91403 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonProcessor.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
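A pipe like the one above is the buildable, serializable description of a computation, and `asProcessor()` lowers it into the executable tree. A heavily simplified model of that two-layer split, using invented mini-interfaces rather than the real `Pipe`/`Processor` types:

--------------------------------------------------
import java.util.function.BiFunction;

interface MiniPipe {
    MiniProcessor asProcessor();
}

interface MiniProcessor {
    Object process(Object input);
}

final class MiniComparisonPipe implements MiniPipe {
    private final MiniPipe left;
    private final MiniPipe right;
    private final BiFunction<Object, Object, Boolean> operation;

    MiniComparisonPipe(MiniPipe left, MiniPipe right, BiFunction<Object, Object, Boolean> operation) {
        this.left = left;
        this.right = right;
        this.operation = operation;
    }

    @Override
    public MiniProcessor asProcessor() {
        // Lower the children first, then wrap them: the executable tree
        // mirrors the descriptive one node for node.
        MiniProcessor l = left.asProcessor();
        MiniProcessor r = right.asProcessor();
        return input -> operation.apply(l.process(input), r.process(input));
    }
}
--------------------------------------------------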
+ */ +package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; + +import java.io.IOException; +import java.util.function.BiFunction; + +public class BinaryComparisonProcessor extends BinaryOperatorProcessor<BinaryComparisonOperation> { + + public enum BinaryComparisonOperation implements BiFunction<Object, Object, Boolean> { + + EQ(Comparisons::eq, "=="), + GT(Comparisons::gt, ">"), + GTE(Comparisons::gte, ">="), + LT(Comparisons::lt, "<"), + LTE(Comparisons::lte, "<="); + + private final BiFunction<Object, Object, Boolean> process; + private final String symbol; + + BinaryComparisonOperation(BiFunction<Object, Object, Boolean> process, String symbol) { + this.process = process; + this.symbol = symbol; + } + + public String symbol() { + return symbol; + } + + @Override + public final Boolean apply(Object left, Object right) { + return process.apply(left, right); + } + + @Override + public String toString() { + return symbol; + } + } + + public static final String NAME = "cb"; + + public BinaryComparisonProcessor(Processor left, Processor right, BinaryComparisonOperation operation) { + super(left, right, operation); + } + + public BinaryComparisonProcessor(StreamInput in) throws IOException { + super(in, i -> i.readEnum(BinaryComparisonOperation.class)); + } + + @Override + protected void doWrite(StreamOutput out) throws IOException { + out.writeEnum(operation()); + } + + @Override + public String getWriteableName() { + return NAME; + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryOperatorProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryOperatorProcessor.java new file mode 100644 index 00000000000..621154eb8d0 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryOperatorProcessor.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License.
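The two constructors above follow the usual writeable pattern: one for building the processor, one for deserializing it, with `writeEnum`/`readEnum` carrying the operation. A plain-JDK stand-in for that round trip (the real `StreamInput`/`StreamOutput` encoding differs in detail):

--------------------------------------------------
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// The constant's ordinal is what travels, which is why reordering or
// inserting constants in a serialized enum is a wire-format break.
enum CmpOp { EQ, GT, GTE, LT, LTE }

final class EnumWire {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            out.writeByte(CmpOp.LTE.ordinal());
        }
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            System.out.println(CmpOp.values()[in.readByte()]); // LTE
        }
    }
}
--------------------------------------------------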
+ */ +package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.sql.expression.gen.processor.BinaryProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; + +import java.io.IOException; +import java.util.Locale; +import java.util.Objects; +import java.util.function.BiFunction; + +public abstract class BinaryOperatorProcessor<O extends Enum<O> & BiFunction<Object, Object, Boolean>> extends BinaryProcessor { + + private final O operation; + + protected BinaryOperatorProcessor(Processor left, Processor right, O operation) { + super(left, right); + this.operation = operation; + } + + protected BinaryOperatorProcessor(StreamInput in, Reader<O> reader) throws IOException { + super(in); + operation = reader.read(in); + } + + protected O operation() { + return operation; + } + + @Override + protected Object doProcess(Object left, Object right) { + if (left == null || right == null) { + return null; + } + + checkParameter(left); + checkParameter(right); + + return operation.apply(left, right); + } + + protected void checkParameter(Object param) { + //no-op + } + + @Override + public int hashCode() { + return Objects.hash(operation); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + BinaryOperatorProcessor<?> other = (BinaryOperatorProcessor<?>) obj; + return Objects.equals(operation, other.operation) + && Objects.equals(left(), other.left()) + && Objects.equals(right(), other.right()); + } + + @Override + public String toString() { + return String.format(Locale.ROOT, "(%s %s %s)", left(), operation, right()); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryComparison.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/Comparisons.java similarity index 61% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryComparison.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/Comparisons.java index db1ba1d3cdf..cdd293cb1af 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryComparison.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/Comparisons.java @@ -3,33 +3,36 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.predicate; +package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; -import org.elasticsearch.xpack.sql.expression.BinaryOperator; -import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.type.DataType; +/** + * Comparison utilities. + */ +abstract class Comparisons { -// marker class to indicate operations that rely on values -public abstract class BinaryComparison extends BinaryOperator { - - public BinaryComparison(Location location, Expression left, Expression right) { - super(location, left, right); + static Boolean eq(Object l, Object r) { + Integer i = compare(l, r); + return i == null ?
null : i.intValue() == 0; } - @Override - protected TypeResolution resolveInputType(DataType inputType) { - return TypeResolution.TYPE_RESOLVED; + static Boolean lt(Object l, Object r) { + Integer i = compare(l, r); + return i == null ? null : i.intValue() < 0; } - @Override - protected Expression canonicalize() { - return left().hashCode() > right().hashCode() ? swapLeftAndRight() : this; + static Boolean lte(Object l, Object r) { + Integer i = compare(l, r); + return i == null ? null : i.intValue() <= 0; } - @Override - public DataType dataType() { - return DataType.BOOLEAN; + static Boolean gt(Object l, Object r) { + Integer i = compare(l, r); + return i == null ? null : i.intValue() > 0; + } + + static Boolean gte(Object l, Object r) { + Integer i = compare(l, r); + return i == null ? null : i.intValue() >= 0; } /** @@ -38,7 +41,7 @@ public abstract class BinaryComparison extends BinaryOperator { * one of them is null). */ @SuppressWarnings({ "rawtypes", "unchecked" }) - public static Integer compare(Object l, Object r) { + static Integer compare(Object l, Object r) { // typical number comparison if (l instanceof Number && r instanceof Number) { return compare((Number) l, (Number) r); @@ -57,7 +60,7 @@ public abstract class BinaryComparison extends BinaryOperator { return null; } - static Integer compare(Number l, Number r) { + private static Integer compare(Number l, Number r) { if (l instanceof Double || r instanceof Double) { return Double.compare(l.doubleValue(), r.doubleValue()); } @@ -70,4 +73,4 @@ public abstract class BinaryComparison extends BinaryOperator { return Integer.valueOf(Integer.compare(l.intValue(), r.intValue())); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Equals.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/Equals.java similarity index 75% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Equals.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/Equals.java index a5b3272d7cc..15dbacafc4a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Equals.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/Equals.java @@ -3,18 +3,17 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
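Note that the `Comparisons` helpers return a boxed `Boolean` and propagate `null`, whereas the per-class `fold()` bodies they replace collapsed an incomparable pair to `false`. A simplified sketch of the three-valued behavior, with type handling reduced to two cases:

--------------------------------------------------
final class TriValued {
    // Null when either operand is null or the pair is incomparable.
    static Integer compare(Object l, Object r) {
        if (l instanceof Number && r instanceof Number) {
            return Double.compare(((Number) l).doubleValue(), ((Number) r).doubleValue());
        }
        if (l instanceof String && r instanceof String) {
            return ((String) l).compareTo((String) r);
        }
        return null;
    }

    static Boolean lt(Object l, Object r) {
        Integer i = compare(l, r);
        return i == null ? null : i < 0;
    }

    public static void main(String[] args) {
        System.out.println(lt(1, 2));    // true
        System.out.println(lt(null, 2)); // null rather than false
    }
}
--------------------------------------------------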
*/ -package org.elasticsearch.xpack.sql.expression.predicate; +package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; -import java.util.Objects; - public class Equals extends BinaryComparison { public Equals(Location location, Expression left, Expression right) { - super(location, left, right); + super(location, left, right, BinaryComparisonOperation.EQ); } @Override @@ -27,18 +26,8 @@ public class Equals extends BinaryComparison { return new Equals(location(), newLeft, newRight); } - @Override - public Object fold() { - return Objects.equals(left().fold(), right().fold()); - } - @Override public Equals swapLeftAndRight() { return new Equals(location(), right(), left()); } - - @Override - public String symbol() { - return "=="; - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/GreaterThan.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/GreaterThan.java similarity index 73% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/GreaterThan.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/GreaterThan.java index 5fecc7c4f63..f643d873a1f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/GreaterThan.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/GreaterThan.java @@ -3,9 +3,10 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.sql.expression.predicate; +package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; -import org.elasticsearch.xpack.sql.expression.BinaryOperator.Negateable; +import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator.Negateable; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; @@ -13,7 +14,7 @@ import org.elasticsearch.xpack.sql.tree.NodeInfo; public class GreaterThan extends BinaryComparison implements Negateable { public GreaterThan(Location location, Expression left, Expression right) { - super(location, left, right); + super(location, left, right, BinaryComparisonOperation.GT); } @Override @@ -26,11 +27,6 @@ public class GreaterThan extends BinaryComparison implements Negateable { return new GreaterThan(location(), newLeft, newRight); } - public Object fold() { - Integer compare = compare(left().fold(), right().fold()); - return compare != null && compare.intValue() > 0; - } - @Override public LessThan swapLeftAndRight() { return new LessThan(location(), right(), left()); @@ -40,9 +36,4 @@ public class GreaterThan extends BinaryComparison implements Negateable { public LessThanOrEqual negate() { return new LessThanOrEqual(location(), left(), right()); } - - @Override - public String symbol() { - return ">"; - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/GreaterThanOrEqual.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/GreaterThanOrEqual.java similarity index 74% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/GreaterThanOrEqual.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/GreaterThanOrEqual.java index 837cfa1df93..f91457216df 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/GreaterThanOrEqual.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/GreaterThanOrEqual.java @@ -3,9 +3,10 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.sql.expression.predicate; +package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; -import org.elasticsearch.xpack.sql.expression.BinaryOperator.Negateable; +import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator.Negateable; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; @@ -13,7 +14,7 @@ import org.elasticsearch.xpack.sql.tree.NodeInfo; public class GreaterThanOrEqual extends BinaryComparison implements Negateable { public GreaterThanOrEqual(Location location, Expression left, Expression right) { - super(location, left, right); + super(location, left, right, BinaryComparisonOperation.GTE); } @Override @@ -26,11 +27,6 @@ public class GreaterThanOrEqual extends BinaryComparison implements Negateable { return new GreaterThanOrEqual(location(), newLeft, newRight); } - public Object fold() { - Integer compare = compare(left().fold(), right().fold()); - return compare != null && compare.intValue() >= 0; - } - @Override public LessThanOrEqual swapLeftAndRight() { return new LessThanOrEqual(location(), right(), left()); @@ -40,9 +36,4 @@ public class GreaterThanOrEqual extends BinaryComparison implements Negateable { public LessThan negate() { return new LessThan(location(), left(), right()); } - - @Override - public String symbol() { - return ">="; - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/LessThan.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/LessThan.java similarity index 73% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/LessThan.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/LessThan.java index 151614b45dd..5f7d8ffbda8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/LessThan.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/LessThan.java @@ -3,9 +3,10 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.sql.expression.predicate; +package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; -import org.elasticsearch.xpack.sql.expression.BinaryOperator.Negateable; +import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator.Negateable; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; @@ -13,7 +14,7 @@ import org.elasticsearch.xpack.sql.tree.NodeInfo; public class LessThan extends BinaryComparison implements Negateable { public LessThan(Location location, Expression left, Expression right) { - super(location, left, right); + super(location, left, right, BinaryComparisonOperation.LT); } @Override @@ -26,11 +27,6 @@ public class LessThan extends BinaryComparison implements Negateable { return new LessThan(location(), newLeft, newRight); } - public Object fold() { - Integer compare = compare(left().fold(), right().fold()); - return compare != null && compare.intValue() < 0; - } - @Override public GreaterThan swapLeftAndRight() { return new GreaterThan(location(), right(), left()); @@ -40,9 +36,4 @@ public class LessThan extends BinaryComparison implements Negateable { public GreaterThanOrEqual negate() { return new GreaterThanOrEqual(location(), left(), right()); } - - @Override - public String symbol() { - return "<"; - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/LessThanOrEqual.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/LessThanOrEqual.java similarity index 73% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/LessThanOrEqual.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/LessThanOrEqual.java index 3f5a1252691..489cd52a4c6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/LessThanOrEqual.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/LessThanOrEqual.java @@ -3,9 +3,10 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.sql.expression.predicate; +package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; -import org.elasticsearch.xpack.sql.expression.BinaryOperator.Negateable; +import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator.Negateable; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; @@ -13,7 +14,7 @@ import org.elasticsearch.xpack.sql.tree.NodeInfo; public class LessThanOrEqual extends BinaryComparison implements Negateable { public LessThanOrEqual(Location location, Expression left, Expression right) { - super(location, left, right); + super(location, left, right, BinaryComparisonOperation.LTE); } @Override @@ -26,12 +27,6 @@ public class LessThanOrEqual extends BinaryComparison implements Negateable { return new LessThanOrEqual(location(), newLeft, newRight); } - @Override - public Object fold() { - Integer compare = compare(left().fold(), right().fold()); - return compare != null && compare.intValue() <= 0; - } - @Override public GreaterThanOrEqual swapLeftAndRight() { return new GreaterThanOrEqual(location(), right(), left()); @@ -41,9 +36,4 @@ public class LessThanOrEqual extends BinaryComparison implements Negateable { public GreaterThan negate() { return new GreaterThan(location(), left(), right()); } - - @Override - public String symbol() { - return "<="; - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/regex/Like.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/Like.java similarity index 62% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/regex/Like.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/Like.java index 3716b925911..9804214a28a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/regex/Like.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/Like.java @@ -3,20 +3,23 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
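The `swapLeftAndRight`/`negate` pairs threaded through these comparison classes encode two identities the optimizer leans on, for example when moving literals to the right or pushing `NOT` inward. Spelled out under ordinary two-valued logic:

--------------------------------------------------
// swap:   a <  b   has the same truth value as   b >  a
// negate: !(a < b) has the same truth value as   a >= b
final class ComparisonIdentities {
    public static void main(String[] args) {
        int a = 3, b = 5;
        System.out.println((a < b) == (b > a));     // true
        System.out.println((!(a < b)) == (a >= b)); // true
    }
}
--------------------------------------------------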
*/ -package org.elasticsearch.xpack.sql.expression.regex; +package org.elasticsearch.xpack.sql.expression.predicate.regex; -import org.elasticsearch.xpack.sql.expression.BinaryExpression; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.predicate.BinaryPredicate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import java.util.regex.Pattern; -public class Like extends BinaryExpression { +public class Like extends BinaryPredicate { public Like(Location location, Expression left, LikePattern right) { - super(location, left, right); + super(location, left, right, "LIKE"); } @Override @@ -25,10 +28,11 @@ public class Like extends BinaryExpression { } @Override - protected BinaryExpression replaceChildren(Expression newLeft, Expression newRight) { + protected BinaryPredicate replaceChildren(Expression newLeft, Expression newRight) { return new Like(location(), newLeft, (LikePattern) newRight); } + @Override public LikePattern right() { return (LikePattern) super.right(); } @@ -45,18 +49,18 @@ public class Like extends BinaryExpression { return p.matcher(left().fold().toString()).matches(); } - @Override - public Like swapLeftAndRight() { - return this; - } - @Override public DataType dataType() { return DataType.BOOLEAN; } @Override - public String symbol() { - return "LIKE"; + protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) { + throw new SqlIllegalArgumentException("Not supported yet"); + } + + @Override + protected Pipe makePipe() { + throw new SqlIllegalArgumentException("Not supported yet"); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/regex/LikePattern.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/LikePattern.java similarity index 97% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/regex/LikePattern.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/LikePattern.java index 45f2fc0bd54..bde8129f8e7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/regex/LikePattern.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/LikePattern.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
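`Like.fold()` above runs a regex match via `java.util.regex.Pattern`. The `LikePattern`-to-regex translation itself is not part of the hunk; a guess at its general shape (the real class also honors a user-chosen escape character):

--------------------------------------------------
import java.util.regex.Pattern;

// '%' matches any run of characters, '_' exactly one; everything else is
// quoted so regex metacharacters in the LIKE pattern stay literal.
final class LikeToRegex {
    static Pattern compile(String like) {
        StringBuilder regex = new StringBuilder();
        for (char c : like.toCharArray()) {
            if (c == '%') {
                regex.append(".*");
            } else if (c == '_') {
                regex.append('.');
            } else {
                regex.append(Pattern.quote(String.valueOf(c)));
            }
        }
        return Pattern.compile(regex.toString());
    }

    public static void main(String[] args) {
        System.out.println(compile("J_v%").matcher("Java").matches());          // true
        System.out.println(compile("%SQL%").matcher("NoSQL store").matches()); // true
    }
}
--------------------------------------------------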
*/ -package org.elasticsearch.xpack.sql.expression.regex; +package org.elasticsearch.xpack.sql.expression.predicate.regex; import org.elasticsearch.xpack.sql.expression.LeafExpression; import org.elasticsearch.xpack.sql.tree.Location; @@ -103,4 +103,4 @@ public class LikePattern extends LeafExpression { return Objects.equals(pattern, other.pattern) && escape == other.escape; } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/regex/RLike.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RLike.java similarity index 60% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/regex/RLike.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RLike.java index 8d48c4d532e..f445b49cf31 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/regex/RLike.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RLike.java @@ -3,21 +3,24 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.regex; +package org.elasticsearch.xpack.sql.expression.predicate.regex; -import org.elasticsearch.xpack.sql.expression.BinaryExpression; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Literal; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.predicate.BinaryPredicate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import java.util.regex.Pattern; -public class RLike extends BinaryExpression { +public class RLike extends BinaryPredicate { public RLike(Location location, Expression left, Literal right) { - super(location, left, right); + super(location, left, right, "RLIKE"); } @Override @@ -26,10 +29,11 @@ public class RLike extends BinaryExpression { } @Override - protected BinaryExpression replaceChildren(Expression newLeft, Expression newRight) { + protected BinaryPredicate replaceChildren(Expression newLeft, Expression newRight) { return new RLike(location(), newLeft, (Literal) newRight); } + @Override public Literal right() { return (Literal) super.right(); } @@ -40,18 +44,18 @@ public class RLike extends BinaryExpression { return p.matcher(left().fold().toString()).matches(); } - @Override - public RLike swapLeftAndRight() { - return this; - } - @Override public DataType dataType() { return DataType.BOOLEAN; } @Override - public String symbol() { - return "RLIKE"; + protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) { + throw new SqlIllegalArgumentException("Not supported yet"); + } + + @Override + protected Pipe makePipe() { + throw new SqlIllegalArgumentException("Not supported yet"); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java index 72105a2fae8..deb4ccb7da2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java @@ -10,8 +10,6 @@ import org.elasticsearch.xpack.sql.expression.Alias; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.AttributeMap; import org.elasticsearch.xpack.sql.expression.AttributeSet; -import org.elasticsearch.xpack.sql.expression.BinaryExpression; -import org.elasticsearch.xpack.sql.expression.BinaryOperator.Negateable; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.ExpressionId; import org.elasticsearch.xpack.sql.expression.ExpressionSet; @@ -39,16 +37,19 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.Cast; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute; import org.elasticsearch.xpack.sql.expression.predicate.And; -import org.elasticsearch.xpack.sql.expression.predicate.BinaryComparison; -import org.elasticsearch.xpack.sql.expression.predicate.Equals; -import org.elasticsearch.xpack.sql.expression.predicate.GreaterThan; -import org.elasticsearch.xpack.sql.expression.predicate.GreaterThanOrEqual; -import org.elasticsearch.xpack.sql.expression.predicate.LessThan; -import org.elasticsearch.xpack.sql.expression.predicate.LessThanOrEqual; +import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator; +import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator.Negateable; +import org.elasticsearch.xpack.sql.expression.predicate.BinaryPredicate; import org.elasticsearch.xpack.sql.expression.predicate.Not; import org.elasticsearch.xpack.sql.expression.predicate.Or; import org.elasticsearch.xpack.sql.expression.predicate.Predicates; import org.elasticsearch.xpack.sql.expression.predicate.Range; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.sql.plan.logical.Aggregate; import org.elasticsearch.xpack.sql.plan.logical.Filter; import org.elasticsearch.xpack.sql.plan.logical.Limit; @@ -1135,8 +1136,8 @@ public class Optimizer extends RuleExecutor { @Override protected Expression rule(Expression e) { - if (e instanceof BinaryExpression) { - return simplifyAndOr((BinaryExpression) e); + if (e instanceof BinaryPredicate) { + return simplifyAndOr((BinaryPredicate) e); } if (e instanceof Not) { return simplifyNot((Not) e); @@ -1145,7 +1146,7 @@ public class Optimizer extends RuleExecutor { return e; } - private Expression simplifyAndOr(BinaryExpression bc) { + private Expression simplifyAndOr(BinaryPredicate bc) { Expression l = bc.left(); Expression r = bc.right(); @@ -1229,12 +1230,12 @@ public class Optimizer extends RuleExecutor { } private Expression simplifyNot(Not n) { - Expression c = n.child(); + Expression c = n.field(); - if (TRUE.equals(c)) { + if (TRUE.semanticEquals(c)) { return FALSE; } - if (FALSE.equals(c)) { + if (FALSE.semanticEquals(c)) { return TRUE; } @@ -1243,7 +1244,7 @@ public class Optimizer extends 
RuleExecutor { } if (c instanceof Not) { - return ((Not) c).child(); + return ((Not) c).field(); } return n; @@ -1291,10 +1292,10 @@ public class Optimizer extends RuleExecutor { @Override protected Expression rule(Expression e) { - return e instanceof BinaryExpression ? literalToTheRight((BinaryExpression) e) : e; + return e instanceof BinaryOperator ? literalToTheRight((BinaryOperator) e) : e; } - private Expression literalToTheRight(BinaryExpression be) { + private Expression literalToTheRight(BinaryOperator be) { return be.left() instanceof Literal && !(be.right() instanceof Literal) ? be.swapLeftAndRight() : be; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index 539713f3285..640008aae3c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -23,35 +23,34 @@ import org.elasticsearch.xpack.sql.expression.UnresolvedStar; import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.Cast; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Add; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Div; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Mod; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Mul; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Neg; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Sub; import org.elasticsearch.xpack.sql.expression.predicate.And; -import org.elasticsearch.xpack.sql.expression.predicate.Equals; -import org.elasticsearch.xpack.sql.expression.predicate.GreaterThan; -import org.elasticsearch.xpack.sql.expression.predicate.GreaterThanOrEqual; import org.elasticsearch.xpack.sql.expression.predicate.In; import org.elasticsearch.xpack.sql.expression.predicate.IsNotNull; -import org.elasticsearch.xpack.sql.expression.predicate.LessThan; -import org.elasticsearch.xpack.sql.expression.predicate.LessThanOrEqual; import org.elasticsearch.xpack.sql.expression.predicate.Not; import org.elasticsearch.xpack.sql.expression.predicate.Or; import org.elasticsearch.xpack.sql.expression.predicate.Range; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MatchQueryPredicate; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MultiMatchQueryPredicate; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.StringQueryPredicate; -import org.elasticsearch.xpack.sql.expression.regex.Like; -import org.elasticsearch.xpack.sql.expression.regex.LikePattern; -import org.elasticsearch.xpack.sql.expression.regex.RLike; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Add; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Div; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mod; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mul; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Neg; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Sub; +import 
org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.sql.expression.predicate.regex.Like; +import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern; +import org.elasticsearch.xpack.sql.expression.predicate.regex.RLike; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticBinaryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticUnaryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BooleanLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastExpressionContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastTemplateContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ColumnReferenceContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ComparisonContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DateEscapedLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DecimalLiteralContext; @@ -142,11 +141,6 @@ abstract class ExpressionBuilder extends IdentifierBuilder { new UnresolvedAttribute(source(ctx.qualifiedName()), visitQualifiedName(ctx.qualifiedName())) : null); } - @Override - public Object visitColumnReference(ColumnReferenceContext ctx) { - return new UnresolvedAttribute(source(ctx), visitIdentifier(ctx.identifier())); - } - @Override public Object visitDereference(DereferenceContext ctx) { return new UnresolvedAttribute(source(ctx), visitQualifiedName(ctx.qualifiedName())); @@ -292,6 +286,9 @@ abstract class ExpressionBuilder extends IdentifierBuilder { case SqlBaseParser.PLUS: return value; case SqlBaseParser.MINUS: + if (value instanceof Literal) { // Minus already processed together with literal number + return value; + } return new Neg(source(ctx.operator), value); default: throw new ParsingException(loc, "Unknown arithemtic {}", ctx.operator.getText()); @@ -489,38 +486,40 @@ abstract class ExpressionBuilder extends IdentifierBuilder { @Override public Literal visitDecimalLiteral(DecimalLiteralContext ctx) { + String ctxText = (hasMinusFromParent(ctx) ? "-" : "") + ctx.getText(); double value; try { - value = Double.parseDouble(ctx.getText()); + value = Double.parseDouble(ctxText); } catch (NumberFormatException nfe) { - throw new ParsingException(source(ctx), "Cannot parse number [{}]", ctx.getText()); + throw new ParsingException(source(ctx), "Cannot parse number [{}]", ctxText); } if (Double.isInfinite(value)) { - throw new ParsingException(source(ctx), "Number [{}] is too large", ctx.getText()); + throw new ParsingException(source(ctx), "Number [{}] is too large", ctxText); } if (Double.isNaN(value)) { - throw new ParsingException(source(ctx), "[{}] cannot be parsed as a number (NaN)", ctx.getText()); + throw new ParsingException(source(ctx), "[{}] cannot be parsed as a number (NaN)", ctxText); } return new Literal(source(ctx), Double.valueOf(value), DataType.DOUBLE); } @Override public Literal visitIntegerLiteral(IntegerLiteralContext ctx) { + String ctxText = (hasMinusFromParent(ctx) ? 
"-" : "") + ctx.getText(); long value; try { - value = Long.parseLong(ctx.getText()); + value = Long.parseLong(ctxText); } catch (NumberFormatException nfe) { try { - BigInteger bi = new BigInteger(ctx.getText()); + BigInteger bi = new BigInteger(ctxText); try { bi.longValueExact(); } catch (ArithmeticException ae) { - throw new ParsingException(source(ctx), "Number [{}] is too large", ctx.getText()); + throw new ParsingException(source(ctx), "Number [{}] is too large", ctxText); } } catch (NumberFormatException ex) { // parsing fails, go through } - throw new ParsingException(source(ctx), "Cannot parse number [{}]", ctx.getText()); + throw new ParsingException(source(ctx), "Cannot parse number [{}]", ctxText); } DataType type = DataType.LONG; @@ -687,4 +686,21 @@ abstract class ExpressionBuilder extends IdentifierBuilder { return new Literal(source(ctx), string, DataType.KEYWORD); } + + private boolean hasMinusFromParent(SqlBaseParser.NumberContext ctx) { + ParserRuleContext parentCtx = ctx.getParent(); + if (parentCtx != null && parentCtx instanceof SqlBaseParser.NumericLiteralContext) { + parentCtx = parentCtx.getParent(); + if (parentCtx != null && parentCtx instanceof SqlBaseParser.ConstantDefaultContext) { + parentCtx = parentCtx.getParent(); + if (parentCtx != null && parentCtx instanceof SqlBaseParser.ValueExpressionDefaultContext) { + parentCtx = parentCtx.getParent(); + if (parentCtx != null && parentCtx instanceof SqlBaseParser.ArithmeticUnaryContext) { + return ((ArithmeticUnaryContext) parentCtx).MINUS() != null; + } + } + } + } + return false; + } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java index 58d858c4241..23d2c20d305 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FromClauseContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.GroupByContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinCriteriaContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinRelationContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinTypeContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LimitClauseContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.NamedQueryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryContext; @@ -33,7 +32,6 @@ import org.elasticsearch.xpack.sql.plan.logical.Aggregate; import org.elasticsearch.xpack.sql.plan.logical.Distinct; import org.elasticsearch.xpack.sql.plan.logical.Filter; import org.elasticsearch.xpack.sql.plan.logical.Join; -import org.elasticsearch.xpack.sql.plan.logical.Join.JoinType; import org.elasticsearch.xpack.sql.plan.logical.Limit; import org.elasticsearch.xpack.sql.plan.logical.LocalRelation; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; @@ -168,39 +166,20 @@ abstract class LogicalPlanBuilder extends ExpressionBuilder { LogicalPlan result = plan(ctx.relationPrimary()); for (JoinRelationContext j : ctx.joinRelation()) { - result = doJoin(result, j); + result = doJoin(j); } return result; } - private Join doJoin(LogicalPlan left, JoinRelationContext ctx) { - JoinTypeContext joinType = ctx.joinType(); + private Join doJoin(JoinRelationContext ctx) { - Join.JoinType 
type = JoinType.INNER; - if (joinType != null) { - if (joinType.FULL() != null) { - type = JoinType.FULL; - } - if (joinType.LEFT() != null) { - type = JoinType.LEFT; - } - if (joinType.RIGHT() != null) { - type = JoinType.RIGHT; - } - } - - Expression condition = null; JoinCriteriaContext criteria = ctx.joinCriteria(); if (criteria != null) { if (criteria.USING() != null) { throw new UnsupportedOperationException(); } - if (criteria.booleanExpression() != null) { - condition = expression(criteria.booleanExpression()); - } } - // We would return this if we actually supported JOINs, but we don't yet. // new Join(source(ctx), left, plan(ctx.right), type, condition); throw new ParsingException(source(ctx), "Queries with JOIN are not yet supported"); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java index d3c025d240c..7dc0d5e985d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java @@ -719,18 +719,6 @@ class SqlBaseBaseListener implements SqlBaseListener { *
    <p>The default implementation does nothing.</p>
    */ @Override public void exitSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx) { } - /** - * {@inheritDoc} - * - *
    <p>The default implementation does nothing.</p>
    - */ - @Override public void enterColumnReference(SqlBaseParser.ColumnReferenceContext ctx) { } - /** - * {@inheritDoc} - * - *
    <p>The default implementation does nothing.</p>
    - */ - @Override public void exitColumnReference(SqlBaseParser.ColumnReferenceContext ctx) { } /** * {@inheritDoc} * * diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java index 8e7603947e7..323fd914f55 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java @@ -424,13 +424,6 @@ class SqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements SqlBa * {@link #visitChildren} on {@code ctx}.</p>
    */ @Override public T visitSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
    <p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
    - */ - @Override public T visitColumnReference(SqlBaseParser.ColumnReferenceContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java index 588f3ef028d..0361d719e63 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java @@ -142,7 +142,7 @@ class SqlBaseLexer extends Lexer { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2k\u0370\b\1\4\2\t"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2k\u036f\b\1\4\2\t"+ "\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ @@ -187,13 +187,13 @@ class SqlBaseLexer extends Lexer { "\16a\u02ec\3a\6a\u02f0\na\ra\16a\u02f1\3a\3a\7a\u02f6\na\fa\16a\u02f9"+ "\13a\5a\u02fb\na\3a\3a\3a\3a\6a\u0301\na\ra\16a\u0302\3a\3a\5a\u0307\n"+ "a\3b\3b\5b\u030b\nb\3b\3b\3b\7b\u0310\nb\fb\16b\u0313\13b\3c\3c\3c\3c"+ - "\6c\u0319\nc\rc\16c\u031a\3d\3d\3d\3d\6d\u0321\nd\rd\16d\u0322\3e\3e\3"+ - "e\3e\7e\u0329\ne\fe\16e\u032c\13e\3e\3e\3f\3f\3f\3f\7f\u0334\nf\ff\16"+ - "f\u0337\13f\3f\3f\3g\3g\5g\u033d\ng\3g\6g\u0340\ng\rg\16g\u0341\3h\3h"+ - "\3i\3i\3j\3j\3j\3j\7j\u034c\nj\fj\16j\u034f\13j\3j\5j\u0352\nj\3j\5j\u0355"+ - "\nj\3j\3j\3k\3k\3k\3k\3k\7k\u035e\nk\fk\16k\u0361\13k\3k\3k\3k\3k\3k\3"+ - "l\6l\u0369\nl\rl\16l\u036a\3l\3l\3m\3m\3\u035f\2n\3\3\5\4\7\5\t\6\13\7"+ - "\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25"+ + "\6c\u0319\nc\rc\16c\u031a\3d\3d\3d\6d\u0320\nd\rd\16d\u0321\3e\3e\3e\3"+ + "e\7e\u0328\ne\fe\16e\u032b\13e\3e\3e\3f\3f\3f\3f\7f\u0333\nf\ff\16f\u0336"+ + "\13f\3f\3f\3g\3g\5g\u033c\ng\3g\6g\u033f\ng\rg\16g\u0340\3h\3h\3i\3i\3"+ + "j\3j\3j\3j\7j\u034b\nj\fj\16j\u034e\13j\3j\5j\u0351\nj\3j\5j\u0354\nj"+ + "\3j\3j\3k\3k\3k\3k\3k\7k\u035d\nk\fk\16k\u0360\13k\3k\3k\3k\3k\3k\3l\6"+ + "l\u0368\nl\rl\16l\u0369\3l\3l\3m\3m\3\u035e\2n\3\3\5\4\7\5\t\6\13\7\r"+ + "\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25"+ ")\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O"+ ")Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s;u{?}@\177A\u0081"+ "B\u0083C\u0085D\u0087E\u0089F\u008bG\u008dH\u008fI\u0091J\u0093K\u0095"+ @@ -201,7 +201,7 @@ class SqlBaseLexer extends Lexer { "V\u00abW\u00adX\u00afY\u00b1Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb_\u00bd"+ "`\u00bfa\u00c1b\u00c3c\u00c5d\u00c7e\u00c9f\u00cbg\u00cd\2\u00cf\2\u00d1"+ "\2\u00d3h\u00d5i\u00d7j\u00d9k\3\2\f\3\2))\4\2BBaa\5\2< T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitColumnReference(this); - else return visitor.visitChildren(this); - } - } public static class ExtractContext extends PrimaryExpressionContext { public ExtractExpressionContext extractExpression() { return getRuleContext(ExtractExpressionContext.class,0); @@ -4051,7 +4080,7 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 60, RULE_primaryExpression); int _la; try { - setState(577); + setState(575); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,76,_ctx) ) { case 1: @@ -4082,74 +4111,58 @@ class 
SqlBaseParser extends Parser { _localctx = new StarContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(559); - match(ASTERISK); - } - break; - case 5: - _localctx = new StarContext(_localctx); - enterOuterAlt(_localctx, 5); - { - setState(563); + setState(562); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(560); + setState(559); qualifiedName(); - setState(561); + setState(560); match(DOT); } } - setState(565); + setState(564); match(ASTERISK); } break; - case 6: + case 5: _localctx = new FunctionContext(_localctx); - enterOuterAlt(_localctx, 6); + enterOuterAlt(_localctx, 5); { - setState(566); + setState(565); functionExpression(); } break; - case 7: + case 6: _localctx = new SubqueryExpressionContext(_localctx); - enterOuterAlt(_localctx, 7); + enterOuterAlt(_localctx, 6); { - setState(567); + setState(566); match(T__0); - setState(568); + setState(567); query(); - setState(569); + setState(568); match(T__1); } break; - case 8: - _localctx = new ColumnReferenceContext(_localctx); - enterOuterAlt(_localctx, 8); - { - setState(571); - identifier(); - } - break; - case 9: + case 7: _localctx = new DereferenceContext(_localctx); - enterOuterAlt(_localctx, 9); + enterOuterAlt(_localctx, 7); { - setState(572); + setState(570); qualifiedName(); } break; - case 10: + case 8: _localctx = new ParenthesizedExpressionContext(_localctx); - enterOuterAlt(_localctx, 10); + enterOuterAlt(_localctx, 8); { - setState(573); + setState(571); match(T__0); - setState(574); + setState(572); expression(); - setState(575); + setState(573); match(T__1); } break; @@ -4195,23 +4208,23 @@ class SqlBaseParser extends Parser { CastExpressionContext _localctx = new CastExpressionContext(_ctx, getState()); enterRule(_localctx, 62, RULE_castExpression); try { - setState(584); + setState(582); switch (_input.LA(1)) { case CAST: enterOuterAlt(_localctx, 1); { - setState(579); + setState(577); castTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(580); + setState(578); match(FUNCTION_ESC); - setState(581); + setState(579); castTemplate(); - setState(582); + setState(580); match(ESC_END); } break; @@ -4264,17 +4277,17 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(586); + setState(584); match(CAST); - setState(587); + setState(585); match(T__0); - setState(588); + setState(586); expression(); - setState(589); + setState(587); match(AS); - setState(590); + setState(588); dataType(); - setState(591); + setState(589); match(T__1); } } @@ -4318,23 +4331,23 @@ class SqlBaseParser extends Parser { ExtractExpressionContext _localctx = new ExtractExpressionContext(_ctx, getState()); enterRule(_localctx, 66, RULE_extractExpression); try { - setState(598); + setState(596); switch (_input.LA(1)) { case EXTRACT: enterOuterAlt(_localctx, 1); { - setState(593); + 
setState(591); extractTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(594); + setState(592); match(FUNCTION_ESC); - setState(595); + setState(593); extractTemplate(); - setState(596); + setState(594); match(ESC_END); } break; @@ -4388,17 +4401,17 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(600); + setState(598); match(EXTRACT); - setState(601); + setState(599); match(T__0); - setState(602); + setState(600); ((ExtractTemplateContext)_localctx).field = identifier(); - setState(603); + setState(601); match(FROM); - setState(604); + setState(602); valueExpression(0); - setState(605); + setState(603); match(T__1); } } @@ -4441,7 +4454,7 @@ class SqlBaseParser extends Parser { FunctionExpressionContext _localctx = new FunctionExpressionContext(_ctx, getState()); enterRule(_localctx, 70, RULE_functionExpression); try { - setState(612); + setState(610); switch (_input.LA(1)) { case ANALYZE: case ANALYZED: @@ -4476,18 +4489,18 @@ class SqlBaseParser extends Parser { case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(607); + setState(605); functionTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(608); + setState(606); match(FUNCTION_ESC); - setState(609); + setState(607); functionTemplate(); - setState(610); + setState(608); match(ESC_END); } break; @@ -4545,45 +4558,45 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(614); + setState(612); functionName(); - setState(615); + setState(613); match(T__0); - setState(627); + setState(625); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << LEFT) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RIGHT) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (FUNCTION_ESC - 64)) | (1L << (DATE_ESC - 64)) | (1L << (TIME_ESC - 64)) | (1L << (TIMESTAMP_ESC - 64)) | (1L << (GUID_ESC - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(617); + setState(615); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(616); + setState(614); setQuantifier(); } } - setState(619); + setState(617); expression(); - setState(624); + setState(622); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(620); + setState(618); match(T__2); - setState(621); + setState(619); expression(); } } - setState(626); + setState(624); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(629); + setState(627); match(T__1); } } @@ -4627,19 +4640,19 @@ class SqlBaseParser extends Parser { FunctionNameContext _localctx = new FunctionNameContext(_ctx, getState()); 
enterRule(_localctx, 74, RULE_functionName); try { - setState(634); + setState(632); switch (_input.LA(1)) { case LEFT: enterOuterAlt(_localctx, 1); { - setState(631); + setState(629); match(LEFT); } break; case RIGHT: enterOuterAlt(_localctx, 2); { - setState(632); + setState(630); match(RIGHT); } break; @@ -4674,7 +4687,7 @@ class SqlBaseParser extends Parser { case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 3); { - setState(633); + setState(631); identifier(); } break; @@ -4886,24 +4899,22 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 76, RULE_constant); try { int _alt; - setState(661); + setState(659); switch (_input.LA(1)) { case NULL: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(636); + setState(634); match(NULL); } break; - case PLUS: - case MINUS: case INTEGER_VALUE: case DECIMAL_VALUE: _localctx = new NumericLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(637); + setState(635); number(); } break; @@ -4912,7 +4923,7 @@ class SqlBaseParser extends Parser { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(638); + setState(636); booleanValue(); } break; @@ -4920,7 +4931,7 @@ class SqlBaseParser extends Parser { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(640); + setState(638); _errHandler.sync(this); _alt = 1; do { @@ -4928,7 +4939,7 @@ class SqlBaseParser extends Parser { case 1: { { - setState(639); + setState(637); match(STRING); } } @@ -4936,7 +4947,7 @@ class SqlBaseParser extends Parser { default: throw new NoViableAltException(this); } - setState(642); + setState(640); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,84,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); @@ -4946,7 +4957,7 @@ class SqlBaseParser extends Parser { _localctx = new ParamLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(644); + setState(642); match(PARAM); } break; @@ -4954,11 +4965,11 @@ class SqlBaseParser extends Parser { _localctx = new DateEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(645); + setState(643); match(DATE_ESC); - setState(646); + setState(644); string(); - setState(647); + setState(645); match(ESC_END); } break; @@ -4966,11 +4977,11 @@ class SqlBaseParser extends Parser { _localctx = new TimeEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(649); + setState(647); match(TIME_ESC); - setState(650); + setState(648); string(); - setState(651); + setState(649); match(ESC_END); } break; @@ -4978,11 +4989,11 @@ class SqlBaseParser extends Parser { _localctx = new TimestampEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(653); + setState(651); match(TIMESTAMP_ESC); - setState(654); + setState(652); string(); - setState(655); + setState(653); match(ESC_END); } break; @@ -4990,11 +5001,11 @@ class SqlBaseParser extends Parser { _localctx = new GuidEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(657); + setState(655); match(GUID_ESC); - setState(658); + setState(656); string(); - setState(659); + setState(657); match(ESC_END); } break; @@ -5046,7 +5057,7 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(663); + setState(661); _la = _input.LA(1); if ( !(((((_la - 80)) & ~0x3f) == 0 && ((1L << (_la - 80)) & ((1L << (EQ - 80)) | (1L << (NEQ - 80)) | (1L << (LT - 80)) | (1L << (LTE - 80)) | (1L << (GT - 
80)) | (1L << (GTE - 80)))) != 0)) ) { _errHandler.recoverInline(this); @@ -5095,7 +5106,7 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(665); + setState(663); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -5153,7 +5164,7 @@ class SqlBaseParser extends Parser { _localctx = new PrimitiveDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(667); + setState(665); identifier(); } } @@ -5205,25 +5216,25 @@ class SqlBaseParser extends Parser { int _alt; enterOuterAlt(_localctx, 1); { - setState(674); + setState(672); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,86,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(669); + setState(667); identifier(); - setState(670); + setState(668); match(DOT); } } } - setState(676); + setState(674); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,86,_ctx); } - setState(677); + setState(675); identifier(); } } @@ -5268,13 +5279,13 @@ class SqlBaseParser extends Parser { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); enterRule(_localctx, 86, RULE_identifier); try { - setState(681); + setState(679); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(679); + setState(677); quoteIdentifier(); } break; @@ -5307,7 +5318,7 @@ class SqlBaseParser extends Parser { case DIGIT_IDENTIFIER: enterOuterAlt(_localctx, 2); { - setState(680); + setState(678); unquoteIdentifier(); } break; @@ -5360,43 +5371,43 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 88, RULE_tableIdentifier); int _la; try { - setState(695); + setState(693); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,90,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(686); + setState(684); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(683); + setState(681); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(684); + setState(682); match(T__3); } } - setState(688); + setState(686); match(TABLE_IDENTIFIER); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(692); + setState(690); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,89,_ctx) ) { case 1: { - setState(689); + setState(687); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(690); + setState(688); match(T__3); } break; } - setState(694); + setState(692); ((TableIdentifierContext)_localctx).name = identifier(); } break; @@ -5463,13 +5474,13 @@ class SqlBaseParser extends Parser { QuoteIdentifierContext _localctx = new QuoteIdentifierContext(_ctx, getState()); enterRule(_localctx, 90, RULE_quoteIdentifier); try { - setState(699); + setState(697); switch 
(_input.LA(1)) { case QUOTED_IDENTIFIER: _localctx = new QuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(697); + setState(695); match(QUOTED_IDENTIFIER); } break; @@ -5477,7 +5488,7 @@ class SqlBaseParser extends Parser { _localctx = new BackQuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(698); + setState(696); match(BACKQUOTED_IDENTIFIER); } break; @@ -5549,13 +5560,13 @@ class SqlBaseParser extends Parser { UnquoteIdentifierContext _localctx = new UnquoteIdentifierContext(_ctx, getState()); enterRule(_localctx, 92, RULE_unquoteIdentifier); try { - setState(704); + setState(702); switch (_input.LA(1)) { case IDENTIFIER: _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(701); + setState(699); match(IDENTIFIER); } break; @@ -5587,7 +5598,7 @@ class SqlBaseParser extends Parser { _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(702); + setState(700); nonReserved(); } break; @@ -5595,7 +5606,7 @@ class SqlBaseParser extends Parser { _localctx = new DigitIdentifierContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(703); + setState(701); match(DIGIT_IDENTIFIER); } break; @@ -5627,8 +5638,6 @@ class SqlBaseParser extends Parser { } public static class DecimalLiteralContext extends NumberContext { public TerminalNode DECIMAL_VALUE() { return getToken(SqlBaseParser.DECIMAL_VALUE, 0); } - public TerminalNode PLUS() { return getToken(SqlBaseParser.PLUS, 0); } - public TerminalNode MINUS() { return getToken(SqlBaseParser.MINUS, 0); } public DecimalLiteralContext(NumberContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { @@ -5646,8 +5655,6 @@ class SqlBaseParser extends Parser { } public static class IntegerLiteralContext extends NumberContext { public TerminalNode INTEGER_VALUE() { return getToken(SqlBaseParser.INTEGER_VALUE, 0); } - public TerminalNode PLUS() { return getToken(SqlBaseParser.PLUS, 0); } - public TerminalNode MINUS() { return getToken(SqlBaseParser.MINUS, 0); } public IntegerLiteralContext(NumberContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { @@ -5667,55 +5674,27 @@ class SqlBaseParser extends Parser { public final NumberContext number() throws RecognitionException { NumberContext _localctx = new NumberContext(_ctx, getState()); enterRule(_localctx, 94, RULE_number); - int _la; try { - setState(714); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,95,_ctx) ) { - case 1: + setState(706); + switch (_input.LA(1)) { + case DECIMAL_VALUE: _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(707); - _la = _input.LA(1); - if (_la==PLUS || _la==MINUS) { - { - setState(706); - _la = _input.LA(1); - if ( !(_la==PLUS || _la==MINUS) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - } - } - - setState(709); + setState(704); match(DECIMAL_VALUE); } break; - case 2: + case INTEGER_VALUE: _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(711); - _la = _input.LA(1); - if (_la==PLUS || _la==MINUS) { - { - setState(710); - _la = _input.LA(1); - if ( !(_la==PLUS || _la==MINUS) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - } - } - - setState(713); + setState(705); match(INTEGER_VALUE); } break; + default: + throw new NoViableAltException(this); } } catch (RecognitionException re) { @@ -5758,7 +5737,7 @@ class 
SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(716); + setState(708); _la = _input.LA(1); if ( !(_la==PARAM || _la==STRING) ) { _errHandler.recoverInline(this); @@ -5830,7 +5809,7 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(718); + setState(710); _la = _input.LA(1); if ( !(((((_la - 6)) & ~0x3f) == 0 && ((1L << (_la - 6)) & ((1L << (ANALYZE - 6)) | (1L << (ANALYZED - 6)) | (1L << (CATALOGS - 6)) | (1L << (COLUMNS - 6)) | (1L << (DEBUG - 6)) | (1L << (EXECUTABLE - 6)) | (1L << (EXPLAIN - 6)) | (1L << (FORMAT - 6)) | (1L << (FUNCTIONS - 6)) | (1L << (GRAPHVIZ - 6)) | (1L << (MAPPED - 6)) | (1L << (OPTIMIZED - 6)) | (1L << (PARSED - 6)) | (1L << (PHYSICAL - 6)) | (1L << (PLAN - 6)) | (1L << (RLIKE - 6)) | (1L << (QUERY - 6)) | (1L << (SCHEMAS - 6)) | (1L << (SHOW - 6)) | (1L << (SYS - 6)) | (1L << (TABLES - 6)) | (1L << (TEXT - 6)) | (1L << (TYPE - 6)) | (1L << (TYPES - 6)) | (1L << (VERIFY - 6)))) != 0)) ) { _errHandler.recoverInline(this); @@ -5881,7 +5860,7 @@ class SqlBaseParser extends Parser { } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3l\u02d3\4\2\t\2\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3l\u02cb\4\2\t\2\4"+ "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ @@ -5925,246 +5904,242 @@ class SqlBaseParser extends Parser { "\n\33\3\34\3\34\3\34\3\35\3\35\5\35\u0210\n\35\3\36\3\36\3\36\3\36\3\36"+ "\3\36\5\36\u0218\n\36\3\37\3\37\3\37\3\37\5\37\u021e\n\37\3\37\3\37\3"+ "\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\7\37\u022a\n\37\f\37\16\37\u022d"+ - "\13\37\3 \3 \3 \3 \3 \3 \3 \5 \u0236\n \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 "+ - "\3 \3 \5 \u0244\n \3!\3!\3!\3!\3!\5!\u024b\n!\3\"\3\"\3\"\3\"\3\"\3\""+ - "\3\"\3#\3#\3#\3#\3#\5#\u0259\n#\3$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\5"+ - "%\u0267\n%\3&\3&\3&\5&\u026c\n&\3&\3&\3&\7&\u0271\n&\f&\16&\u0274\13&"+ - "\5&\u0276\n&\3&\3&\3\'\3\'\3\'\5\'\u027d\n\'\3(\3(\3(\3(\6(\u0283\n(\r"+ - "(\16(\u0284\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\5(\u0298"+ - "\n(\3)\3)\3*\3*\3+\3+\3,\3,\3,\7,\u02a3\n,\f,\16,\u02a6\13,\3,\3,\3-\3"+ - "-\5-\u02ac\n-\3.\3.\3.\5.\u02b1\n.\3.\3.\3.\3.\5.\u02b7\n.\3.\5.\u02ba"+ - "\n.\3/\3/\5/\u02be\n/\3\60\3\60\3\60\5\60\u02c3\n\60\3\61\5\61\u02c6\n"+ - "\61\3\61\3\61\5\61\u02ca\n\61\3\61\5\61\u02cd\n\61\3\62\3\62\3\63\3\63"+ - "\3\63\2\4.<\64\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64"+ - "\668:<>@BDFHJLNPRTVXZ\\^`bd\2\20\b\2\7\7\t\t\31\31,,\62\62\66\66\4\2\""+ - "\"BB\4\2\t\t\62\62\4\2\37\37%%\3\2\25\26\4\2\7\7aa\4\2\r\r\25\25\4\2\7"+ - "\7\27\27\3\2XY\3\2Z\\\3\2RW\4\2\35\35CC\3\2_`\20\2\b\t\22\24\31\31\33"+ - "\33\36\36!\",,\62\62\668:<>?ABDEGG\u032c\2f\3\2\2\2\4i\3\2\2\2\6\u00d0"+ - "\3\2\2\2\b\u00db\3\2\2\2\n\u00df\3\2\2\2\f\u00f4\3\2\2\2\16\u00fb\3\2"+ - "\2\2\20\u00fd\3\2\2\2\22\u0101\3\2\2\2\24\u011d\3\2\2\2\26\u0127\3\2\2"+ - "\2\30\u0131\3\2\2\2\32\u0140\3\2\2\2\34\u0142\3\2\2\2\36\u0148\3\2\2\2"+ - " \u014a\3\2\2\2\"\u0151\3\2\2\2$\u0163\3\2\2\2&\u0174\3\2\2\2(\u0184\3"+ - "\2\2\2*\u019f\3\2\2\2,\u01a1\3\2\2\2.\u01c2\3\2\2\2\60\u01d3\3\2\2\2\62"+ - "\u01d6\3\2\2\2\64\u0208\3\2\2\2\66\u020a\3\2\2\28\u020d\3\2\2\2:\u0217"+ - "\3\2\2\2<\u021d\3\2\2\2>\u0243\3\2\2\2@\u024a\3\2\2\2B\u024c\3\2\2\2D"+ - "\u0258\3\2\2\2F\u025a\3\2\2\2H\u0266\3\2\2\2J\u0268\3\2\2\2L\u027c\3\2"+ - 
"\2\2N\u0297\3\2\2\2P\u0299\3\2\2\2R\u029b\3\2\2\2T\u029d\3\2\2\2V\u02a4"+ - "\3\2\2\2X\u02ab\3\2\2\2Z\u02b9\3\2\2\2\\\u02bd\3\2\2\2^\u02c2\3\2\2\2"+ - "`\u02cc\3\2\2\2b\u02ce\3\2\2\2d\u02d0\3\2\2\2fg\5\6\4\2gh\7\2\2\3h\3\3"+ - "\2\2\2ij\5,\27\2jk\7\2\2\3k\5\3\2\2\2l\u00d1\5\b\5\2m{\7\33\2\2nw\7\3"+ - "\2\2op\78\2\2pv\t\2\2\2qr\7\36\2\2rv\t\3\2\2st\7G\2\2tv\5R*\2uo\3\2\2"+ - "\2uq\3\2\2\2us\3\2\2\2vy\3\2\2\2wu\3\2\2\2wx\3\2\2\2xz\3\2\2\2yw\3\2\2"+ - "\2z|\7\4\2\2{n\3\2\2\2{|\3\2\2\2|}\3\2\2\2}\u00d1\5\6\4\2~\u008a\7\24"+ - "\2\2\177\u0086\7\3\2\2\u0080\u0081\78\2\2\u0081\u0085\t\4\2\2\u0082\u0083"+ - "\7\36\2\2\u0083\u0085\t\3\2\2\u0084\u0080\3\2\2\2\u0084\u0082\3\2\2\2"+ - "\u0085\u0088\3\2\2\2\u0086\u0084\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u0089"+ - "\3\2\2\2\u0088\u0086\3\2\2\2\u0089\u008b\7\4\2\2\u008a\177\3\2\2\2\u008a"+ - "\u008b\3\2\2\2\u008b\u008c\3\2\2\2\u008c\u00d1\5\6\4\2\u008d\u008e\7>"+ - "\2\2\u008e\u0091\7A\2\2\u008f\u0092\5\66\34\2\u0090\u0092\5Z.\2\u0091"+ - "\u008f\3\2\2\2\u0091\u0090\3\2\2\2\u0091\u0092\3\2\2\2\u0092\u00d1\3\2"+ - "\2\2\u0093\u0094\7>\2\2\u0094\u0095\7\23\2\2\u0095\u0098\t\5\2\2\u0096"+ - "\u0099\5\66\34\2\u0097\u0099\5Z.\2\u0098\u0096\3\2\2\2\u0098\u0097\3\2"+ - "\2\2\u0099\u00d1\3\2\2\2\u009a\u009d\t\6\2\2\u009b\u009e\5\66\34\2\u009c"+ - "\u009e\5Z.\2\u009d\u009b\3\2\2\2\u009d\u009c\3\2\2\2\u009e\u00d1\3\2\2"+ - "\2\u009f\u00a0\7>\2\2\u00a0\u00a2\7!\2\2\u00a1\u00a3\5\66\34\2\u00a2\u00a1"+ - "\3\2\2\2\u00a2\u00a3\3\2\2\2\u00a3\u00d1\3\2\2\2\u00a4\u00a5\7>\2\2\u00a5"+ - "\u00d1\7<\2\2\u00a6\u00a7\7?\2\2\u00a7\u00d1\7\22\2\2\u00a8\u00a9\7?\2"+ - "\2\u00a9\u00ac\7A\2\2\u00aa\u00ab\7\21\2\2\u00ab\u00ad\5\66\34\2\u00ac"+ - "\u00aa\3\2\2\2\u00ac\u00ad\3\2\2\2\u00ad\u00b0\3\2\2\2\u00ae\u00b1\5\66"+ - "\34\2\u00af\u00b1\5Z.\2\u00b0\u00ae\3\2\2\2\u00b0\u00af\3\2\2\2\u00b0"+ - "\u00b1\3\2\2\2\u00b1\u00bb\3\2\2\2\u00b2\u00b3\7D\2\2\u00b3\u00b8\5b\62"+ - "\2\u00b4\u00b5\7\5\2\2\u00b5\u00b7\5b\62\2\u00b6\u00b4\3\2\2\2\u00b7\u00ba"+ - "\3\2\2\2\u00b8\u00b6\3\2\2\2\u00b8\u00b9\3\2\2\2\u00b9\u00bc\3\2\2\2\u00ba"+ - "\u00b8\3\2\2\2\u00bb\u00b2\3\2\2\2\u00bb\u00bc\3\2\2\2\u00bc\u00d1\3\2"+ - "\2\2\u00bd\u00be\7?\2\2\u00be\u00c1\7\23\2\2\u00bf\u00c0\7\21\2\2\u00c0"+ - "\u00c2\5b\62\2\u00c1\u00bf\3\2\2\2\u00c1\u00c2\3\2\2\2\u00c2\u00c6\3\2"+ - "\2\2\u00c3\u00c4\7@\2\2\u00c4\u00c7\5\66\34\2\u00c5\u00c7\5Z.\2\u00c6"+ - "\u00c3\3\2\2\2\u00c6\u00c5\3\2\2\2\u00c6\u00c7\3\2\2\2\u00c7\u00c9\3\2"+ - "\2\2\u00c8\u00ca\5\66\34\2\u00c9\u00c8\3\2\2\2\u00c9\u00ca\3\2\2\2\u00ca"+ - "\u00d1\3\2\2\2\u00cb\u00cc\7?\2\2\u00cc\u00d1\7E\2\2\u00cd\u00ce\7?\2"+ - "\2\u00ce\u00cf\7@\2\2\u00cf\u00d1\7E\2\2\u00d0l\3\2\2\2\u00d0m\3\2\2\2"+ - "\u00d0~\3\2\2\2\u00d0\u008d\3\2\2\2\u00d0\u0093\3\2\2\2\u00d0\u009a\3"+ - "\2\2\2\u00d0\u009f\3\2\2\2\u00d0\u00a4\3\2\2\2\u00d0\u00a6\3\2\2\2\u00d0"+ - "\u00a8\3\2\2\2\u00d0\u00bd\3\2\2\2\u00d0\u00cb\3\2\2\2\u00d0\u00cd\3\2"+ - "\2\2\u00d1\7\3\2\2\2\u00d2\u00d3\7I\2\2\u00d3\u00d8\5\34\17\2\u00d4\u00d5"+ - "\7\5\2\2\u00d5\u00d7\5\34\17\2\u00d6\u00d4\3\2\2\2\u00d7\u00da\3\2\2\2"+ - "\u00d8\u00d6\3\2\2\2\u00d8\u00d9\3\2\2\2\u00d9\u00dc\3\2\2\2\u00da\u00d8"+ - "\3\2\2\2\u00db\u00d2\3\2\2\2\u00db\u00dc\3\2\2\2\u00dc\u00dd\3\2\2\2\u00dd"+ - "\u00de\5\n\6\2\u00de\t\3\2\2\2\u00df\u00ea\5\16\b\2\u00e0\u00e1\7\64\2"+ - "\2\u00e1\u00e2\7\17\2\2\u00e2\u00e7\5\20\t\2\u00e3\u00e4\7\5\2\2\u00e4"+ - "\u00e6\5\20\t\2\u00e5\u00e3\3\2\2\2\u00e6\u00e9\3\2\2\2\u00e7\u00e5\3"+ - "\2\2\2\u00e7\u00e8\3\2\2\2\u00e8\u00eb\3\2\2\2\u00e9\u00e7\3\2\2\2\u00ea"+ - 
"\u00e0\3\2\2\2\u00ea\u00eb\3\2\2\2\u00eb\u00ed\3\2\2\2\u00ec\u00ee\5\f"+ - "\7\2\u00ed\u00ec\3\2\2\2\u00ed\u00ee\3\2\2\2\u00ee\13\3\2\2\2\u00ef\u00f0"+ - "\7+\2\2\u00f0\u00f5\t\7\2\2\u00f1\u00f2\7L\2\2\u00f2\u00f3\t\7\2\2\u00f3"+ - "\u00f5\7Q\2\2\u00f4\u00ef\3\2\2\2\u00f4\u00f1\3\2\2\2\u00f5\r\3\2\2\2"+ - "\u00f6\u00fc\5\22\n\2\u00f7\u00f8\7\3\2\2\u00f8\u00f9\5\n\6\2\u00f9\u00fa"+ - "\7\4\2\2\u00fa\u00fc\3\2\2\2\u00fb\u00f6\3\2\2\2\u00fb\u00f7\3\2\2\2\u00fc"+ - "\17\3\2\2\2\u00fd\u00ff\5,\27\2\u00fe\u0100\t\b\2\2\u00ff\u00fe\3\2\2"+ - "\2\u00ff\u0100\3\2\2\2\u0100\21\3\2\2\2\u0101\u0103\7=\2\2\u0102\u0104"+ - "\5\36\20\2\u0103\u0102\3\2\2\2\u0103\u0104\3\2\2\2\u0104\u0105\3\2\2\2"+ - "\u0105\u010a\5 \21\2\u0106\u0107\7\5\2\2\u0107\u0109\5 \21\2\u0108\u0106"+ - "\3\2\2\2\u0109\u010c\3\2\2\2\u010a\u0108\3\2\2\2\u010a\u010b\3\2\2\2\u010b"+ - "\u010e\3\2\2\2\u010c\u010a\3\2\2\2\u010d\u010f\5\24\13\2\u010e\u010d\3"+ - "\2\2\2\u010e\u010f\3\2\2\2\u010f\u0112\3\2\2\2\u0110\u0111\7H\2\2\u0111"+ - "\u0113\5.\30\2\u0112\u0110\3\2\2\2\u0112\u0113\3\2\2\2\u0113\u0117\3\2"+ - "\2\2\u0114\u0115\7#\2\2\u0115\u0116\7\17\2\2\u0116\u0118\5\26\f\2\u0117"+ - "\u0114\3\2\2\2\u0117\u0118\3\2\2\2\u0118\u011b\3\2\2\2\u0119\u011a\7$"+ - "\2\2\u011a\u011c\5.\30\2\u011b\u0119\3\2\2\2\u011b\u011c\3\2\2\2\u011c"+ - "\23\3\2\2\2\u011d\u011e\7\37\2\2\u011e\u0123\5\"\22\2\u011f\u0120\7\5"+ - "\2\2\u0120\u0122\5\"\22\2\u0121\u011f\3\2\2\2\u0122\u0125\3\2\2\2\u0123"+ - "\u0121\3\2\2\2\u0123\u0124\3\2\2\2\u0124\25\3\2\2\2\u0125\u0123\3\2\2"+ - "\2\u0126\u0128\5\36\20\2\u0127\u0126\3\2\2\2\u0127\u0128\3\2\2\2\u0128"+ - "\u0129\3\2\2\2\u0129\u012e\5\30\r\2\u012a\u012b\7\5\2\2\u012b\u012d\5"+ - "\30\r\2\u012c\u012a\3\2\2\2\u012d\u0130\3\2\2\2\u012e\u012c\3\2\2\2\u012e"+ - "\u012f\3\2\2\2\u012f\27\3\2\2\2\u0130\u012e\3\2\2\2\u0131\u0132\5\32\16"+ - "\2\u0132\31\3\2\2\2\u0133\u013c\7\3\2\2\u0134\u0139\5,\27\2\u0135\u0136"+ - "\7\5\2\2\u0136\u0138\5,\27\2\u0137\u0135\3\2\2\2\u0138\u013b\3\2\2\2\u0139"+ - "\u0137\3\2\2\2\u0139\u013a\3\2\2\2\u013a\u013d\3\2\2\2\u013b\u0139\3\2"+ - "\2\2\u013c\u0134\3\2\2\2\u013c\u013d\3\2\2\2\u013d\u013e\3\2\2\2\u013e"+ - "\u0141\7\4\2\2\u013f\u0141\5,\27\2\u0140\u0133\3\2\2\2\u0140\u013f\3\2"+ - "\2\2\u0141\33\3\2\2\2\u0142\u0143\5X-\2\u0143\u0144\7\f\2\2\u0144\u0145"+ - "\7\3\2\2\u0145\u0146\5\n\6\2\u0146\u0147\7\4\2\2\u0147\35\3\2\2\2\u0148"+ - "\u0149\t\t\2\2\u0149\37\3\2\2\2\u014a\u014f\5,\27\2\u014b\u014d\7\f\2"+ - "\2\u014c\u014b\3\2\2\2\u014c\u014d\3\2\2\2\u014d\u014e\3\2\2\2\u014e\u0150"+ - "\5X-\2\u014f\u014c\3\2\2\2\u014f\u0150\3\2\2\2\u0150!\3\2\2\2\u0151\u0155"+ - "\5*\26\2\u0152\u0154\5$\23\2\u0153\u0152\3\2\2\2\u0154\u0157\3\2\2\2\u0155"+ - "\u0153\3\2\2\2\u0155\u0156\3\2\2\2\u0156#\3\2\2\2\u0157\u0155\3\2\2\2"+ - "\u0158\u0159\5&\24\2\u0159\u015a\7(\2\2\u015a\u015c\5*\26\2\u015b\u015d"+ - "\5(\25\2\u015c\u015b\3\2\2\2\u015c\u015d\3\2\2\2\u015d\u0164\3\2\2\2\u015e"+ - "\u015f\7.\2\2\u015f\u0160\5&\24\2\u0160\u0161\7(\2\2\u0161\u0162\5*\26"+ - "\2\u0162\u0164\3\2\2\2\u0163\u0158\3\2\2\2\u0163\u015e\3\2\2\2\u0164%"+ - "\3\2\2\2\u0165\u0167\7&\2\2\u0166\u0165\3\2\2\2\u0166\u0167\3\2\2\2\u0167"+ - "\u0175\3\2\2\2\u0168\u016a\7)\2\2\u0169\u016b\7\65\2\2\u016a\u0169\3\2"+ - "\2\2\u016a\u016b\3\2\2\2\u016b\u0175\3\2\2\2\u016c\u016e\79\2\2\u016d"+ - "\u016f\7\65\2\2\u016e\u016d\3\2\2\2\u016e\u016f\3\2\2\2\u016f\u0175\3"+ - "\2\2\2\u0170\u0172\7 \2\2\u0171\u0173\7\65\2\2\u0172\u0171\3\2\2\2\u0172"+ - "\u0173\3\2\2\2\u0173\u0175\3\2\2\2\u0174\u0166\3\2\2\2\u0174\u0168\3\2"+ - 
"\2\2\u0174\u016c\3\2\2\2\u0174\u0170\3\2\2\2\u0175\'\3\2\2\2\u0176\u0177"+ - "\7\61\2\2\u0177\u0185\5.\30\2\u0178\u0179\7F\2\2\u0179\u017a\7\3\2\2\u017a"+ - "\u017f\5X-\2\u017b\u017c\7\5\2\2\u017c\u017e\5X-\2\u017d\u017b\3\2\2\2"+ - "\u017e\u0181\3\2\2\2\u017f\u017d\3\2\2\2\u017f\u0180\3\2\2\2\u0180\u0182"+ - "\3\2\2\2\u0181\u017f\3\2\2\2\u0182\u0183\7\4\2\2\u0183\u0185\3\2\2\2\u0184"+ - "\u0176\3\2\2\2\u0184\u0178\3\2\2\2\u0185)\3\2\2\2\u0186\u018b\5Z.\2\u0187"+ - "\u0189\7\f\2\2\u0188\u0187\3\2\2\2\u0188\u0189\3\2\2\2\u0189\u018a\3\2"+ - "\2\2\u018a\u018c\5V,\2\u018b\u0188\3\2\2\2\u018b\u018c\3\2\2\2\u018c\u01a0"+ - "\3\2\2\2\u018d\u018e\7\3\2\2\u018e\u018f\5\n\6\2\u018f\u0194\7\4\2\2\u0190"+ - "\u0192\7\f\2\2\u0191\u0190\3\2\2\2\u0191\u0192\3\2\2\2\u0192\u0193\3\2"+ - "\2\2\u0193\u0195\5V,\2\u0194\u0191\3\2\2\2\u0194\u0195\3\2\2\2\u0195\u01a0"+ - "\3\2\2\2\u0196\u0197\7\3\2\2\u0197\u0198\5\"\22\2\u0198\u019d\7\4\2\2"+ - "\u0199\u019b\7\f\2\2\u019a\u0199\3\2\2\2\u019a\u019b\3\2\2\2\u019b\u019c"+ - "\3\2\2\2\u019c\u019e\5V,\2\u019d\u019a\3\2\2\2\u019d\u019e\3\2\2\2\u019e"+ - "\u01a0\3\2\2\2\u019f\u0186\3\2\2\2\u019f\u018d\3\2\2\2\u019f\u0196\3\2"+ - "\2\2\u01a0+\3\2\2\2\u01a1\u01a2\5.\30\2\u01a2-\3\2\2\2\u01a3\u01a4\b\30"+ - "\1\2\u01a4\u01a5\7/\2\2\u01a5\u01c3\5.\30\n\u01a6\u01a7\7\32\2\2\u01a7"+ - "\u01a8\7\3\2\2\u01a8\u01a9\5\b\5\2\u01a9\u01aa\7\4\2\2\u01aa\u01c3\3\2"+ - "\2\2\u01ab\u01ac\7;\2\2\u01ac\u01ad\7\3\2\2\u01ad\u01ae\5b\62\2\u01ae"+ - "\u01af\5\60\31\2\u01af\u01b0\7\4\2\2\u01b0\u01c3\3\2\2\2\u01b1\u01b2\7"+ - "-\2\2\u01b2\u01b3\7\3\2\2\u01b3\u01b4\5V,\2\u01b4\u01b5\7\5\2\2\u01b5"+ - "\u01b6\5b\62\2\u01b6\u01b7\5\60\31\2\u01b7\u01b8\7\4\2\2\u01b8\u01c3\3"+ - "\2\2\2\u01b9\u01ba\7-\2\2\u01ba\u01bb\7\3\2\2\u01bb\u01bc\5b\62\2\u01bc"+ - "\u01bd\7\5\2\2\u01bd\u01be\5b\62\2\u01be\u01bf\5\60\31\2\u01bf\u01c0\7"+ - "\4\2\2\u01c0\u01c3\3\2\2\2\u01c1\u01c3\5\62\32\2\u01c2\u01a3\3\2\2\2\u01c2"+ - "\u01a6\3\2\2\2\u01c2\u01ab\3\2\2\2\u01c2\u01b1\3\2\2\2\u01c2\u01b9\3\2"+ - "\2\2\u01c2\u01c1\3\2\2\2\u01c3\u01cc\3\2\2\2\u01c4\u01c5\f\4\2\2\u01c5"+ - "\u01c6\7\n\2\2\u01c6\u01cb\5.\30\5\u01c7\u01c8\f\3\2\2\u01c8\u01c9\7\63"+ - "\2\2\u01c9\u01cb\5.\30\4\u01ca\u01c4\3\2\2\2\u01ca\u01c7\3\2\2\2\u01cb"+ - "\u01ce\3\2\2\2\u01cc\u01ca\3\2\2\2\u01cc\u01cd\3\2\2\2\u01cd/\3\2\2\2"+ - "\u01ce\u01cc\3\2\2\2\u01cf\u01d0\7\5\2\2\u01d0\u01d2\5b\62\2\u01d1\u01cf"+ - "\3\2\2\2\u01d2\u01d5\3\2\2\2\u01d3\u01d1\3\2\2\2\u01d3\u01d4\3\2\2\2\u01d4"+ - "\61\3\2\2\2\u01d5\u01d3\3\2\2\2\u01d6\u01d8\5<\37\2\u01d7\u01d9\5\64\33"+ - "\2\u01d8\u01d7\3\2\2\2\u01d8\u01d9\3\2\2\2\u01d9\63\3\2\2\2\u01da\u01dc"+ - "\7/\2\2\u01db\u01da\3\2\2\2\u01db\u01dc\3\2\2\2\u01dc\u01dd\3\2\2\2\u01dd"+ - "\u01de\7\16\2\2\u01de\u01df\5<\37\2\u01df\u01e0\7\n\2\2\u01e0\u01e1\5"+ - "<\37\2\u01e1\u0209\3\2\2\2\u01e2\u01e4\7/\2\2\u01e3\u01e2\3\2\2\2\u01e3"+ - "\u01e4\3\2\2\2\u01e4\u01e5\3\2\2\2\u01e5\u01e6\7%\2\2\u01e6\u01e7\7\3"+ - "\2\2\u01e7\u01ec\5,\27\2\u01e8\u01e9\7\5\2\2\u01e9\u01eb\5,\27\2\u01ea"+ - "\u01e8\3\2\2\2\u01eb\u01ee\3\2\2\2\u01ec\u01ea\3\2\2\2\u01ec\u01ed\3\2"+ - "\2\2\u01ed\u01ef\3\2\2\2\u01ee\u01ec\3\2\2\2\u01ef\u01f0\7\4\2\2\u01f0"+ - "\u0209\3\2\2\2\u01f1\u01f3\7/\2\2\u01f2\u01f1\3\2\2\2\u01f2\u01f3\3\2"+ - "\2\2\u01f3\u01f4\3\2\2\2\u01f4\u01f5\7%\2\2\u01f5\u01f6\7\3\2\2\u01f6"+ - "\u01f7\5\b\5\2\u01f7\u01f8\7\4\2\2\u01f8\u0209\3\2\2\2\u01f9\u01fb\7/"+ - "\2\2\u01fa\u01f9\3\2\2\2\u01fa\u01fb\3\2\2\2\u01fb\u01fc\3\2\2\2\u01fc"+ - "\u01fd\7*\2\2\u01fd\u0209\58\35\2\u01fe\u0200\7/\2\2\u01ff\u01fe\3\2\2"+ - 
"\2\u01ff\u0200\3\2\2\2\u0200\u0201\3\2\2\2\u0201\u0202\7:\2\2\u0202\u0209"+ - "\5b\62\2\u0203\u0205\7\'\2\2\u0204\u0206\7/\2\2\u0205\u0204\3\2\2\2\u0205"+ - "\u0206\3\2\2\2\u0206\u0207\3\2\2\2\u0207\u0209\7\60\2\2\u0208\u01db\3"+ - "\2\2\2\u0208\u01e3\3\2\2\2\u0208\u01f2\3\2\2\2\u0208\u01fa\3\2\2\2\u0208"+ - "\u01ff\3\2\2\2\u0208\u0203\3\2\2\2\u0209\65\3\2\2\2\u020a\u020b\7*\2\2"+ - "\u020b\u020c\58\35\2\u020c\67\3\2\2\2\u020d\u020f\5b\62\2\u020e\u0210"+ - "\5:\36\2\u020f\u020e\3\2\2\2\u020f\u0210\3\2\2\2\u02109\3\2\2\2\u0211"+ - "\u0212\7\30\2\2\u0212\u0218\5b\62\2\u0213\u0214\7J\2\2\u0214\u0215\5b"+ - "\62\2\u0215\u0216\7Q\2\2\u0216\u0218\3\2\2\2\u0217\u0211\3\2\2\2\u0217"+ - "\u0213\3\2\2\2\u0218;\3\2\2\2\u0219\u021a\b\37\1\2\u021a\u021e\5> \2\u021b"+ - "\u021c\t\n\2\2\u021c\u021e\5<\37\6\u021d\u0219\3\2\2\2\u021d\u021b\3\2"+ - "\2\2\u021e\u022b\3\2\2\2\u021f\u0220\f\5\2\2\u0220\u0221\t\13\2\2\u0221"+ - "\u022a\5<\37\6\u0222\u0223\f\4\2\2\u0223\u0224\t\n\2\2\u0224\u022a\5<"+ - "\37\5\u0225\u0226\f\3\2\2\u0226\u0227\5P)\2\u0227\u0228\5<\37\4\u0228"+ - "\u022a\3\2\2\2\u0229\u021f\3\2\2\2\u0229\u0222\3\2\2\2\u0229\u0225\3\2"+ - "\2\2\u022a\u022d\3\2\2\2\u022b\u0229\3\2\2\2\u022b\u022c\3\2\2\2\u022c"+ - "=\3\2\2\2\u022d\u022b\3\2\2\2\u022e\u0244\5@!\2\u022f\u0244\5D#\2\u0230"+ - "\u0244\5N(\2\u0231\u0244\7Z\2\2\u0232\u0233\5V,\2\u0233\u0234\7^\2\2\u0234"+ - "\u0236\3\2\2\2\u0235\u0232\3\2\2\2\u0235\u0236\3\2\2\2\u0236\u0237\3\2"+ - "\2\2\u0237\u0244\7Z\2\2\u0238\u0244\5H%\2\u0239\u023a\7\3\2\2\u023a\u023b"+ - "\5\b\5\2\u023b\u023c\7\4\2\2\u023c\u0244\3\2\2\2\u023d\u0244\5X-\2\u023e"+ - "\u0244\5V,\2\u023f\u0240\7\3\2\2\u0240\u0241\5,\27\2\u0241\u0242\7\4\2"+ - "\2\u0242\u0244\3\2\2\2\u0243\u022e\3\2\2\2\u0243\u022f\3\2\2\2\u0243\u0230"+ - "\3\2\2\2\u0243\u0231\3\2\2\2\u0243\u0235\3\2\2\2\u0243\u0238\3\2\2\2\u0243"+ - "\u0239\3\2\2\2\u0243\u023d\3\2\2\2\u0243\u023e\3\2\2\2\u0243\u023f\3\2"+ - "\2\2\u0244?\3\2\2\2\u0245\u024b\5B\"\2\u0246\u0247\7K\2\2\u0247\u0248"+ - "\5B\"\2\u0248\u0249\7Q\2\2\u0249\u024b\3\2\2\2\u024a\u0245\3\2\2\2\u024a"+ - "\u0246\3\2\2\2\u024bA\3\2\2\2\u024c\u024d\7\20\2\2\u024d\u024e\7\3\2\2"+ - "\u024e\u024f\5,\27\2\u024f\u0250\7\f\2\2\u0250\u0251\5T+\2\u0251\u0252"+ - "\7\4\2\2\u0252C\3\2\2\2\u0253\u0259\5F$\2\u0254\u0255\7K\2\2\u0255\u0256"+ - "\5F$\2\u0256\u0257\7Q\2\2\u0257\u0259\3\2\2\2\u0258\u0253\3\2\2\2\u0258"+ - "\u0254\3\2\2\2\u0259E\3\2\2\2\u025a\u025b\7\34\2\2\u025b\u025c\7\3\2\2"+ - "\u025c\u025d\5X-\2\u025d\u025e\7\37\2\2\u025e\u025f\5<\37\2\u025f\u0260"+ - "\7\4\2\2\u0260G\3\2\2\2\u0261\u0267\5J&\2\u0262\u0263\7K\2\2\u0263\u0264"+ - "\5J&\2\u0264\u0265\7Q\2\2\u0265\u0267\3\2\2\2\u0266\u0261\3\2\2\2\u0266"+ - "\u0262\3\2\2\2\u0267I\3\2\2\2\u0268\u0269\5L\'\2\u0269\u0275\7\3\2\2\u026a"+ - "\u026c\5\36\20\2\u026b\u026a\3\2\2\2\u026b\u026c\3\2\2\2\u026c\u026d\3"+ - "\2\2\2\u026d\u0272\5,\27\2\u026e\u026f\7\5\2\2\u026f\u0271\5,\27\2\u0270"+ - "\u026e\3\2\2\2\u0271\u0274\3\2\2\2\u0272\u0270\3\2\2\2\u0272\u0273\3\2"+ - "\2\2\u0273\u0276\3\2\2\2\u0274\u0272\3\2\2\2\u0275\u026b\3\2\2\2\u0275"+ - "\u0276\3\2\2\2\u0276\u0277\3\2\2\2\u0277\u0278\7\4\2\2\u0278K\3\2\2\2"+ - "\u0279\u027d\7)\2\2\u027a\u027d\79\2\2\u027b\u027d\5X-\2\u027c\u0279\3"+ - "\2\2\2\u027c\u027a\3\2\2\2\u027c\u027b\3\2\2\2\u027dM\3\2\2\2\u027e\u0298"+ - "\7\60\2\2\u027f\u0298\5`\61\2\u0280\u0298\5R*\2\u0281\u0283\7`\2\2\u0282"+ - "\u0281\3\2\2\2\u0283\u0284\3\2\2\2\u0284\u0282\3\2\2\2\u0284\u0285\3\2"+ - "\2\2\u0285\u0298\3\2\2\2\u0286\u0298\7_\2\2\u0287\u0288\7M\2\2\u0288\u0289"+ - 
"\5b\62\2\u0289\u028a\7Q\2\2\u028a\u0298\3\2\2\2\u028b\u028c\7N\2\2\u028c"+ - "\u028d\5b\62\2\u028d\u028e\7Q\2\2\u028e\u0298\3\2\2\2\u028f\u0290\7O\2"+ - "\2\u0290\u0291\5b\62\2\u0291\u0292\7Q\2\2\u0292\u0298\3\2\2\2\u0293\u0294"+ - "\7P\2\2\u0294\u0295\5b\62\2\u0295\u0296\7Q\2\2\u0296\u0298\3\2\2\2\u0297"+ - "\u027e\3\2\2\2\u0297\u027f\3\2\2\2\u0297\u0280\3\2\2\2\u0297\u0282\3\2"+ - "\2\2\u0297\u0286\3\2\2\2\u0297\u0287\3\2\2\2\u0297\u028b\3\2\2\2\u0297"+ - "\u028f\3\2\2\2\u0297\u0293\3\2\2\2\u0298O\3\2\2\2\u0299\u029a\t\f\2\2"+ - "\u029aQ\3\2\2\2\u029b\u029c\t\r\2\2\u029cS\3\2\2\2\u029d\u029e\5X-\2\u029e"+ - "U\3\2\2\2\u029f\u02a0\5X-\2\u02a0\u02a1\7^\2\2\u02a1\u02a3\3\2\2\2\u02a2"+ - "\u029f\3\2\2\2\u02a3\u02a6\3\2\2\2\u02a4\u02a2\3\2\2\2\u02a4\u02a5\3\2"+ - "\2\2\u02a5\u02a7\3\2\2\2\u02a6\u02a4\3\2\2\2\u02a7\u02a8\5X-\2\u02a8W"+ - "\3\2\2\2\u02a9\u02ac\5\\/\2\u02aa\u02ac\5^\60\2\u02ab\u02a9\3\2\2\2\u02ab"+ - "\u02aa\3\2\2\2\u02acY\3\2\2\2\u02ad\u02ae\5X-\2\u02ae\u02af\7\6\2\2\u02af"+ - "\u02b1\3\2\2\2\u02b0\u02ad\3\2\2\2\u02b0\u02b1\3\2\2\2\u02b1\u02b2\3\2"+ - "\2\2\u02b2\u02ba\7e\2\2\u02b3\u02b4\5X-\2\u02b4\u02b5\7\6\2\2\u02b5\u02b7"+ - "\3\2\2\2\u02b6\u02b3\3\2\2\2\u02b6\u02b7\3\2\2\2\u02b7\u02b8\3\2\2\2\u02b8"+ - "\u02ba\5X-\2\u02b9\u02b0\3\2\2\2\u02b9\u02b6\3\2\2\2\u02ba[\3\2\2\2\u02bb"+ - "\u02be\7f\2\2\u02bc\u02be\7g\2\2\u02bd\u02bb\3\2\2\2\u02bd\u02bc\3\2\2"+ - "\2\u02be]\3\2\2\2\u02bf\u02c3\7c\2\2\u02c0\u02c3\5d\63\2\u02c1\u02c3\7"+ - "d\2\2\u02c2\u02bf\3\2\2\2\u02c2\u02c0\3\2\2\2\u02c2\u02c1\3\2\2\2\u02c3"+ - "_\3\2\2\2\u02c4\u02c6\t\n\2\2\u02c5\u02c4\3\2\2\2\u02c5\u02c6\3\2\2\2"+ - "\u02c6\u02c7\3\2\2\2\u02c7\u02cd\7b\2\2\u02c8\u02ca\t\n\2\2\u02c9\u02c8"+ - "\3\2\2\2\u02c9\u02ca\3\2\2\2\u02ca\u02cb\3\2\2\2\u02cb\u02cd\7a\2\2\u02cc"+ - "\u02c5\3\2\2\2\u02cc\u02c9\3\2\2\2\u02cda\3\2\2\2\u02ce\u02cf\t\16\2\2"+ - "\u02cfc\3\2\2\2\u02d0\u02d1\t\17\2\2\u02d1e\3\2\2\2buw{\u0084\u0086\u008a"+ - "\u0091\u0098\u009d\u00a2\u00ac\u00b0\u00b8\u00bb\u00c1\u00c6\u00c9\u00d0"+ - "\u00d8\u00db\u00e7\u00ea\u00ed\u00f4\u00fb\u00ff\u0103\u010a\u010e\u0112"+ - "\u0117\u011b\u0123\u0127\u012e\u0139\u013c\u0140\u014c\u014f\u0155\u015c"+ - "\u0163\u0166\u016a\u016e\u0172\u0174\u017f\u0184\u0188\u018b\u0191\u0194"+ - "\u019a\u019d\u019f\u01c2\u01ca\u01cc\u01d3\u01d8\u01db\u01e3\u01ec\u01f2"+ - "\u01fa\u01ff\u0205\u0208\u020f\u0217\u021d\u0229\u022b\u0235\u0243\u024a"+ - "\u0258\u0266\u026b\u0272\u0275\u027c\u0284\u0297\u02a4\u02ab\u02b0\u02b6"+ - "\u02b9\u02bd\u02c2\u02c5\u02c9\u02cc"; + "\13\37\3 \3 \3 \3 \3 \3 \5 \u0235\n \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 "+ + "\5 \u0242\n \3!\3!\3!\3!\3!\5!\u0249\n!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3"+ + "#\3#\3#\3#\3#\5#\u0257\n#\3$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\5%\u0265"+ + "\n%\3&\3&\3&\5&\u026a\n&\3&\3&\3&\7&\u026f\n&\f&\16&\u0272\13&\5&\u0274"+ + "\n&\3&\3&\3\'\3\'\3\'\5\'\u027b\n\'\3(\3(\3(\3(\6(\u0281\n(\r(\16(\u0282"+ + "\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\5(\u0296\n(\3)\3)"+ + "\3*\3*\3+\3+\3,\3,\3,\7,\u02a1\n,\f,\16,\u02a4\13,\3,\3,\3-\3-\5-\u02aa"+ + "\n-\3.\3.\3.\5.\u02af\n.\3.\3.\3.\3.\5.\u02b5\n.\3.\5.\u02b8\n.\3/\3/"+ + "\5/\u02bc\n/\3\60\3\60\3\60\5\60\u02c1\n\60\3\61\3\61\5\61\u02c5\n\61"+ + "\3\62\3\62\3\63\3\63\3\63\2\4.<\64\2\4\6\b\n\f\16\20\22\24\26\30\32\34"+ + "\36 \"$&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\^`bd\2\20\b\2\7\7\t\t\31\31"+ + ",,\62\62\66\66\4\2\"\"BB\4\2\t\t\62\62\4\2\37\37%%\3\2\25\26\4\2\7\7a"+ + "a\4\2\r\r\25\25\4\2\7\7\27\27\3\2XY\3\2Z\\\3\2RW\4\2\35\35CC\3\2_`\20"+ + 
"\2\b\t\22\24\31\31\33\33\36\36!\",,\62\62\668:<>?ABDEGG\u0320\2f\3\2\2"+ + "\2\4i\3\2\2\2\6\u00d0\3\2\2\2\b\u00db\3\2\2\2\n\u00df\3\2\2\2\f\u00f4"+ + "\3\2\2\2\16\u00fb\3\2\2\2\20\u00fd\3\2\2\2\22\u0101\3\2\2\2\24\u011d\3"+ + "\2\2\2\26\u0127\3\2\2\2\30\u0131\3\2\2\2\32\u0140\3\2\2\2\34\u0142\3\2"+ + "\2\2\36\u0148\3\2\2\2 \u014a\3\2\2\2\"\u0151\3\2\2\2$\u0163\3\2\2\2&\u0174"+ + "\3\2\2\2(\u0184\3\2\2\2*\u019f\3\2\2\2,\u01a1\3\2\2\2.\u01c2\3\2\2\2\60"+ + "\u01d3\3\2\2\2\62\u01d6\3\2\2\2\64\u0208\3\2\2\2\66\u020a\3\2\2\28\u020d"+ + "\3\2\2\2:\u0217\3\2\2\2<\u021d\3\2\2\2>\u0241\3\2\2\2@\u0248\3\2\2\2B"+ + "\u024a\3\2\2\2D\u0256\3\2\2\2F\u0258\3\2\2\2H\u0264\3\2\2\2J\u0266\3\2"+ + "\2\2L\u027a\3\2\2\2N\u0295\3\2\2\2P\u0297\3\2\2\2R\u0299\3\2\2\2T\u029b"+ + "\3\2\2\2V\u02a2\3\2\2\2X\u02a9\3\2\2\2Z\u02b7\3\2\2\2\\\u02bb\3\2\2\2"+ + "^\u02c0\3\2\2\2`\u02c4\3\2\2\2b\u02c6\3\2\2\2d\u02c8\3\2\2\2fg\5\6\4\2"+ + "gh\7\2\2\3h\3\3\2\2\2ij\5,\27\2jk\7\2\2\3k\5\3\2\2\2l\u00d1\5\b\5\2m{"+ + "\7\33\2\2nw\7\3\2\2op\78\2\2pv\t\2\2\2qr\7\36\2\2rv\t\3\2\2st\7G\2\2t"+ + "v\5R*\2uo\3\2\2\2uq\3\2\2\2us\3\2\2\2vy\3\2\2\2wu\3\2\2\2wx\3\2\2\2xz"+ + "\3\2\2\2yw\3\2\2\2z|\7\4\2\2{n\3\2\2\2{|\3\2\2\2|}\3\2\2\2}\u00d1\5\6"+ + "\4\2~\u008a\7\24\2\2\177\u0086\7\3\2\2\u0080\u0081\78\2\2\u0081\u0085"+ + "\t\4\2\2\u0082\u0083\7\36\2\2\u0083\u0085\t\3\2\2\u0084\u0080\3\2\2\2"+ + "\u0084\u0082\3\2\2\2\u0085\u0088\3\2\2\2\u0086\u0084\3\2\2\2\u0086\u0087"+ + "\3\2\2\2\u0087\u0089\3\2\2\2\u0088\u0086\3\2\2\2\u0089\u008b\7\4\2\2\u008a"+ + "\177\3\2\2\2\u008a\u008b\3\2\2\2\u008b\u008c\3\2\2\2\u008c\u00d1\5\6\4"+ + "\2\u008d\u008e\7>\2\2\u008e\u0091\7A\2\2\u008f\u0092\5\66\34\2\u0090\u0092"+ + "\5Z.\2\u0091\u008f\3\2\2\2\u0091\u0090\3\2\2\2\u0091\u0092\3\2\2\2\u0092"+ + "\u00d1\3\2\2\2\u0093\u0094\7>\2\2\u0094\u0095\7\23\2\2\u0095\u0098\t\5"+ + "\2\2\u0096\u0099\5\66\34\2\u0097\u0099\5Z.\2\u0098\u0096\3\2\2\2\u0098"+ + "\u0097\3\2\2\2\u0099\u00d1\3\2\2\2\u009a\u009d\t\6\2\2\u009b\u009e\5\66"+ + "\34\2\u009c\u009e\5Z.\2\u009d\u009b\3\2\2\2\u009d\u009c\3\2\2\2\u009e"+ + "\u00d1\3\2\2\2\u009f\u00a0\7>\2\2\u00a0\u00a2\7!\2\2\u00a1\u00a3\5\66"+ + "\34\2\u00a2\u00a1\3\2\2\2\u00a2\u00a3\3\2\2\2\u00a3\u00d1\3\2\2\2\u00a4"+ + "\u00a5\7>\2\2\u00a5\u00d1\7<\2\2\u00a6\u00a7\7?\2\2\u00a7\u00d1\7\22\2"+ + "\2\u00a8\u00a9\7?\2\2\u00a9\u00ac\7A\2\2\u00aa\u00ab\7\21\2\2\u00ab\u00ad"+ + "\5\66\34\2\u00ac\u00aa\3\2\2\2\u00ac\u00ad\3\2\2\2\u00ad\u00b0\3\2\2\2"+ + "\u00ae\u00b1\5\66\34\2\u00af\u00b1\5Z.\2\u00b0\u00ae\3\2\2\2\u00b0\u00af"+ + "\3\2\2\2\u00b0\u00b1\3\2\2\2\u00b1\u00bb\3\2\2\2\u00b2\u00b3\7D\2\2\u00b3"+ + "\u00b8\5b\62\2\u00b4\u00b5\7\5\2\2\u00b5\u00b7\5b\62\2\u00b6\u00b4\3\2"+ + "\2\2\u00b7\u00ba\3\2\2\2\u00b8\u00b6\3\2\2\2\u00b8\u00b9\3\2\2\2\u00b9"+ + "\u00bc\3\2\2\2\u00ba\u00b8\3\2\2\2\u00bb\u00b2\3\2\2\2\u00bb\u00bc\3\2"+ + "\2\2\u00bc\u00d1\3\2\2\2\u00bd\u00be\7?\2\2\u00be\u00c1\7\23\2\2\u00bf"+ + "\u00c0\7\21\2\2\u00c0\u00c2\5b\62\2\u00c1\u00bf\3\2\2\2\u00c1\u00c2\3"+ + "\2\2\2\u00c2\u00c6\3\2\2\2\u00c3\u00c4\7@\2\2\u00c4\u00c7\5\66\34\2\u00c5"+ + "\u00c7\5Z.\2\u00c6\u00c3\3\2\2\2\u00c6\u00c5\3\2\2\2\u00c6\u00c7\3\2\2"+ + "\2\u00c7\u00c9\3\2\2\2\u00c8\u00ca\5\66\34\2\u00c9\u00c8\3\2\2\2\u00c9"+ + "\u00ca\3\2\2\2\u00ca\u00d1\3\2\2\2\u00cb\u00cc\7?\2\2\u00cc\u00d1\7E\2"+ + "\2\u00cd\u00ce\7?\2\2\u00ce\u00cf\7@\2\2\u00cf\u00d1\7E\2\2\u00d0l\3\2"+ + "\2\2\u00d0m\3\2\2\2\u00d0~\3\2\2\2\u00d0\u008d\3\2\2\2\u00d0\u0093\3\2"+ + "\2\2\u00d0\u009a\3\2\2\2\u00d0\u009f\3\2\2\2\u00d0\u00a4\3\2\2\2\u00d0"+ + 
"\u00a6\3\2\2\2\u00d0\u00a8\3\2\2\2\u00d0\u00bd\3\2\2\2\u00d0\u00cb\3\2"+ + "\2\2\u00d0\u00cd\3\2\2\2\u00d1\7\3\2\2\2\u00d2\u00d3\7I\2\2\u00d3\u00d8"+ + "\5\34\17\2\u00d4\u00d5\7\5\2\2\u00d5\u00d7\5\34\17\2\u00d6\u00d4\3\2\2"+ + "\2\u00d7\u00da\3\2\2\2\u00d8\u00d6\3\2\2\2\u00d8\u00d9\3\2\2\2\u00d9\u00dc"+ + "\3\2\2\2\u00da\u00d8\3\2\2\2\u00db\u00d2\3\2\2\2\u00db\u00dc\3\2\2\2\u00dc"+ + "\u00dd\3\2\2\2\u00dd\u00de\5\n\6\2\u00de\t\3\2\2\2\u00df\u00ea\5\16\b"+ + "\2\u00e0\u00e1\7\64\2\2\u00e1\u00e2\7\17\2\2\u00e2\u00e7\5\20\t\2\u00e3"+ + "\u00e4\7\5\2\2\u00e4\u00e6\5\20\t\2\u00e5\u00e3\3\2\2\2\u00e6\u00e9\3"+ + "\2\2\2\u00e7\u00e5\3\2\2\2\u00e7\u00e8\3\2\2\2\u00e8\u00eb\3\2\2\2\u00e9"+ + "\u00e7\3\2\2\2\u00ea\u00e0\3\2\2\2\u00ea\u00eb\3\2\2\2\u00eb\u00ed\3\2"+ + "\2\2\u00ec\u00ee\5\f\7\2\u00ed\u00ec\3\2\2\2\u00ed\u00ee\3\2\2\2\u00ee"+ + "\13\3\2\2\2\u00ef\u00f0\7+\2\2\u00f0\u00f5\t\7\2\2\u00f1\u00f2\7L\2\2"+ + "\u00f2\u00f3\t\7\2\2\u00f3\u00f5\7Q\2\2\u00f4\u00ef\3\2\2\2\u00f4\u00f1"+ + "\3\2\2\2\u00f5\r\3\2\2\2\u00f6\u00fc\5\22\n\2\u00f7\u00f8\7\3\2\2\u00f8"+ + "\u00f9\5\n\6\2\u00f9\u00fa\7\4\2\2\u00fa\u00fc\3\2\2\2\u00fb\u00f6\3\2"+ + "\2\2\u00fb\u00f7\3\2\2\2\u00fc\17\3\2\2\2\u00fd\u00ff\5,\27\2\u00fe\u0100"+ + "\t\b\2\2\u00ff\u00fe\3\2\2\2\u00ff\u0100\3\2\2\2\u0100\21\3\2\2\2\u0101"+ + "\u0103\7=\2\2\u0102\u0104\5\36\20\2\u0103\u0102\3\2\2\2\u0103\u0104\3"+ + "\2\2\2\u0104\u0105\3\2\2\2\u0105\u010a\5 \21\2\u0106\u0107\7\5\2\2\u0107"+ + "\u0109\5 \21\2\u0108\u0106\3\2\2\2\u0109\u010c\3\2\2\2\u010a\u0108\3\2"+ + "\2\2\u010a\u010b\3\2\2\2\u010b\u010e\3\2\2\2\u010c\u010a\3\2\2\2\u010d"+ + "\u010f\5\24\13\2\u010e\u010d\3\2\2\2\u010e\u010f\3\2\2\2\u010f\u0112\3"+ + "\2\2\2\u0110\u0111\7H\2\2\u0111\u0113\5.\30\2\u0112\u0110\3\2\2\2\u0112"+ + "\u0113\3\2\2\2\u0113\u0117\3\2\2\2\u0114\u0115\7#\2\2\u0115\u0116\7\17"+ + "\2\2\u0116\u0118\5\26\f\2\u0117\u0114\3\2\2\2\u0117\u0118\3\2\2\2\u0118"+ + "\u011b\3\2\2\2\u0119\u011a\7$\2\2\u011a\u011c\5.\30\2\u011b\u0119\3\2"+ + "\2\2\u011b\u011c\3\2\2\2\u011c\23\3\2\2\2\u011d\u011e\7\37\2\2\u011e\u0123"+ + "\5\"\22\2\u011f\u0120\7\5\2\2\u0120\u0122\5\"\22\2\u0121\u011f\3\2\2\2"+ + "\u0122\u0125\3\2\2\2\u0123\u0121\3\2\2\2\u0123\u0124\3\2\2\2\u0124\25"+ + "\3\2\2\2\u0125\u0123\3\2\2\2\u0126\u0128\5\36\20\2\u0127\u0126\3\2\2\2"+ + "\u0127\u0128\3\2\2\2\u0128\u0129\3\2\2\2\u0129\u012e\5\30\r\2\u012a\u012b"+ + "\7\5\2\2\u012b\u012d\5\30\r\2\u012c\u012a\3\2\2\2\u012d\u0130\3\2\2\2"+ + "\u012e\u012c\3\2\2\2\u012e\u012f\3\2\2\2\u012f\27\3\2\2\2\u0130\u012e"+ + "\3\2\2\2\u0131\u0132\5\32\16\2\u0132\31\3\2\2\2\u0133\u013c\7\3\2\2\u0134"+ + "\u0139\5,\27\2\u0135\u0136\7\5\2\2\u0136\u0138\5,\27\2\u0137\u0135\3\2"+ + "\2\2\u0138\u013b\3\2\2\2\u0139\u0137\3\2\2\2\u0139\u013a\3\2\2\2\u013a"+ + "\u013d\3\2\2\2\u013b\u0139\3\2\2\2\u013c\u0134\3\2\2\2\u013c\u013d\3\2"+ + "\2\2\u013d\u013e\3\2\2\2\u013e\u0141\7\4\2\2\u013f\u0141\5,\27\2\u0140"+ + "\u0133\3\2\2\2\u0140\u013f\3\2\2\2\u0141\33\3\2\2\2\u0142\u0143\5X-\2"+ + "\u0143\u0144\7\f\2\2\u0144\u0145\7\3\2\2\u0145\u0146\5\n\6\2\u0146\u0147"+ + "\7\4\2\2\u0147\35\3\2\2\2\u0148\u0149\t\t\2\2\u0149\37\3\2\2\2\u014a\u014f"+ + "\5,\27\2\u014b\u014d\7\f\2\2\u014c\u014b\3\2\2\2\u014c\u014d\3\2\2\2\u014d"+ + "\u014e\3\2\2\2\u014e\u0150\5X-\2\u014f\u014c\3\2\2\2\u014f\u0150\3\2\2"+ + "\2\u0150!\3\2\2\2\u0151\u0155\5*\26\2\u0152\u0154\5$\23\2\u0153\u0152"+ + "\3\2\2\2\u0154\u0157\3\2\2\2\u0155\u0153\3\2\2\2\u0155\u0156\3\2\2\2\u0156"+ + "#\3\2\2\2\u0157\u0155\3\2\2\2\u0158\u0159\5&\24\2\u0159\u015a\7(\2\2\u015a"+ + 
"\u015c\5*\26\2\u015b\u015d\5(\25\2\u015c\u015b\3\2\2\2\u015c\u015d\3\2"+ + "\2\2\u015d\u0164\3\2\2\2\u015e\u015f\7.\2\2\u015f\u0160\5&\24\2\u0160"+ + "\u0161\7(\2\2\u0161\u0162\5*\26\2\u0162\u0164\3\2\2\2\u0163\u0158\3\2"+ + "\2\2\u0163\u015e\3\2\2\2\u0164%\3\2\2\2\u0165\u0167\7&\2\2\u0166\u0165"+ + "\3\2\2\2\u0166\u0167\3\2\2\2\u0167\u0175\3\2\2\2\u0168\u016a\7)\2\2\u0169"+ + "\u016b\7\65\2\2\u016a\u0169\3\2\2\2\u016a\u016b\3\2\2\2\u016b\u0175\3"+ + "\2\2\2\u016c\u016e\79\2\2\u016d\u016f\7\65\2\2\u016e\u016d\3\2\2\2\u016e"+ + "\u016f\3\2\2\2\u016f\u0175\3\2\2\2\u0170\u0172\7 \2\2\u0171\u0173\7\65"+ + "\2\2\u0172\u0171\3\2\2\2\u0172\u0173\3\2\2\2\u0173\u0175\3\2\2\2\u0174"+ + "\u0166\3\2\2\2\u0174\u0168\3\2\2\2\u0174\u016c\3\2\2\2\u0174\u0170\3\2"+ + "\2\2\u0175\'\3\2\2\2\u0176\u0177\7\61\2\2\u0177\u0185\5.\30\2\u0178\u0179"+ + "\7F\2\2\u0179\u017a\7\3\2\2\u017a\u017f\5X-\2\u017b\u017c\7\5\2\2\u017c"+ + "\u017e\5X-\2\u017d\u017b\3\2\2\2\u017e\u0181\3\2\2\2\u017f\u017d\3\2\2"+ + "\2\u017f\u0180\3\2\2\2\u0180\u0182\3\2\2\2\u0181\u017f\3\2\2\2\u0182\u0183"+ + "\7\4\2\2\u0183\u0185\3\2\2\2\u0184\u0176\3\2\2\2\u0184\u0178\3\2\2\2\u0185"+ + ")\3\2\2\2\u0186\u018b\5Z.\2\u0187\u0189\7\f\2\2\u0188\u0187\3\2\2\2\u0188"+ + "\u0189\3\2\2\2\u0189\u018a\3\2\2\2\u018a\u018c\5V,\2\u018b\u0188\3\2\2"+ + "\2\u018b\u018c\3\2\2\2\u018c\u01a0\3\2\2\2\u018d\u018e\7\3\2\2\u018e\u018f"+ + "\5\n\6\2\u018f\u0194\7\4\2\2\u0190\u0192\7\f\2\2\u0191\u0190\3\2\2\2\u0191"+ + "\u0192\3\2\2\2\u0192\u0193\3\2\2\2\u0193\u0195\5V,\2\u0194\u0191\3\2\2"+ + "\2\u0194\u0195\3\2\2\2\u0195\u01a0\3\2\2\2\u0196\u0197\7\3\2\2\u0197\u0198"+ + "\5\"\22\2\u0198\u019d\7\4\2\2\u0199\u019b\7\f\2\2\u019a\u0199\3\2\2\2"+ + "\u019a\u019b\3\2\2\2\u019b\u019c\3\2\2\2\u019c\u019e\5V,\2\u019d\u019a"+ + "\3\2\2\2\u019d\u019e\3\2\2\2\u019e\u01a0\3\2\2\2\u019f\u0186\3\2\2\2\u019f"+ + "\u018d\3\2\2\2\u019f\u0196\3\2\2\2\u01a0+\3\2\2\2\u01a1\u01a2\5.\30\2"+ + "\u01a2-\3\2\2\2\u01a3\u01a4\b\30\1\2\u01a4\u01a5\7/\2\2\u01a5\u01c3\5"+ + ".\30\n\u01a6\u01a7\7\32\2\2\u01a7\u01a8\7\3\2\2\u01a8\u01a9\5\b\5\2\u01a9"+ + "\u01aa\7\4\2\2\u01aa\u01c3\3\2\2\2\u01ab\u01ac\7;\2\2\u01ac\u01ad\7\3"+ + "\2\2\u01ad\u01ae\5b\62\2\u01ae\u01af\5\60\31\2\u01af\u01b0\7\4\2\2\u01b0"+ + "\u01c3\3\2\2\2\u01b1\u01b2\7-\2\2\u01b2\u01b3\7\3\2\2\u01b3\u01b4\5V,"+ + "\2\u01b4\u01b5\7\5\2\2\u01b5\u01b6\5b\62\2\u01b6\u01b7\5\60\31\2\u01b7"+ + "\u01b8\7\4\2\2\u01b8\u01c3\3\2\2\2\u01b9\u01ba\7-\2\2\u01ba\u01bb\7\3"+ + "\2\2\u01bb\u01bc\5b\62\2\u01bc\u01bd\7\5\2\2\u01bd\u01be\5b\62\2\u01be"+ + "\u01bf\5\60\31\2\u01bf\u01c0\7\4\2\2\u01c0\u01c3\3\2\2\2\u01c1\u01c3\5"+ + "\62\32\2\u01c2\u01a3\3\2\2\2\u01c2\u01a6\3\2\2\2\u01c2\u01ab\3\2\2\2\u01c2"+ + "\u01b1\3\2\2\2\u01c2\u01b9\3\2\2\2\u01c2\u01c1\3\2\2\2\u01c3\u01cc\3\2"+ + "\2\2\u01c4\u01c5\f\4\2\2\u01c5\u01c6\7\n\2\2\u01c6\u01cb\5.\30\5\u01c7"+ + "\u01c8\f\3\2\2\u01c8\u01c9\7\63\2\2\u01c9\u01cb\5.\30\4\u01ca\u01c4\3"+ + "\2\2\2\u01ca\u01c7\3\2\2\2\u01cb\u01ce\3\2\2\2\u01cc\u01ca\3\2\2\2\u01cc"+ + "\u01cd\3\2\2\2\u01cd/\3\2\2\2\u01ce\u01cc\3\2\2\2\u01cf\u01d0\7\5\2\2"+ + "\u01d0\u01d2\5b\62\2\u01d1\u01cf\3\2\2\2\u01d2\u01d5\3\2\2\2\u01d3\u01d1"+ + "\3\2\2\2\u01d3\u01d4\3\2\2\2\u01d4\61\3\2\2\2\u01d5\u01d3\3\2\2\2\u01d6"+ + "\u01d8\5<\37\2\u01d7\u01d9\5\64\33\2\u01d8\u01d7\3\2\2\2\u01d8\u01d9\3"+ + "\2\2\2\u01d9\63\3\2\2\2\u01da\u01dc\7/\2\2\u01db\u01da\3\2\2\2\u01db\u01dc"+ + "\3\2\2\2\u01dc\u01dd\3\2\2\2\u01dd\u01de\7\16\2\2\u01de\u01df\5<\37\2"+ + "\u01df\u01e0\7\n\2\2\u01e0\u01e1\5<\37\2\u01e1\u0209\3\2\2\2\u01e2\u01e4"+ + 
"\7/\2\2\u01e3\u01e2\3\2\2\2\u01e3\u01e4\3\2\2\2\u01e4\u01e5\3\2\2\2\u01e5"+ + "\u01e6\7%\2\2\u01e6\u01e7\7\3\2\2\u01e7\u01ec\5,\27\2\u01e8\u01e9\7\5"+ + "\2\2\u01e9\u01eb\5,\27\2\u01ea\u01e8\3\2\2\2\u01eb\u01ee\3\2\2\2\u01ec"+ + "\u01ea\3\2\2\2\u01ec\u01ed\3\2\2\2\u01ed\u01ef\3\2\2\2\u01ee\u01ec\3\2"+ + "\2\2\u01ef\u01f0\7\4\2\2\u01f0\u0209\3\2\2\2\u01f1\u01f3\7/\2\2\u01f2"+ + "\u01f1\3\2\2\2\u01f2\u01f3\3\2\2\2\u01f3\u01f4\3\2\2\2\u01f4\u01f5\7%"+ + "\2\2\u01f5\u01f6\7\3\2\2\u01f6\u01f7\5\b\5\2\u01f7\u01f8\7\4\2\2\u01f8"+ + "\u0209\3\2\2\2\u01f9\u01fb\7/\2\2\u01fa\u01f9\3\2\2\2\u01fa\u01fb\3\2"+ + "\2\2\u01fb\u01fc\3\2\2\2\u01fc\u01fd\7*\2\2\u01fd\u0209\58\35\2\u01fe"+ + "\u0200\7/\2\2\u01ff\u01fe\3\2\2\2\u01ff\u0200\3\2\2\2\u0200\u0201\3\2"+ + "\2\2\u0201\u0202\7:\2\2\u0202\u0209\5b\62\2\u0203\u0205\7\'\2\2\u0204"+ + "\u0206\7/\2\2\u0205\u0204\3\2\2\2\u0205\u0206\3\2\2\2\u0206\u0207\3\2"+ + "\2\2\u0207\u0209\7\60\2\2\u0208\u01db\3\2\2\2\u0208\u01e3\3\2\2\2\u0208"+ + "\u01f2\3\2\2\2\u0208\u01fa\3\2\2\2\u0208\u01ff\3\2\2\2\u0208\u0203\3\2"+ + "\2\2\u0209\65\3\2\2\2\u020a\u020b\7*\2\2\u020b\u020c\58\35\2\u020c\67"+ + "\3\2\2\2\u020d\u020f\5b\62\2\u020e\u0210\5:\36\2\u020f\u020e\3\2\2\2\u020f"+ + "\u0210\3\2\2\2\u02109\3\2\2\2\u0211\u0212\7\30\2\2\u0212\u0218\5b\62\2"+ + "\u0213\u0214\7J\2\2\u0214\u0215\5b\62\2\u0215\u0216\7Q\2\2\u0216\u0218"+ + "\3\2\2\2\u0217\u0211\3\2\2\2\u0217\u0213\3\2\2\2\u0218;\3\2\2\2\u0219"+ + "\u021a\b\37\1\2\u021a\u021e\5> \2\u021b\u021c\t\n\2\2\u021c\u021e\5<\37"+ + "\6\u021d\u0219\3\2\2\2\u021d\u021b\3\2\2\2\u021e\u022b\3\2\2\2\u021f\u0220"+ + "\f\5\2\2\u0220\u0221\t\13\2\2\u0221\u022a\5<\37\6\u0222\u0223\f\4\2\2"+ + "\u0223\u0224\t\n\2\2\u0224\u022a\5<\37\5\u0225\u0226\f\3\2\2\u0226\u0227"+ + "\5P)\2\u0227\u0228\5<\37\4\u0228\u022a\3\2\2\2\u0229\u021f\3\2\2\2\u0229"+ + "\u0222\3\2\2\2\u0229\u0225\3\2\2\2\u022a\u022d\3\2\2\2\u022b\u0229\3\2"+ + "\2\2\u022b\u022c\3\2\2\2\u022c=\3\2\2\2\u022d\u022b\3\2\2\2\u022e\u0242"+ + "\5@!\2\u022f\u0242\5D#\2\u0230\u0242\5N(\2\u0231\u0232\5V,\2\u0232\u0233"+ + "\7^\2\2\u0233\u0235\3\2\2\2\u0234\u0231\3\2\2\2\u0234\u0235\3\2\2\2\u0235"+ + "\u0236\3\2\2\2\u0236\u0242\7Z\2\2\u0237\u0242\5H%\2\u0238\u0239\7\3\2"+ + "\2\u0239\u023a\5\b\5\2\u023a\u023b\7\4\2\2\u023b\u0242\3\2\2\2\u023c\u0242"+ + "\5V,\2\u023d\u023e\7\3\2\2\u023e\u023f\5,\27\2\u023f\u0240\7\4\2\2\u0240"+ + "\u0242\3\2\2\2\u0241\u022e\3\2\2\2\u0241\u022f\3\2\2\2\u0241\u0230\3\2"+ + "\2\2\u0241\u0234\3\2\2\2\u0241\u0237\3\2\2\2\u0241\u0238\3\2\2\2\u0241"+ + "\u023c\3\2\2\2\u0241\u023d\3\2\2\2\u0242?\3\2\2\2\u0243\u0249\5B\"\2\u0244"+ + "\u0245\7K\2\2\u0245\u0246\5B\"\2\u0246\u0247\7Q\2\2\u0247\u0249\3\2\2"+ + "\2\u0248\u0243\3\2\2\2\u0248\u0244\3\2\2\2\u0249A\3\2\2\2\u024a\u024b"+ + "\7\20\2\2\u024b\u024c\7\3\2\2\u024c\u024d\5,\27\2\u024d\u024e\7\f\2\2"+ + "\u024e\u024f\5T+\2\u024f\u0250\7\4\2\2\u0250C\3\2\2\2\u0251\u0257\5F$"+ + "\2\u0252\u0253\7K\2\2\u0253\u0254\5F$\2\u0254\u0255\7Q\2\2\u0255\u0257"+ + "\3\2\2\2\u0256\u0251\3\2\2\2\u0256\u0252\3\2\2\2\u0257E\3\2\2\2\u0258"+ + "\u0259\7\34\2\2\u0259\u025a\7\3\2\2\u025a\u025b\5X-\2\u025b\u025c\7\37"+ + "\2\2\u025c\u025d\5<\37\2\u025d\u025e\7\4\2\2\u025eG\3\2\2\2\u025f\u0265"+ + "\5J&\2\u0260\u0261\7K\2\2\u0261\u0262\5J&\2\u0262\u0263\7Q\2\2\u0263\u0265"+ + "\3\2\2\2\u0264\u025f\3\2\2\2\u0264\u0260\3\2\2\2\u0265I\3\2\2\2\u0266"+ + "\u0267\5L\'\2\u0267\u0273\7\3\2\2\u0268\u026a\5\36\20\2\u0269\u0268\3"+ + "\2\2\2\u0269\u026a\3\2\2\2\u026a\u026b\3\2\2\2\u026b\u0270\5,\27\2\u026c"+ + 
"\u026d\7\5\2\2\u026d\u026f\5,\27\2\u026e\u026c\3\2\2\2\u026f\u0272\3\2"+ + "\2\2\u0270\u026e\3\2\2\2\u0270\u0271\3\2\2\2\u0271\u0274\3\2\2\2\u0272"+ + "\u0270\3\2\2\2\u0273\u0269\3\2\2\2\u0273\u0274\3\2\2\2\u0274\u0275\3\2"+ + "\2\2\u0275\u0276\7\4\2\2\u0276K\3\2\2\2\u0277\u027b\7)\2\2\u0278\u027b"+ + "\79\2\2\u0279\u027b\5X-\2\u027a\u0277\3\2\2\2\u027a\u0278\3\2\2\2\u027a"+ + "\u0279\3\2\2\2\u027bM\3\2\2\2\u027c\u0296\7\60\2\2\u027d\u0296\5`\61\2"+ + "\u027e\u0296\5R*\2\u027f\u0281\7`\2\2\u0280\u027f\3\2\2\2\u0281\u0282"+ + "\3\2\2\2\u0282\u0280\3\2\2\2\u0282\u0283\3\2\2\2\u0283\u0296\3\2\2\2\u0284"+ + "\u0296\7_\2\2\u0285\u0286\7M\2\2\u0286\u0287\5b\62\2\u0287\u0288\7Q\2"+ + "\2\u0288\u0296\3\2\2\2\u0289\u028a\7N\2\2\u028a\u028b\5b\62\2\u028b\u028c"+ + "\7Q\2\2\u028c\u0296\3\2\2\2\u028d\u028e\7O\2\2\u028e\u028f\5b\62\2\u028f"+ + "\u0290\7Q\2\2\u0290\u0296\3\2\2\2\u0291\u0292\7P\2\2\u0292\u0293\5b\62"+ + "\2\u0293\u0294\7Q\2\2\u0294\u0296\3\2\2\2\u0295\u027c\3\2\2\2\u0295\u027d"+ + "\3\2\2\2\u0295\u027e\3\2\2\2\u0295\u0280\3\2\2\2\u0295\u0284\3\2\2\2\u0295"+ + "\u0285\3\2\2\2\u0295\u0289\3\2\2\2\u0295\u028d\3\2\2\2\u0295\u0291\3\2"+ + "\2\2\u0296O\3\2\2\2\u0297\u0298\t\f\2\2\u0298Q\3\2\2\2\u0299\u029a\t\r"+ + "\2\2\u029aS\3\2\2\2\u029b\u029c\5X-\2\u029cU\3\2\2\2\u029d\u029e\5X-\2"+ + "\u029e\u029f\7^\2\2\u029f\u02a1\3\2\2\2\u02a0\u029d\3\2\2\2\u02a1\u02a4"+ + "\3\2\2\2\u02a2\u02a0\3\2\2\2\u02a2\u02a3\3\2\2\2\u02a3\u02a5\3\2\2\2\u02a4"+ + "\u02a2\3\2\2\2\u02a5\u02a6\5X-\2\u02a6W\3\2\2\2\u02a7\u02aa\5\\/\2\u02a8"+ + "\u02aa\5^\60\2\u02a9\u02a7\3\2\2\2\u02a9\u02a8\3\2\2\2\u02aaY\3\2\2\2"+ + "\u02ab\u02ac\5X-\2\u02ac\u02ad\7\6\2\2\u02ad\u02af\3\2\2\2\u02ae\u02ab"+ + "\3\2\2\2\u02ae\u02af\3\2\2\2\u02af\u02b0\3\2\2\2\u02b0\u02b8\7e\2\2\u02b1"+ + "\u02b2\5X-\2\u02b2\u02b3\7\6\2\2\u02b3\u02b5\3\2\2\2\u02b4\u02b1\3\2\2"+ + "\2\u02b4\u02b5\3\2\2\2\u02b5\u02b6\3\2\2\2\u02b6\u02b8\5X-\2\u02b7\u02ae"+ + "\3\2\2\2\u02b7\u02b4\3\2\2\2\u02b8[\3\2\2\2\u02b9\u02bc\7f\2\2\u02ba\u02bc"+ + "\7g\2\2\u02bb\u02b9\3\2\2\2\u02bb\u02ba\3\2\2\2\u02bc]\3\2\2\2\u02bd\u02c1"+ + "\7c\2\2\u02be\u02c1\5d\63\2\u02bf\u02c1\7d\2\2\u02c0\u02bd\3\2\2\2\u02c0"+ + "\u02be\3\2\2\2\u02c0\u02bf\3\2\2\2\u02c1_\3\2\2\2\u02c2\u02c5\7b\2\2\u02c3"+ + "\u02c5\7a\2\2\u02c4\u02c2\3\2\2\2\u02c4\u02c3\3\2\2\2\u02c5a\3\2\2\2\u02c6"+ + "\u02c7\t\16\2\2\u02c7c\3\2\2\2\u02c8\u02c9\t\17\2\2\u02c9e\3\2\2\2`uw"+ + "{\u0084\u0086\u008a\u0091\u0098\u009d\u00a2\u00ac\u00b0\u00b8\u00bb\u00c1"+ + "\u00c6\u00c9\u00d0\u00d8\u00db\u00e7\u00ea\u00ed\u00f4\u00fb\u00ff\u0103"+ + "\u010a\u010e\u0112\u0117\u011b\u0123\u0127\u012e\u0139\u013c\u0140\u014c"+ + "\u014f\u0155\u015c\u0163\u0166\u016a\u016e\u0172\u0174\u017f\u0184\u0188"+ + "\u018b\u0191\u0194\u019a\u019d\u019f\u01c2\u01ca\u01cc\u01d3\u01d8\u01db"+ + "\u01e3\u01ec\u01f2\u01fa\u01ff\u0205\u0208\u020f\u0217\u021d\u0229\u022b"+ + "\u0234\u0241\u0248\u0256\u0264\u0269\u0270\u0273\u027a\u0282\u0295\u02a2"+ + "\u02a9\u02ae\u02b4\u02b7\u02bb\u02c0\u02c4"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java index 3b1b730e81b..afb4747ae49 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java @@ -400,13 +400,6 @@ interface 
SqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx); - /** - * Visit a parse tree produced by the {@code columnReference} - * labeled alternative in {@link SqlBaseParser#primaryExpression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitColumnReference(SqlBaseParser.ColumnReferenceContext ctx); /** * Visit a parse tree produced by the {@code dereference} * labeled alternative in {@link SqlBaseParser#primaryExpression}. diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java index 2824b5502a8..2aee552907b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.sql.parser; +import com.carrotsearch.hppc.ObjectShortHashMap; import org.antlr.v4.runtime.BaseErrorListener; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.CommonToken; @@ -22,8 +23,8 @@ import org.antlr.v4.runtime.atn.PredictionMode; import org.antlr.v4.runtime.dfa.DFA; import org.antlr.v4.runtime.misc.Pair; import org.antlr.v4.runtime.tree.TerminalNode; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; @@ -41,7 +42,8 @@ import java.util.function.Function; import static java.lang.String.format; public class SqlParser { - private static final Logger log = Loggers.getLogger(SqlParser.class); + + private static final Logger log = LogManager.getLogger(); private final boolean DEBUG = false; @@ -83,7 +85,9 @@ public class SqlParser { return invokeParser(expression, params, SqlBaseParser::singleExpression, AstBuilder::expression); } - private T invokeParser(String sql, List params, Function parseFunction, + private T invokeParser(String sql, + List params, Function parseFunction, BiFunction visitor) { SqlBaseLexer lexer = new SqlBaseLexer(new CaseInsensitiveStream(sql)); @@ -96,6 +100,7 @@ public class SqlParser { CommonTokenStream tokenStream = new CommonTokenStream(tokenSource); SqlBaseParser parser = new SqlBaseParser(tokenStream); + parser.addParseListener(new CircuitBreakerListener()); parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames()))); parser.removeErrorListeners(); @@ -125,10 +130,10 @@ public class SqlParser { return visitor.apply(new AstBuilder(paramTokens), tree); } - private void debug(SqlBaseParser parser) { + private static void debug(SqlBaseParser parser) { // when debugging, use the exact prediction mode (needed for diagnostics as well) - parser.getInterpreter().setPredictionMode(PredictionMode.SLL); + parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION); parser.addParseListener(parser.new TraceListener()); @@ -154,7 +159,7 @@ public class SqlParser { public void exitBackQuotedIdentifier(SqlBaseParser.BackQuotedIdentifierContext context) { Token token = context.BACKQUOTED_IDENTIFIER().getSymbol(); throw new ParsingException( - "backquoted indetifiers not supported; please use double quotes instead", + "backquoted identifiers not supported; please use double quotes 
instead", null, token.getLine(), token.getCharPositionInLine()); @@ -205,6 +210,35 @@ public class SqlParser { } } + /** + * Used to catch large expressions that can lead to stack overflows + */ + private class CircuitBreakerListener extends SqlBaseBaseListener { + + private static final short MAX_RULE_DEPTH = 100; + + // Keep current depth for every rule visited. + // The totalDepth alone cannot be used as expressions like: e1 OR e2 OR e3 OR ... + // are processed as e1 OR (e2 OR (e3 OR (... and this results in the totalDepth not growing + // while the stack call depth is, leading to a StackOverflowError. + private ObjectShortHashMap depthCounts = new ObjectShortHashMap<>(); + + @Override + public void enterEveryRule(ParserRuleContext ctx) { + short currentDepth = depthCounts.putOrAdd(ctx.getClass().getSimpleName(), (short) 1, (short) 1); + if (currentDepth > MAX_RULE_DEPTH) { + throw new ParsingException("expression is too large to parse, (tree's depth exceeds {})", MAX_RULE_DEPTH); + } + super.enterEveryRule(ctx); + } + + @Override + public void exitEveryRule(ParserRuleContext ctx) { + depthCounts.putOrAdd(ctx.getClass().getSimpleName(), (short) 0, (short) -1); + super.exitEveryRule(ctx); + } + } + private static final BaseErrorListener ERROR_LISTENER = new BaseErrorListener() { @Override public void syntaxError(Recognizer recognizer, Object offendingSymbol, int line, diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java index e2197d42608..aa2e784de3d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.sql.plan.logical.command; import org.elasticsearch.action.ActionListener; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.FieldAttribute; -import org.elasticsearch.xpack.sql.expression.regex.LikePattern; +import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowFunctions.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowFunctions.java index 9fdbab46eb8..c71d492faff 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowFunctions.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowFunctions.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.function.FunctionDefinition; import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry; -import org.elasticsearch.xpack.sql.expression.regex.LikePattern; +import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowTables.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowTables.java index 0735f870545..7f6c0c355e3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowTables.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowTables.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.plan.logical.command; import org.elasticsearch.action.ActionListener; import org.elasticsearch.xpack.sql.expression.Attribute; -import org.elasticsearch.xpack.sql.expression.regex.LikePattern; +import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java index 6337108b54b..40475f4fe57 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java @@ -9,7 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; import org.elasticsearch.xpack.sql.analysis.index.EsIndex; import org.elasticsearch.xpack.sql.expression.Attribute; -import org.elasticsearch.xpack.sql.expression.regex.LikePattern; +import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.sql.plan.logical.command.Command; import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTables.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTables.java index 69d0ad50648..58b739cc00f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTables.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTables.java @@ -9,7 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.xpack.sql.analysis.index.IndexResolver.IndexInfo; import org.elasticsearch.xpack.sql.analysis.index.IndexResolver.IndexType; import org.elasticsearch.xpack.sql.expression.Attribute; -import org.elasticsearch.xpack.sql.expression.regex.LikePattern; +import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.sql.plan.logical.command.Command; import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java index 2a31d697431..8c813019330 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java @@ -25,11 +25,10 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; import 
org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeHistogramFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.AggPathInput; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.AggPathInput; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.UnaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.plan.physical.AggregateExec; import org.elasticsearch.xpack.sql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.sql.plan.physical.FilterExec; @@ -112,7 +111,7 @@ class QueryFolder extends RuleExecutor { QueryContainer queryC = exec.queryContainer(); Map aliases = new LinkedHashMap<>(queryC.aliases()); - Map processors = new LinkedHashMap<>(queryC.scalarFunctions()); + Map processors = new LinkedHashMap<>(queryC.scalarFunctions()); for (NamedExpression pj : project.projections()) { if (pj instanceof Alias) { @@ -124,10 +123,10 @@ class QueryFolder extends RuleExecutor { aliases.put(aliasAttr, attr); // add placeholder for each scalar function if (e instanceof ScalarFunction) { - processors.put(attr, ProcessorDefinitions.toProcessorDefinition(e)); + processors.put(attr, Expressions.pipe(e)); } } else { - processors.put(aliasAttr, ProcessorDefinitions.toProcessorDefinition(e)); + processors.put(aliasAttr, Expressions.pipe(e)); } } else { @@ -137,7 +136,7 @@ class QueryFolder extends RuleExecutor { if (pj instanceof ScalarFunction) { ScalarFunction f = (ScalarFunction) pj; - processors.put(f.toAttribute(), f.asProcessorDefinition()); + processors.put(f.toAttribute(), Expressions.pipe(f)); } } } @@ -249,7 +248,7 @@ class QueryFolder extends RuleExecutor { // ) if (child instanceof ScalarFunction) { ScalarFunction f = (ScalarFunction) child; - ProcessorDefinition proc = f.asProcessorDefinition(); + Pipe proc = f.asPipe(); final AtomicReference qC = new AtomicReference<>(queryC); @@ -288,7 +287,7 @@ class QueryFolder extends RuleExecutor { * as it already got access to the extraction action */ if (exp instanceof DateTimeHistogramFunction) { - action = ((UnaryProcessorDefinition) p).action(); + action = ((UnaryPipe) p).action(); tz = ((DateTimeFunction) exp).timeZone(); } return new AggPathInput(exp.location(), exp, new GroupByRef(matchingGroup.id(), null, tz), action); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java index e691aef8d3e..12b68ceee33 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.sql.planner; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import 
org.elasticsearch.xpack.sql.expression.Attribute; -import org.elasticsearch.xpack.sql.expression.BinaryExpression; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.ExpressionId; import org.elasticsearch.xpack.sql.expression.Expressions; @@ -18,7 +17,6 @@ import org.elasticsearch.xpack.sql.expression.UnaryExpression; import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.Functions; import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction; -import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute; import org.elasticsearch.xpack.sql.expression.function.aggregate.Avg; import org.elasticsearch.xpack.sql.expression.function.aggregate.CompoundNumericAggregate; import org.elasticsearch.xpack.sql.expression.function.aggregate.Count; @@ -31,28 +29,27 @@ import org.elasticsearch.xpack.sql.expression.function.aggregate.Percentiles; import org.elasticsearch.xpack.sql.expression.function.aggregate.Stats; import org.elasticsearch.xpack.sql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeHistogramFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.expression.predicate.And; -import org.elasticsearch.xpack.sql.expression.predicate.BinaryComparison; -import org.elasticsearch.xpack.sql.expression.predicate.Equals; -import org.elasticsearch.xpack.sql.expression.predicate.GreaterThan; -import org.elasticsearch.xpack.sql.expression.predicate.GreaterThanOrEqual; +import org.elasticsearch.xpack.sql.expression.predicate.BinaryPredicate; import org.elasticsearch.xpack.sql.expression.predicate.IsNotNull; -import org.elasticsearch.xpack.sql.expression.predicate.LessThan; -import org.elasticsearch.xpack.sql.expression.predicate.LessThanOrEqual; import org.elasticsearch.xpack.sql.expression.predicate.Not; import org.elasticsearch.xpack.sql.expression.predicate.Or; import org.elasticsearch.xpack.sql.expression.predicate.Range; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MatchQueryPredicate; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MultiMatchQueryPredicate; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.StringQueryPredicate; -import org.elasticsearch.xpack.sql.expression.regex.Like; -import org.elasticsearch.xpack.sql.expression.regex.LikePattern; -import org.elasticsearch.xpack.sql.expression.regex.RLike; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.LessThan; +import 
org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.sql.expression.predicate.regex.Like; +import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern; +import org.elasticsearch.xpack.sql.expression.predicate.regex.RLike; import org.elasticsearch.xpack.sql.querydsl.agg.AggFilter; import org.elasticsearch.xpack.sql.querydsl.agg.AndAggFilter; import org.elasticsearch.xpack.sql.querydsl.agg.AvgAgg; @@ -85,24 +82,19 @@ import org.elasticsearch.xpack.sql.querydsl.query.ScriptQuery; import org.elasticsearch.xpack.sql.querydsl.query.TermQuery; import org.elasticsearch.xpack.sql.querydsl.query.WildcardQuery; import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.Check; import org.elasticsearch.xpack.sql.util.ReflectionUtils; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Map.Entry; -import static java.lang.String.format; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.sql.expression.Foldables.doubleValuesOf; import static org.elasticsearch.xpack.sql.expression.Foldables.stringValueOf; import static org.elasticsearch.xpack.sql.expression.Foldables.valueOf; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; -import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate; abstract class QueryTranslator { @@ -402,10 +394,10 @@ abstract class QueryTranslator { // TODO: need to optimize on ngram // TODO: see whether escaping is needed - static class Likes extends ExpressionTranslator { + static class Likes extends ExpressionTranslator { @Override - protected QueryTranslation asQuery(BinaryExpression e, boolean onAggs) { + protected QueryTranslation asQuery(BinaryPredicate e, boolean onAggs) { Query q = null; boolean inexact = true; String target = null; @@ -415,7 +407,7 @@ abstract class QueryTranslator { inexact = fa.isInexact(); target = nameOf(inexact ? 
fa : fa.exactAttribute()); } else { - throw new SqlIllegalArgumentException("Scalar function ({}) not allowed (yet) as arguments for LIKE", + throw new SqlIllegalArgumentException("Scalar function ({}) not allowed (yet) as arguments for LIKE", Expressions.name(e.left())); } @@ -467,10 +459,10 @@ abstract class QueryTranslator { } } - static class BinaryLogic extends ExpressionTranslator { + static class BinaryLogic extends ExpressionTranslator { @Override - protected QueryTranslation asQuery(BinaryExpression e, boolean onAggs) { + protected QueryTranslation asQuery(BinaryPredicate e, boolean onAggs) { if (e instanceof And) { return and(e.location(), toQuery(e.left(), onAggs), toQuery(e.right(), onAggs)); } @@ -486,7 +478,7 @@ abstract class QueryTranslator { @Override protected QueryTranslation asQuery(Not not, boolean onAggs) { - QueryTranslation translation = toQuery(not.child(), onAggs); + QueryTranslation translation = toQuery(not.field(), onAggs); return new QueryTranslation(not(translation.query), translation.aggFilter); } } @@ -520,60 +512,26 @@ abstract class QueryTranslator { AggFilter aggFilter = null; Attribute at = ne.toAttribute(); - - // scalar function can appear in both WHERE and HAVING so handle it first - // in both cases the function script is used - script-query/query for the former, bucket-selector/aggFilter for the latter - - if (at instanceof ScalarFunctionAttribute) { - ScalarFunctionAttribute sfa = (ScalarFunctionAttribute) at; - ScriptTemplate scriptTemplate = sfa.script(); - - String template = formatTemplate(format(Locale.ROOT, "%s %s {}", scriptTemplate.template(), bc.symbol())); - // no need to bind the wrapped/target agg - it is already available through the nested script - // (needed to create the script itself) - Params params = paramsBuilder().script(scriptTemplate.params()).variable(valueOf(bc.right())).build(); - ScriptTemplate script = new ScriptTemplate(template, params, DataType.BOOLEAN); - if (onAggs) { - aggFilter = new AggFilter(at.id().toString(), script); - } - else { - query = new ScriptQuery(at.location(), script); - } - } - // // Agg context means HAVING -> PipelineAggs // - else if (onAggs) { - String template = null; - Params params = null; - - // agg function - if (at instanceof AggregateFunctionAttribute) { - AggregateFunctionAttribute fa = (AggregateFunctionAttribute) at; - - // TODO: handle case where both sides of the comparison are functions - template = formatTemplate(format(Locale.ROOT, "{} %s {}", bc.symbol())); - - // bind the agg and the variable to the script - params = paramsBuilder().agg(fa).variable(valueOf(bc.right())).build(); - } - - aggFilter = new AggFilter(at.id().toString(), new ScriptTemplate(template, params, DataType.BOOLEAN)); + ScriptTemplate script = bc.asScript(); + if (onAggs) { + aggFilter = new AggFilter(at.id().toString(), script); } - - // - // No Agg context means WHERE clause - // else { + // query directly on the field if (at instanceof FieldAttribute) { query = wrapIfNested(translateQuery(bc), ne); + } else { + query = new ScriptQuery(at.location(), script); } } - return new QueryTranslation(query, aggFilter); } - + // + // if the code gets here it's a bug + // else { throw new UnsupportedOperationException("No idea how to translate " + bc.left()); } @@ -618,92 +576,34 @@ abstract class QueryTranslator { @Override protected QueryTranslation asQuery(Range r, boolean onAggs) { - Object lower = valueOf(r.lower()); - Object upper = valueOf(r.upper()); - Expression e = r.value(); - - + if (e instanceof 
NamedExpression) { - NamedExpression ne = (NamedExpression) e; - Query query = null; AggFilter aggFilter = null; - Attribute at = ne.toAttribute(); + // + // Agg context means HAVING -> PipelineAggs + // + ScriptTemplate script = r.asScript(); + Attribute at = ((NamedExpression) e).toAttribute(); - // scalar function can appear in both WHERE and HAVING so handle it first - // in both cases the function script is used - script-query/query for the former, bucket-selector/aggFilter - // for the latter - - if (at instanceof ScalarFunctionAttribute) { - ScalarFunctionAttribute sfa = (ScalarFunctionAttribute) at; - ScriptTemplate scriptTemplate = sfa.script(); - - String template = formatTemplate(format(Locale.ROOT, "({} %s %s) && (%s %s {})", - r.includeLower() ? "<=" : "<", - scriptTemplate.template(), - scriptTemplate.template(), - r.includeUpper() ? "<=" : "<")); - - // no need to bind the wrapped/target - it is already available through the nested script (needed to - // create the script itself) - Params params = paramsBuilder().variable(lower) - .script(scriptTemplate.params()) - .script(scriptTemplate.params()) - .variable(upper) - .build(); - - ScriptTemplate script = new ScriptTemplate(template, params, DataType.BOOLEAN); - - if (onAggs) { - aggFilter = new AggFilter(at.id().toString(), script); + if (onAggs) { + aggFilter = new AggFilter(at.id().toString(), script); + } else { + // typical range; no scripting involved + if (at instanceof FieldAttribute) { + RangeQuery rangeQuery = new RangeQuery(r.location(), nameOf(r.value()), valueOf(r.lower()), r.includeLower(), + valueOf(r.upper()), r.includeUpper(), dateFormat(r.value())); + query = wrapIfNested(rangeQuery, r.value()); } + // scripted query else { query = new ScriptQuery(at.location(), script); } } - - // - // HAVING - // - else if (onAggs) { - String template = null; - Params params = null; - - // agg function - if (at instanceof AggregateFunctionAttribute) { - AggregateFunctionAttribute fa = (AggregateFunctionAttribute) at; - - template = formatTemplate(format(Locale.ROOT, "{} %s {} && {} %s {}", - r.includeLower() ? "<=" : "<", - r.includeUpper() ? 
"<=" : "<")); - - params = paramsBuilder().variable(lower) - .agg(fa) - .agg(fa) - .variable(upper) - .build(); - - } - aggFilter = new AggFilter(((NamedExpression) r.value()).id().toString(), - new ScriptTemplate(template, params, DataType.BOOLEAN)); - } - // - // WHERE - // - else { - // typical range - if (at instanceof FieldAttribute) { - RangeQuery rangeQuery = new RangeQuery(r.location(), nameOf(r.value()), - valueOf(r.lower()), r.includeLower(), valueOf(r.upper()), r.includeUpper(), dateFormat(r.value())); - query = wrapIfNested(rangeQuery, r.value()); - } - } - return new QueryTranslation(query, aggFilter); - } - else { + } else { throw new SqlIllegalArgumentException("No idea how to translate " + e); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPainlessExtension.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPainlessExtension.java index 426d725ac79..1846429cc80 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPainlessExtension.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPainlessExtension.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.plugin; import org.elasticsearch.painless.spi.PainlessExtension; import org.elasticsearch.painless.spi.Whitelist; import org.elasticsearch.painless.spi.WhitelistLoader; +import org.elasticsearch.script.BucketAggregationSelectorScript; import org.elasticsearch.script.FilterScript; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.SearchScript; @@ -30,6 +31,7 @@ public class SqlPainlessExtension implements PainlessExtension { whitelist.put(SearchScript.AGGS_CONTEXT, list); whitelist.put(SearchScript.CONTEXT, list); whitelist.put(SearchScript.SCRIPT_SORT_CONTEXT, list); + whitelist.put(BucketAggregationSelectorScript.CONTEXT, list); return whitelist; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/AggFilter.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/AggFilter.java index 60f621b38a3..38f7f81946d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/AggFilter.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/AggFilter.java @@ -11,18 +11,20 @@ import java.util.Objects; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.util.Check; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.bucketSelector; public class AggFilter extends PipelineAgg { + private static final String BUCKET_SELECTOR_ID_PREFIX = "having"; + private final ScriptTemplate scriptTemplate; private final Map aggPaths; public AggFilter(String name, ScriptTemplate scriptTemplate) { - super(name); + super(BUCKET_SELECTOR_ID_PREFIX + name); Check.isTrue(scriptTemplate != null, "a valid script is required"); this.scriptTemplate = scriptTemplate; this.aggPaths = scriptTemplate.aggPaths(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/AndAggFilter.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/AndAggFilter.java index 424b957db7a..c2c980c8568 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/AndAggFilter.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/AndAggFilter.java @@ -5,9 +5,9 @@ */ package org.elasticsearch.xpack.sql.querydsl.agg; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.script.Params; +import org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Locale; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByScriptKey.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByScriptKey.java index ccd2bf934ab..99bd4e27671 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByScriptKey.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByScriptKey.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.querydsl.agg; import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; import org.elasticsearch.search.aggregations.support.ValueType; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction; import java.util.Objects; @@ -39,8 +39,12 @@ public class GroupByScriptKey extends GroupByKey { .order(direction().asOrder()) .missingBucket(true); - if (script.outputType().isNumeric()) { - builder.valueType(ValueType.NUMBER); + if (script.outputType().isInteger) { + builder.valueType(ValueType.LONG); + } else if (script.outputType().isRational) { + builder.valueType(ValueType.DOUBLE); + } else if (script.outputType().isString()) { + builder.valueType(ValueType.STRING); } return builder; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/OrAggFilter.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/OrAggFilter.java index 42500b9c606..120f0a5848c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/OrAggFilter.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/OrAggFilter.java @@ -5,9 +5,9 @@ */ package org.elasticsearch.xpack.sql.querydsl.agg; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.script.Params; +import org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Locale; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/ComputedRef.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/ComputedRef.java index 8d1a55cfdd1..e1aa6032c05 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/ComputedRef.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/ComputedRef.java @@ -7,17 +7,17 @@ package org.elasticsearch.xpack.sql.querydsl.container; import org.elasticsearch.xpack.sql.execution.search.FieldExtraction; import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; public class ComputedRef implements FieldExtraction { - private final ProcessorDefinition processor; + private final Pipe processor; - public ComputedRef(ProcessorDefinition processor) { + public ComputedRef(Pipe processor) { this.processor = processor; } - public ProcessorDefinition processor() { + public Pipe processor() { return processor; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java index 9f9c1bb21bb..9048df42017 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java @@ -19,8 +19,7 @@ import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.LiteralAttribute; import org.elasticsearch.xpack.sql.expression.function.ScoreAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ScoreProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.querydsl.agg.Aggs; import org.elasticsearch.xpack.sql.querydsl.agg.GroupByKey; import org.elasticsearch.xpack.sql.querydsl.agg.LeafAgg; @@ -64,7 +63,7 @@ public class QueryContainer { // scalar function processors - recorded as functions get folded; // at scrolling, their inputs (leaves) get updated - private final Map<Attribute, ProcessorDefinition> scalarFunctions; + private final Map<Attribute, Pipe> scalarFunctions; private final Set sort; private final int limit; @@ -78,7 +77,7 @@ public class QueryContainer { public QueryContainer(Query query, Aggs aggs, List refs, Map aliases, Map pseudoFunctions, - Map<Attribute, ProcessorDefinition> scalarFunctions, + Map<Attribute, Pipe> scalarFunctions, Set sort, int limit) { this.query = query; this.aggs = aggs == null ? new Aggs() : aggs; @@ -155,7 +154,7 @@ public class QueryContainer { return l == limit ? 
this : new QueryContainer(query, aggs, columns, aliases, pseudoFunctions, scalarFunctions, sort, l); } - public QueryContainer withScalarProcessors(Map<Attribute, ProcessorDefinition> procs) { + public QueryContainer withScalarProcessors(Map<Attribute, Pipe> procs) { return new QueryContainer(query, aggs, columns, aliases, pseudoFunctions, procs, sort, limit); } @@ -215,22 +214,22 @@ return new BoolQuery(location, true, query, nested); } - // replace function's input with references - private Tuple<QueryContainer, FieldExtraction> computingRef(ScalarFunctionAttribute sfa) { - Attribute name = aliases.getOrDefault(sfa, sfa); - ProcessorDefinition proc = scalarFunctions.get(name); + // replace function/operator's input with references + private Tuple<QueryContainer, FieldExtraction> resolvedTreeComputingRef(ScalarFunctionAttribute ta) { + Attribute attribute = aliases.getOrDefault(ta, ta); + Pipe proc = scalarFunctions.get(attribute); // check the attribute itself if (proc == null) { - if (name instanceof ScalarFunctionAttribute) { - sfa = (ScalarFunctionAttribute) name; + if (attribute instanceof ScalarFunctionAttribute) { + ta = (ScalarFunctionAttribute) attribute; } - proc = sfa.processorDef(); + proc = ta.asPipe(); } // find the processor inputs (Attributes) and convert them into references // no need to promote them to the top since the container doesn't have to be aware - class QueryAttributeResolver implements ProcessorDefinition.AttributeResolver { + class QueryAttributeResolver implements Pipe.AttributeResolver { private QueryContainer container; private QueryAttributeResolver(QueryContainer container) { @@ -250,8 +249,8 @@ QueryContainer qContainer = resolver.container; // update proc - Map<Attribute, ProcessorDefinition> procs = new LinkedHashMap<>(qContainer.scalarFunctions()); - procs.put(name, proc); + Map<Attribute, Pipe> procs = new LinkedHashMap<>(qContainer.scalarFunctions()); + procs.put(attribute, proc); qContainer = qContainer.withScalarProcessors(procs); return new Tuple<>(qContainer, new ComputedRef(proc)); } @@ -271,13 +270,13 @@ } } if (attr instanceof ScalarFunctionAttribute) { - return computingRef((ScalarFunctionAttribute) attr); + return resolvedTreeComputingRef((ScalarFunctionAttribute) attr); } if (attr instanceof LiteralAttribute) { - return new Tuple<>(this, new ComputedRef(((LiteralAttribute) attr).asProcessorDefinition())); + return new Tuple<>(this, new ComputedRef(((LiteralAttribute) attr).asPipe())); } if (attr instanceof ScoreAttribute) { - return new Tuple<>(this, new ComputedRef(new ScoreProcessorDefinition(attr.location(), attr))); + return new Tuple<>(this, new ComputedRef(((ScoreAttribute) attr).asPipe())); } throw new SqlIllegalArgumentException("Unknown output attribute {}", attr); @@ -287,7 +286,7 @@ return with(combine(columns, ref)); } - public Map<Attribute, ProcessorDefinition> scalarFunctions() { + public Map<Attribute, Pipe> scalarFunctions() { return scalarFunctions; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/ScriptFieldRef.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/ScriptFieldRef.java index 02767bdea9e..c89b986a4f2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/ScriptFieldRef.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/ScriptFieldRef.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.sql.querydsl.container; import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; -import 
org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; public class ScriptFieldRef extends FieldReference { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/ScriptSort.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/ScriptSort.java index 62c3750f638..b7d7da7a9d0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/ScriptSort.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/ScriptSort.java @@ -5,9 +5,9 @@ */ package org.elasticsearch.xpack.sql.querydsl.container; -import java.util.Objects; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import java.util.Objects; public class ScriptSort extends Sort { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/ScriptQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/ScriptQuery.java index b918fd71a58..7c93f7cc95c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/ScriptQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/ScriptQuery.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.sql.querydsl.query; import java.util.Objects; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import static org.elasticsearch.index.query.QueryBuilders.scriptQuery; diff --git a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt index 0f12d32d44e..1bb2802a5db 100644 --- a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt +++ b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt @@ -12,24 +12,26 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalS String dayName(long, String) String monthName(long, String) Integer quarter(long, String) + Number round(Number, Number) + Number truncate(Number, Number) Integer ascii(String) Integer bitLength(String) - String character(Number) Integer charLength(String) - String lcase(String) - String ucase(String) - Integer length(String) - String rtrim(String) - String ltrim(String) - String space(Number) - String left(String, int) - String right(String, int) + String character(Number) String concat(String, String) - String repeat(String, int) - Integer position(String, String) String insert(String, int, int, String) - String substring(String, int, int) - String replace(String, String, String) + String lcase(String) + String left(String, int) + Integer length(String) Integer locate(String, String) Integer locate(String, String, Integer) + String ltrim(String) + Integer position(String, String) + String repeat(String, int) + String replace(String, String, String) + String right(String, int) + String rtrim(String) + String space(Number) + String substring(String, int, int) + String ucase(String) } \ No newline at end of file
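
Each signature line in sql_whitelist.txt above exposes one InternalSqlScriptUtils method to painless scripts; the reshuffle alphabetizes the string functions and adds the new round and truncate entries. For context, a condensed sketch of how such a resource file is typically wired in through the painless SPI (SqlPainlessExtension does this for all of the contexts shown earlier, including the newly added bucket-selector one; the class name below is invented):

```java
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.elasticsearch.painless.spi.PainlessExtension;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.painless.spi.WhitelistLoader;
import org.elasticsearch.script.FilterScript;
import org.elasticsearch.script.ScriptContext;

// Load the whitelist resource once and expose it to each script context that
// should be able to call the whitelisted methods.
public class WhitelistWiringSketch implements PainlessExtension {

    private static final Whitelist WHITELIST =
            WhitelistLoader.loadFromResourceFiles(WhitelistWiringSketch.class, "sql_whitelist.txt");

    @Override
    public Map<ScriptContext<?>, List<Whitelist>> getContextWhitelists() {
        Map<ScriptContext<?>, List<Whitelist>> whitelists = new HashMap<>();
        // the real extension registers several contexts, not just this one
        whitelists.put(FilterScript.CONTEXT, Collections.singletonList(WHITELIST));
        return whitelists;
    }
}
```

diff --git 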
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/PreAnalyzerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/PreAnalyzerTests.java index 43aacd52083..f37378c8fa9 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/PreAnalyzerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/PreAnalyzerTests.java @@ -38,7 +38,7 @@ public class PreAnalyzerTests extends ESTestCase { } public void testWildIndexWithCatalog() { - LogicalPlan plan = parser.createStatement("SELECT * FROM elastic:index*"); + LogicalPlan plan = parser.createStatement("SELECT * FROM elastic:\"index*\""); PreAnalysis result = preAnalyzer.preAnalyze(plan); assertThat(plan.preAnalyzed(), is(true)); assertThat(result.indices, hasSize(1)); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java index 816b6651335..3d84f852bc8 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java @@ -61,12 +61,12 @@ public class SourceGeneratorTests extends ESTestCase { public void testLimit() { QueryContainer container = new QueryContainer().withLimit(10).addGroups(singletonList(new GroupByColumnKey("1", "field"))); - SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); + int size = randomIntBetween(1, 10); + SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, size); Builder aggBuilder = sourceBuilder.aggregations(); assertEquals(1, aggBuilder.count()); - CompositeAggregationBuilder composite = (CompositeAggregationBuilder) aggBuilder.getAggregatorFactories().get(0); - // TODO: cannot access size - //assertEquals(10, composite.size()); + CompositeAggregationBuilder composite = (CompositeAggregationBuilder) aggBuilder.getAggregatorFactories().iterator().next(); + assertEquals(size, composite.size()); } public void testSortNoneSpecified() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractorTests.java index 375de112fe8..c628b090df2 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractorTests.java @@ -16,10 +16,10 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathFunctionProcessorTests; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ChainingProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ChainingProcessorTests; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.HitExtractorProcessor; -import 
org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.ChainingProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.ChainingProcessorTests; +import org.elasticsearch.xpack.sql.expression.gen.processor.HitExtractorProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/ParameterTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/ParameterTests.java index 37ab5fb2b6c..33185df465e 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/ParameterTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/ParameterTests.java @@ -6,10 +6,10 @@ package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Add; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Mul; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Sub; -import org.elasticsearch.xpack.sql.expression.predicate.Equals; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Add; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mul; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Sub; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.sql.parser.ParsingException; import org.elasticsearch.xpack.sql.parser.SqlParser; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/QuotingTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/QuotingTests.java index ceb9611a62c..2f23ea7cb73 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/QuotingTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/QuotingTests.java @@ -70,7 +70,7 @@ public class QuotingTests extends ESTestCase { String name = "@timestamp"; ParsingException ex = expectThrows(ParsingException.class, () -> new SqlParser().createExpression(quote + name + quote)); - assertThat(ex.getMessage(), equalTo("line 1:1: backquoted indetifiers not supported; please use double quotes instead")); + assertThat(ex.getMessage(), equalTo("line 1:1: backquoted identifiers not supported; please use double quotes instead")); } public void testQuotedAttributeAndQualifier() { @@ -92,7 +92,7 @@ public class QuotingTests extends ESTestCase { String name = "@timestamp"; ParsingException ex = expectThrows(ParsingException.class, () -> new SqlParser().createExpression(quote + qualifier + quote + "." 
+ quote + name + quote)); - assertThat(ex.getMessage(), equalTo("line 1:1: backquoted indetifiers not supported; please use double quotes instead")); + assertThat(ex.getMessage(), equalTo("line 1:1: backquoted identifiers not supported; please use double quotes instead")); } public void testGreedyQuoting() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java index 12581e9577c..0ca75ee05d9 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java @@ -6,31 +6,35 @@ package org.elasticsearch.xpack.sql.expression.function; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.parser.ParsingException; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.LocationTests; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; -import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.parser.ParsingException; + import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.TimeZone; +import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.sql.expression.function.FunctionRegistry.def; import static org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction.ResolutionType.DISTINCT; import static org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction.ResolutionType.EXTRACT; import static org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction.ResolutionType.STANDARD; import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; -import static java.util.Collections.emptyList; public class FunctionRegistryTests extends ESTestCase { public void testNoArgFunction() { UnresolvedFunction ur = uf(STANDARD); - FunctionRegistry r = new FunctionRegistry(Arrays.asList(def(Dummy.class, Dummy::new))); + FunctionRegistry r = new FunctionRegistry(Collections.singletonList(def(DummyFunction.class, DummyFunction::new))); FunctionDefinition def = r.resolveFunction(ur.name()); assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location()); @@ -47,9 +51,10 @@ public class FunctionRegistryTests extends ESTestCase { public void testUnaryFunction() { UnresolvedFunction ur = uf(STANDARD, mock(Expression.class)); - FunctionRegistry r = new FunctionRegistry(Arrays.asList(def(Dummy.class, (Location l, Expression e) -> { + FunctionRegistry r = new FunctionRegistry(Collections.singletonList( + def(DummyFunction.class, (Location l, Expression e) -> { assertSame(e, 
ur.children().get(0)); - return new Dummy(l); + return new DummyFunction(l); }))); FunctionDefinition def = r.resolveFunction(ur.name()); assertFalse(def.datetime()); @@ -74,11 +79,12 @@ public class FunctionRegistryTests extends ESTestCase { public void testUnaryDistinctAwareFunction() { boolean urIsDistinct = randomBoolean(); UnresolvedFunction ur = uf(urIsDistinct ? DISTINCT : STANDARD, mock(Expression.class)); - FunctionRegistry r = new FunctionRegistry(Arrays.asList(def(Dummy.class, (Location l, Expression e, boolean distinct) -> { - assertEquals(urIsDistinct, distinct); - assertSame(e, ur.children().get(0)); - return new Dummy(l); - }))); + FunctionRegistry r = new FunctionRegistry(Collections.singletonList( + def(DummyFunction.class, (Location l, Expression e, boolean distinct) -> { + assertEquals(urIsDistinct, distinct); + assertSame(e, ur.children().get(0)); + return new DummyFunction(l); + }))); FunctionDefinition def = r.resolveFunction(ur.name()); assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location()); assertFalse(def.datetime()); @@ -98,11 +104,12 @@ public class FunctionRegistryTests extends ESTestCase { boolean urIsExtract = randomBoolean(); UnresolvedFunction ur = uf(urIsExtract ? EXTRACT : STANDARD, mock(Expression.class)); TimeZone providedTimeZone = randomTimeZone(); - FunctionRegistry r = new FunctionRegistry(Arrays.asList(def(Dummy.class, (Location l, Expression e, TimeZone tz) -> { - assertEquals(providedTimeZone, tz); - assertSame(e, ur.children().get(0)); - return new Dummy(l); - }))); + FunctionRegistry r = new FunctionRegistry(Collections.singletonList( + def(DummyFunction.class, (Location l, Expression e, TimeZone tz) -> { + assertEquals(providedTimeZone, tz); + assertSame(e, ur.children().get(0)); + return new DummyFunction(l); + }))); FunctionDefinition def = r.resolveFunction(ur.name()); assertEquals(ur.location(), ur.buildResolved(providedTimeZone, def).location()); assertTrue(def.datetime()); @@ -125,11 +132,12 @@ public class FunctionRegistryTests extends ESTestCase { public void testBinaryFunction() { UnresolvedFunction ur = uf(STANDARD, mock(Expression.class), mock(Expression.class)); - FunctionRegistry r = new FunctionRegistry(Arrays.asList(def(Dummy.class, (Location l, Expression lhs, Expression rhs) -> { - assertSame(lhs, ur.children().get(0)); - assertSame(rhs, ur.children().get(1)); - return new Dummy(l); - }))); + FunctionRegistry r = new FunctionRegistry(Collections.singletonList( + def(DummyFunction.class, (Location l, Expression lhs, Expression rhs) -> { + assertSame(lhs, ur.children().get(0)); + assertSame(rhs, ur.children().get(1)); + return new DummyFunction(l); + }))); FunctionDefinition def = r.resolveFunction(ur.name()); assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location()); assertFalse(def.datetime()); @@ -156,17 +164,60 @@ public class FunctionRegistryTests extends ESTestCase { assertThat(e.getMessage(), endsWith("expects exactly two arguments")); } - private UnresolvedFunction uf(UnresolvedFunction.ResolutionType resolutionType, Expression... 
children) { - return new UnresolvedFunction(LocationTests.randomLocation(), "dummy", resolutionType, Arrays.asList(children)); + public void testFunctionResolving() { + UnresolvedFunction ur = uf(STANDARD, mock(Expression.class)); + FunctionRegistry r = new FunctionRegistry( + Collections.singletonList(def(DummyFunction.class, (Location l, Expression e) -> { + assertSame(e, ur.children().get(0)); + return new DummyFunction(l); + }, "DUMMY_FUNC"))); + + // Resolve by primary name + FunctionDefinition def = r.resolveFunction(r.resolveAlias("DuMMy_FuncTIon")); + assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location()); + + def = r.resolveFunction(r.resolveAlias("Dummy_Function")); + assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location()); + + def = r.resolveFunction(r.resolveAlias("dummy_function")); + assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location()); + + def = r.resolveFunction(r.resolveAlias("DUMMY_FUNCTION")); + assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location()); + + // Resolve by alias + def = r.resolveFunction(r.resolveAlias("DumMy_FunC")); + assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location()); + + def = r.resolveFunction(r.resolveAlias("dummy_func")); + assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location()); + + def = r.resolveFunction(r.resolveAlias("DUMMY_FUNC")); + assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location()); + + // Not resolved + SqlIllegalArgumentException e = expectThrows(SqlIllegalArgumentException.class, + () -> r.resolveFunction(r.resolveAlias("DummyFunction"))); + assertThat(e.getMessage(), + is("Cannot find function DUMMYFUNCTION; this should have been caught during analysis")); + + e = expectThrows(SqlIllegalArgumentException.class, + () -> r.resolveFunction(r.resolveAlias("dummyFunction"))); + assertThat(e.getMessage(), + is("Cannot find function DUMMYFUNCTION; this should have been caught during analysis")); } - public static class Dummy extends ScalarFunction { - public Dummy(Location location) { + private UnresolvedFunction uf(UnresolvedFunction.ResolutionType resolutionType, Expression... 
children) { + return new UnresolvedFunction(LocationTests.randomLocation(), "DUMMY_FUNCTION", resolutionType, Arrays.asList(children)); + } + + public static class DummyFunction extends ScalarFunction { + public DummyFunction(Location location) { super(location, emptyList()); } @Override - protected NodeInfo info() { + protected NodeInfo info() { return NodeInfo.create(this); } @@ -186,7 +237,7 @@ public class FunctionRegistryTests extends ESTestCase { } @Override - protected ProcessorDefinition makeProcessorDefinition() { + protected Pipe makePipe() { return null; } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/NamedExpressionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/NamedExpressionTests.java index 3692e5e4752..791eef8752c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/NamedExpressionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/NamedExpressionTests.java @@ -8,12 +8,12 @@ package org.elasticsearch.xpack.sql.expression.function; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.Literal; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Add; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Div; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Mod; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Mul; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Neg; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Sub; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Add; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Div; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mod; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mul; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Neg; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Sub; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.EsField; @@ -24,19 +24,19 @@ public class NamedExpressionTests extends ESTestCase { public void testArithmeticFunctionName() { Add add = new Add(EMPTY, l(5), l(2)); - assertEquals("(5 + 2)", add.name()); + assertEquals("5 + 2", add.name()); Div div = new Div(EMPTY, l(5), l(2)); - assertEquals("(5 / 2)", div.name()); + assertEquals("5 / 2", div.name()); Mod mod = new Mod(EMPTY, l(5), l(2)); - assertEquals("(5 % 2)", mod.name()); + assertEquals("5 % 2", mod.name()); Mul mul = new Mul(EMPTY, l(5), l(2)); - assertEquals("(5 * 2)", mul.name()); + assertEquals("5 * 2", mul.name()); Sub sub = new Sub(EMPTY, l(5), l(2)); - assertEquals("(5 - 2)", sub.name()); + assertEquals("5 - 2", sub.name()); Neg neg = new Neg(EMPTY, l(5)); assertEquals("-5", neg.name()); @@ -45,7 +45,7 @@ public class NamedExpressionTests extends ESTestCase { public void testNameForArithmeticFunctionAppliedOnTableColumn() { FieldAttribute fa = new FieldAttribute(EMPTY, "myField", new EsField("myESField", DataType.INTEGER, emptyMap(), true)); Add add = new Add(EMPTY, fa, l(10)); - assertEquals("((myField) + 10)", add.name()); + assertEquals("(myField) + 10", add.name()); } private static Literal l(Object value) { diff 
--git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java index f866ee72920..0bd54bd7382 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java @@ -27,7 +27,7 @@ public class DayOfYearTests extends ESTestCase { } private Object extract(Object value, TimeZone timeZone) { - return build(value, timeZone).asProcessorDefinition().asProcessor().process(value); + return build(value, timeZone).asPipe().asProcessor().process(value); } private DayOfYear build(Object value, TimeZone timeZone) { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathProcessorTests.java index 6563760d225..84ca662beba 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathProcessorTests.java @@ -8,10 +8,11 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.math; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ConstantProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; @@ -39,18 +40,81 @@ public class BinaryMathProcessorTests extends AbstractWireSerializingTestCase new Round(EMPTY, l(5), l("foobarbar")).makePipe().asProcessor().process(null)); + assertEquals("A number is required; received foobarbar", siae.getMessage()); + siae = expectThrows(SqlIllegalArgumentException.class, + () -> new Round(EMPTY, l("bla"), l(0)).makePipe().asProcessor().process(null)); + assertEquals("A number is required; received bla", siae.getMessage()); + siae = expectThrows(SqlIllegalArgumentException.class, + () -> new Round(EMPTY, l(123.34), l(0.1)).makePipe().asProcessor().process(null)); + assertEquals("An integer number is required; received [0.1] as second parameter", siae.getMessage()); + } + + public void testTruncateWithValidInput() { + assertEquals(123.0, new Truncate(EMPTY, l(123), l(3)).makePipe().asProcessor().process(null)); + assertEquals(123.4, new Truncate(EMPTY, l(123.45), l(1)).makePipe().asProcessor().process(null)); + assertEquals(123.0, new Truncate(EMPTY, l(123.45), l(0)).makePipe().asProcessor().process(null)); + assertEquals(123.0, new Truncate(EMPTY, l(123.45), null).makePipe().asProcessor().process(null)); + assertEquals(-100.0, new Truncate(EMPTY, l(-123), 
l(-2)).makePipe().asProcessor().process(null)); + assertEquals(-120.0, new Truncate(EMPTY, l(-123.45), l(-1)).makePipe().asProcessor().process(null)); + assertEquals(-123.0, new Truncate(EMPTY, l(-123.5), l(0)).makePipe().asProcessor().process(null)); + assertEquals(-123.0, new Truncate(EMPTY, l(-123.45), null).makePipe().asProcessor().process(null)); + } + + public void testTruncateFunctionWithEdgeCasesInputs() { + assertNull(new Truncate(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); + assertEquals(0.0, new Truncate(EMPTY, l(0), l(0)).makePipe().asProcessor().process(null)); + assertEquals((double) Long.MAX_VALUE, new Truncate(EMPTY, l(Long.MAX_VALUE), l(0)) + .makePipe().asProcessor().process(null)); + assertEquals(Double.NaN, new Truncate(EMPTY, l(123.456), l(Integer.MAX_VALUE)) + .makePipe().asProcessor().process(null)); + } + + public void testTruncateInputValidation() { + SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class, + () -> new Truncate(EMPTY, l(5), l("foobarbar")).makePipe().asProcessor().process(null)); + assertEquals("A number is required; received foobarbar", siae.getMessage()); + siae = expectThrows(SqlIllegalArgumentException.class, + () -> new Truncate(EMPTY, l("bla"), l(0)).makePipe().asProcessor().process(null)); + assertEquals("A number is required; received bla", siae.getMessage()); + siae = expectThrows(SqlIllegalArgumentException.class, + () -> new Truncate(EMPTY, l(123.34), l(0.1)).makePipe().asProcessor().process(null)); + assertEquals("An integer number is required; received [0.1] as second parameter", siae.getMessage()); + } public void testHandleNull() { - assertNull(new ATan2(EMPTY, l(null), l(3)).makeProcessorDefinition().asProcessor().process(null)); - assertNull(new Power(EMPTY, l(null), l(null)).makeProcessorDefinition().asProcessor().process(null)); + assertNull(new ATan2(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); + assertNull(new Power(EMPTY, l(null), l(null)).makePipe().asProcessor().process(null)); } private static Literal l(Object value) { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/BinaryProcessorDefinitionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/BinaryProcessorDefinitionTests.java deleted file mode 100644 index 110c4829162..00000000000 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/BinaryProcessorDefinitionTests.java +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
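
A note on the Truncate cases above: truncation always moves toward zero (so -123.5 stays -123.0, where half-up rounding would give -124.0), and a negative second argument truncates digits to the left of the decimal point. A framework-free sketch of the same rules, using java.math.BigDecimal rather than the actual SQL processor code:

    import java.math.BigDecimal;
    import java.math.RoundingMode;

    public class TruncateSemanticsSketch {
        // RoundingMode.DOWN discards digits, i.e. truncates toward zero;
        // a negative scale truncates whole-number digits instead of decimals.
        static double truncate(double value, int decimals) {
            return BigDecimal.valueOf(value).setScale(decimals, RoundingMode.DOWN).doubleValue();
        }

        public static void main(String[] args) {
            System.out.println(truncate(123.45, 1)); // 123.4
            System.out.println(truncate(-123.5, 0)); // -123.0, not -124.0
            System.out.println(truncate(-123, -2));  // -100.0, matching the l(-2) case above
        }
    }

RoundingMode.DOWN is exactly "discard digits", which is why one mode reproduces all three families of assertions.
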
- */ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; - -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition.AttributeResolver; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.tree.NodeInfo; - -import java.util.List; - -import static java.util.Collections.emptyList; - -public class BinaryProcessorDefinitionTests extends ESTestCase { - public void testSupportedByAggsOnlyQuery() { - ProcessorDefinition supported = new DummyProcessorDefinition(true); - ProcessorDefinition unsupported = new DummyProcessorDefinition(false); - - assertFalse(new DummyBinaryProcessorDefinition(unsupported, unsupported).supportedByAggsOnlyQuery()); - assertFalse(new DummyBinaryProcessorDefinition(unsupported, supported).supportedByAggsOnlyQuery()); - assertFalse(new DummyBinaryProcessorDefinition(supported, unsupported).supportedByAggsOnlyQuery()); - assertTrue(new DummyBinaryProcessorDefinition(supported, supported).supportedByAggsOnlyQuery()); - } - - public void testResolveAttributes() { - ProcessorDefinition needsNothing = new DummyProcessorDefinition(randomBoolean()); - ProcessorDefinition resolvesTo = new DummyProcessorDefinition(randomBoolean()); - ProcessorDefinition needsResolution = new DummyProcessorDefinition(randomBoolean()) { - @Override - public ProcessorDefinition resolveAttributes(AttributeResolver resolver) { - return resolvesTo; - } - }; - AttributeResolver resolver = a -> { - fail("not exepected"); - return null; - }; - - ProcessorDefinition d = new DummyBinaryProcessorDefinition(needsNothing, needsNothing); - assertSame(d, d.resolveAttributes(resolver)); - - d = new DummyBinaryProcessorDefinition(needsNothing, needsResolution); - ProcessorDefinition expected = new DummyBinaryProcessorDefinition(needsNothing, resolvesTo); - assertEquals(expected, d.resolveAttributes(resolver)); - - d = new DummyBinaryProcessorDefinition(needsResolution, needsNothing); - expected = new DummyBinaryProcessorDefinition(resolvesTo, needsNothing); - assertEquals(expected, d.resolveAttributes(resolver)); - } - - public void testCollectFields() { - DummyProcessorDefinition wantsScore = new DummyProcessorDefinition(randomBoolean()) { - @Override - public void collectFields(SqlSourceBuilder sourceBuilder) { - sourceBuilder.trackScores(); - } - }; - DummyProcessorDefinition wantsNothing = new DummyProcessorDefinition(randomBoolean()); - assertFalse(tracksScores(new DummyBinaryProcessorDefinition(wantsNothing, wantsNothing))); - assertTrue(tracksScores(new DummyBinaryProcessorDefinition(wantsScore, wantsNothing))); - assertTrue(tracksScores(new DummyBinaryProcessorDefinition(wantsNothing, wantsScore))); - } - - /** - * Returns {@code true} if the processor defintion builds a query that - * tracks scores, {@code false} otherwise. Used for testing - * {@link ProcessorDefinition#collectFields(SqlSourceBuilder)}. 
- */ - static boolean tracksScores(ProcessorDefinition d) { - SqlSourceBuilder b = new SqlSourceBuilder(); - d.collectFields(b); - SearchSourceBuilder source = new SearchSourceBuilder(); - b.build(source); - return source.trackScores(); - } - - public static final class DummyBinaryProcessorDefinition extends BinaryProcessorDefinition { - public DummyBinaryProcessorDefinition(ProcessorDefinition left, ProcessorDefinition right) { - this(Location.EMPTY, left, right); - } - - public DummyBinaryProcessorDefinition(Location location, ProcessorDefinition left, ProcessorDefinition right) { - super(location, null, left, right); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, DummyBinaryProcessorDefinition::new, left(), right()); - } - - @Override - public Processor asProcessor() { - return null; - } - - @Override - protected BinaryProcessorDefinition replaceChildren(ProcessorDefinition left, ProcessorDefinition right) { - return new DummyBinaryProcessorDefinition(location(), left, right); - } - } - - public static class DummyProcessorDefinition extends ProcessorDefinition { - private final boolean supportedByAggsOnlyQuery; - - public DummyProcessorDefinition(boolean supportedByAggsOnlyQuery) { - this(Location.EMPTY, supportedByAggsOnlyQuery); - } - - public DummyProcessorDefinition(Location location, boolean supportedByAggsOnlyQuery) { - super(location, null, emptyList()); - this.supportedByAggsOnlyQuery = supportedByAggsOnlyQuery; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, DummyProcessorDefinition::new, supportedByAggsOnlyQuery); - } - - @Override - public ProcessorDefinition replaceChildren(List newChildren) { - throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); - } - - @Override - public boolean supportedByAggsOnlyQuery() { - return supportedByAggsOnlyQuery; - } - - @Override - public boolean resolved() { - return true; - } - - @Override - public Processor asProcessor() { - return null; - } - - @Override - public ProcessorDefinition resolveAttributes(AttributeResolver resolver) { - return this; - } - - @Override - public void collectFields(SqlSourceBuilder sourceBuilder) { - } - } -} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/UnaryProcessorDefinitionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/UnaryProcessorDefinitionTests.java deleted file mode 100644 index 5a102403d30..00000000000 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/UnaryProcessorDefinitionTests.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
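
The deleted BinaryProcessorDefinitionTests above used a pattern worth keeping in mind: collectFields() is verified by running the definition against a real builder and inspecting the built state, no mocks involved. Stripped of the Elasticsearch types (the names below are stand-ins invented for illustration), the helper reduces to:

    public class TracksScoresSketch {
        // stand-ins for SqlSourceBuilder / ProcessorDefinition
        static class SourceBuilder {
            private boolean trackScores;
            void trackScores() { trackScores = true; }
            boolean tracksScores() { return trackScores; }
        }

        interface Definition { void collectFields(SourceBuilder builder); }

        // the helper: build for real, then assert on the built state
        static boolean tracksScores(Definition d) {
            SourceBuilder b = new SourceBuilder();
            d.collectFields(b);
            return b.tracksScores();
        }

        public static void main(String[] args) {
            Definition wantsScore = SourceBuilder::trackScores; // a one-line "definition"
            Definition wantsNothing = b -> { };
            System.out.println(tracksScores(wantsScore));   // true
            System.out.println(tracksScores(wantsNothing)); // false
        }
    }
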
- */ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; - -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.BinaryProcessorDefinitionTests.DummyProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition.AttributeResolver; -import org.elasticsearch.xpack.sql.tree.Location; - -import static org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.BinaryProcessorDefinitionTests.tracksScores; - -public class UnaryProcessorDefinitionTests extends ESTestCase { - public void testSupportedByAggsOnlyQuery() { - ProcessorDefinition supported = new DummyProcessorDefinition(true); - ProcessorDefinition unsupported = new DummyProcessorDefinition(false); - - assertFalse(newUnaryProcessor(unsupported).supportedByAggsOnlyQuery()); - assertTrue(newUnaryProcessor(supported).supportedByAggsOnlyQuery()); - } - - public void testResolveAttributes() { - ProcessorDefinition needsNothing = new DummyProcessorDefinition(randomBoolean()); - ProcessorDefinition resolvesTo = new DummyProcessorDefinition(randomBoolean()); - ProcessorDefinition needsResolution = new DummyProcessorDefinition(randomBoolean()) { - @Override - public ProcessorDefinition resolveAttributes(AttributeResolver resolver) { - return resolvesTo; - } - }; - AttributeResolver resolver = a -> { - fail("not exepected"); - return null; - }; - - ProcessorDefinition d = newUnaryProcessor(needsNothing); - assertSame(d, d.resolveAttributes(resolver)); - - d = newUnaryProcessor(needsResolution); - ProcessorDefinition expected = newUnaryProcessor(resolvesTo); - assertEquals(expected, d.resolveAttributes(resolver)); - } - - public void testCollectFields() { - DummyProcessorDefinition wantsScore = new DummyProcessorDefinition(randomBoolean()) { - @Override - public void collectFields(SqlSourceBuilder sourceBuilder) { - sourceBuilder.trackScores(); - } - }; - DummyProcessorDefinition wantsNothing = new DummyProcessorDefinition(randomBoolean()); - assertFalse(tracksScores(newUnaryProcessor(wantsNothing))); - assertTrue(tracksScores(newUnaryProcessor(wantsScore))); - } - - private ProcessorDefinition newUnaryProcessor(ProcessorDefinition child) { - return new UnaryProcessorDefinition(Location.EMPTY, null, child, null); - } -} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericPipeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericPipeTests.java new file mode 100644 index 00000000000..bed19063f08 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericPipeTests.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
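
Both deleted ProcessorDefinition test classes assert the same contract for resolveAttributes: a node whose children need nothing returns itself (assertSame), and the resolver must not even be consulted; a node with a resolvable child returns a new, equal instance. A minimal model of that contract with hypothetical Node and Resolver types, not the SQL tree API:

    public class ResolveContractSketch {
        interface Resolver { Node resolve(Node n); }

        interface Node { Node resolveAttributes(Resolver r); }

        static final class Leaf implements Node {
            @Override public Node resolveAttributes(Resolver r) { return this; } // nothing to resolve
        }

        static final class Unary implements Node {
            final Node child;
            Unary(Node child) { this.child = child; }
            @Override public Node resolveAttributes(Resolver r) {
                Node newChild = child.resolveAttributes(r);
                // preserve reference identity when nothing changed
                return newChild == child ? this : new Unary(newChild);
            }
        }

        public static void main(String[] args) {
            Node tree = new Unary(new Leaf());
            Resolver mustNotBeCalled = n -> {
                throw new AssertionError("resolver consulted although nothing needs resolving");
            };
            System.out.println(tree.resolveAttributes(mustNotBeCalled) == tree); // true
        }
    }
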
+ */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.string; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor.BinaryStringNumericOperation; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; +import org.elasticsearch.xpack.sql.tree.Location; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.function.Function; + +import static org.elasticsearch.xpack.sql.expression.Expressions.pipe; +import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomIntLiteral; +import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; +import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; + +public class BinaryStringNumericPipeTests + extends AbstractNodeTestCase { + + @Override + protected BinaryStringNumericPipe randomInstance() { + return randomBinaryStringNumericPipe(); + } + + private Expression randomBinaryStringNumericExpression() { + return randomBinaryStringNumericPipe().expression(); + } + + private BinaryStringNumericOperation randomBinaryStringNumericOperation() { + return randomBinaryStringNumericPipe().operation(); + } + + public static BinaryStringNumericPipe randomBinaryStringNumericPipe() { + List functions = new ArrayList<>(); + functions.add(new Left(randomLocation(), randomStringLiteral(), randomIntLiteral()).makePipe()); + functions.add(new Right(randomLocation(), randomStringLiteral(), randomIntLiteral()).makePipe()); + functions.add(new Repeat(randomLocation(), randomStringLiteral(), randomIntLiteral()).makePipe()); + + return (BinaryStringNumericPipe) randomFrom(functions); + } + + @Override + public void testTransform() { + // test transforming only the properties (location, expression, operation), + // skipping the children (the two parameters of the binary function) which are tested separately + BinaryStringNumericPipe b1 = randomInstance(); + + Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomBinaryStringNumericExpression()); + BinaryStringNumericPipe newB = new BinaryStringNumericPipe( + b1.location(), + newExpression, + b1.left(), + b1.right(), + b1.operation()); + assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class)); + + BinaryStringNumericPipe b2 = randomInstance(); + BinaryStringNumericOperation newOp = randomValueOtherThan(b2.operation(), () -> randomBinaryStringNumericOperation()); + newB = new BinaryStringNumericPipe( + b2.location(), + b2.expression(), + b2.left(), + b2.right(), + newOp); + assertEquals(newB, + b2.transformPropertiesOnly(v -> Objects.equals(v, b2.operation()) ? newOp : v, BinaryStringNumericOperation.class)); + + BinaryStringNumericPipe b3 = randomInstance(); + Location newLoc = randomValueOtherThan(b3.location(), () -> randomLocation()); + newB = new BinaryStringNumericPipe( + newLoc, + b3.expression(), + b3.left(), + b3.right(), + b3.operation()); + assertEquals(newB, + b3.transformPropertiesOnly(v -> Objects.equals(v, b3.location()) ? 
newLoc : v, Location.class)); + } + + @Override + public void testReplaceChildren() { + BinaryStringNumericPipe b = randomInstance(); + Pipe newLeft = pipe(((Expression) randomValueOtherThan(b.left(), () -> randomStringLiteral()))); + Pipe newRight = pipe(((Expression) randomValueOtherThan(b.right(), () -> randomIntLiteral()))); + BinaryStringNumericPipe newB = + new BinaryStringNumericPipe(b.location(), b.expression(), b.left(), b.right(), b.operation()); + BinaryPipe transformed = newB.replaceChildren(newLeft, b.right()); + + assertEquals(transformed.left(), newLeft); + assertEquals(transformed.location(), b.location()); + assertEquals(transformed.expression(), b.expression()); + assertEquals(transformed.right(), b.right()); + + transformed = newB.replaceChildren(b.left(), newRight); + assertEquals(transformed.left(), b.left()); + assertEquals(transformed.location(), b.location()); + assertEquals(transformed.expression(), b.expression()); + assertEquals(transformed.right(), newRight); + + transformed = newB.replaceChildren(newLeft, newRight); + assertEquals(transformed.left(), newLeft); + assertEquals(transformed.location(), b.location()); + assertEquals(transformed.expression(), b.expression()); + assertEquals(transformed.right(), newRight); + } + + @Override + protected BinaryStringNumericPipe mutate(BinaryStringNumericPipe instance) { + List> randoms = new ArrayList<>(); + randoms.add(f -> new BinaryStringNumericPipe(f.location(), + f.expression(), + pipe(((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral()))), + f.right(), + f.operation())); + randoms.add(f -> new BinaryStringNumericPipe(f.location(), + f.expression(), + f.left(), + pipe(((Expression) randomValueOtherThan(f.right(), () -> randomIntLiteral()))), + f.operation())); + randoms.add(f -> new BinaryStringNumericPipe(f.location(), + f.expression(), + pipe(((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral()))), + pipe(((Expression) randomValueOtherThan(f.right(), () -> randomIntLiteral()))), + f.operation())); + + return randomFrom(randoms).apply(instance); + } + + @Override + protected BinaryStringNumericPipe copy(BinaryStringNumericPipe instance) { + return new BinaryStringNumericPipe(instance.location(), + instance.expression(), + instance.left(), + instance.right(), + instance.operation()); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorDefinitionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorDefinitionTests.java deleted file mode 100644 index 0b644995fc6..00000000000 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorDefinitionTests.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
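
This test and the remaining *PipeTests below all fill in the same AbstractNodeTestCase hooks: randomInstance(), mutate(), copy(), plus the transform and replaceChildren overrides. The invariant behind copy() and mutate() fits in a few lines; a sketch with plain exceptions standing in for the framework's assertions:

    import java.util.function.UnaryOperator;

    public class NodeTestContractSketch {
        static <T> void checkCopyAndMutate(T instance, UnaryOperator<T> copy, UnaryOperator<T> mutate) {
            T copied = copy.apply(instance);
            if (!copied.equals(instance) || copied == instance) {
                throw new AssertionError("copy() must be equal to, but not the same reference as, the original");
            }
            if (mutate.apply(instance).equals(instance)) {
                throw new AssertionError("mutate() must change at least one property");
            }
        }

        public static void main(String[] args) {
            // toy stand-in for a Pipe: strings copy cleanly and mutate visibly
            checkCopyAndMutate("left|right", s -> new String(s), s -> s + "|mutated");
            System.out.println("contract holds");
        }
    }
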
- */ - -package org.elasticsearch.xpack.sql.expression.function.scalar.string; - -import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.BinaryProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor.BinaryStringNumericOperation; -import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; -import org.elasticsearch.xpack.sql.tree.Location; - -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; -import java.util.function.Function; - -import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomIntLiteral; -import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; -import static org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions.toProcessorDefinition; -import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; - -public class BinaryStringNumericProcessorDefinitionTests - extends AbstractNodeTestCase { - - @Override - protected BinaryStringNumericProcessorDefinition randomInstance() { - return randomBinaryStringNumericProcessorDefinition(); - } - - private Expression randomBinaryStringNumericExpression() { - return randomBinaryStringNumericProcessorDefinition().expression(); - } - - private BinaryStringNumericOperation randomBinaryStringNumericOperation() { - return randomBinaryStringNumericProcessorDefinition().operation(); - } - - public static BinaryStringNumericProcessorDefinition randomBinaryStringNumericProcessorDefinition() { - List functions = new ArrayList<>(); - functions.add(new Left(randomLocation(), randomStringLiteral(), randomIntLiteral()).makeProcessorDefinition()); - functions.add(new Right(randomLocation(), randomStringLiteral(), randomIntLiteral()).makeProcessorDefinition()); - functions.add(new Repeat(randomLocation(), randomStringLiteral(), randomIntLiteral()).makeProcessorDefinition()); - - return (BinaryStringNumericProcessorDefinition) randomFrom(functions); - } - - @Override - public void testTransform() { - // test transforming only the properties (location, expression, operation), - // skipping the children (the two parameters of the binary function) which are tested separately - BinaryStringNumericProcessorDefinition b1 = randomInstance(); - - Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomBinaryStringNumericExpression()); - BinaryStringNumericProcessorDefinition newB = new BinaryStringNumericProcessorDefinition( - b1.location(), - newExpression, - b1.left(), - b1.right(), - b1.operation()); - assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class)); - - BinaryStringNumericProcessorDefinition b2 = randomInstance(); - BinaryStringNumericOperation newOp = randomValueOtherThan(b2.operation(), () -> randomBinaryStringNumericOperation()); - newB = new BinaryStringNumericProcessorDefinition( - b2.location(), - b2.expression(), - b2.left(), - b2.right(), - newOp); - assertEquals(newB, - b2.transformPropertiesOnly(v -> Objects.equals(v, b2.operation()) ? 
newOp : v, BinaryStringNumericOperation.class)); - - BinaryStringNumericProcessorDefinition b3 = randomInstance(); - Location newLoc = randomValueOtherThan(b3.location(), () -> randomLocation()); - newB = new BinaryStringNumericProcessorDefinition( - newLoc, - b3.expression(), - b3.left(), - b3.right(), - b3.operation()); - assertEquals(newB, - b3.transformPropertiesOnly(v -> Objects.equals(v, b3.location()) ? newLoc : v, Location.class)); - } - - @Override - public void testReplaceChildren() { - BinaryStringNumericProcessorDefinition b = randomInstance(); - ProcessorDefinition newLeft = toProcessorDefinition((Expression) randomValueOtherThan(b.left(), () -> randomStringLiteral())); - ProcessorDefinition newRight = toProcessorDefinition((Expression) randomValueOtherThan(b.right(), () -> randomIntLiteral())); - BinaryStringNumericProcessorDefinition newB = - new BinaryStringNumericProcessorDefinition(b.location(), b.expression(), b.left(), b.right(), b.operation()); - BinaryProcessorDefinition transformed = newB.replaceChildren(newLeft, b.right()); - - assertEquals(transformed.left(), newLeft); - assertEquals(transformed.location(), b.location()); - assertEquals(transformed.expression(), b.expression()); - assertEquals(transformed.right(), b.right()); - - transformed = newB.replaceChildren(b.left(), newRight); - assertEquals(transformed.left(), b.left()); - assertEquals(transformed.location(), b.location()); - assertEquals(transformed.expression(), b.expression()); - assertEquals(transformed.right(), newRight); - - transformed = newB.replaceChildren(newLeft, newRight); - assertEquals(transformed.left(), newLeft); - assertEquals(transformed.location(), b.location()); - assertEquals(transformed.expression(), b.expression()); - assertEquals(transformed.right(), newRight); - } - - @Override - protected BinaryStringNumericProcessorDefinition mutate(BinaryStringNumericProcessorDefinition instance) { - List> randoms = new ArrayList<>(); - randoms.add(f -> new BinaryStringNumericProcessorDefinition(f.location(), - f.expression(), - toProcessorDefinition((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral())), - f.right(), - f.operation())); - randoms.add(f -> new BinaryStringNumericProcessorDefinition(f.location(), - f.expression(), - f.left(), - toProcessorDefinition((Expression) randomValueOtherThan(f.right(), () -> randomIntLiteral())), - f.operation())); - randoms.add(f -> new BinaryStringNumericProcessorDefinition(f.location(), - f.expression(), - toProcessorDefinition((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral())), - toProcessorDefinition((Expression) randomValueOtherThan(f.right(), () -> randomIntLiteral())), - f.operation())); - - return randomFrom(randoms).apply(instance); - } - - @Override - protected BinaryStringNumericProcessorDefinition copy(BinaryStringNumericProcessorDefinition instance) { - return new BinaryStringNumericProcessorDefinition(instance.location(), - instance.expression(), - instance.left(), - instance.right(), - instance.operation()); - } -} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java index 4cfc43d50b1..6712df0c8f6 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java +++ 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java @@ -11,8 +11,8 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ConstantProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor.BinaryStringNumericOperation; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; @@ -38,75 +38,75 @@ public class BinaryStringNumericProcessorTests extends AbstractWireSerializingTe } public void testLeftFunctionWithValidInput() { - assertEquals("foo", new Left(EMPTY, l("foo bar"), l(3)).makeProcessorDefinition().asProcessor().process(null)); - assertEquals("foo bar", new Left(EMPTY, l("foo bar"), l(7)).makeProcessorDefinition().asProcessor().process(null)); - assertEquals("foo bar", new Left(EMPTY, l("foo bar"), l(123)).makeProcessorDefinition().asProcessor().process(null)); - assertEquals("f", new Left(EMPTY, l('f'), l(1)).makeProcessorDefinition().asProcessor().process(null)); + assertEquals("foo", new Left(EMPTY, l("foo bar"), l(3)).makePipe().asProcessor().process(null)); + assertEquals("foo bar", new Left(EMPTY, l("foo bar"), l(7)).makePipe().asProcessor().process(null)); + assertEquals("foo bar", new Left(EMPTY, l("foo bar"), l(123)).makePipe().asProcessor().process(null)); + assertEquals("f", new Left(EMPTY, l('f'), l(1)).makePipe().asProcessor().process(null)); } public void testLeftFunctionWithEdgeCases() { - assertNull(new Left(EMPTY, l("foo bar"), l(null)).makeProcessorDefinition().asProcessor().process(null)); - assertNull(new Left(EMPTY, l(null), l(3)).makeProcessorDefinition().asProcessor().process(null)); - assertNull(new Left(EMPTY, l(null), l(null)).makeProcessorDefinition().asProcessor().process(null)); - assertEquals("", new Left(EMPTY, l("foo bar"), l(-1)).makeProcessorDefinition().asProcessor().process(null)); - assertEquals("", new Left(EMPTY, l("foo bar"), l(0)).makeProcessorDefinition().asProcessor().process(null)); - assertEquals("", new Left(EMPTY, l('f'), l(0)).makeProcessorDefinition().asProcessor().process(null)); + assertNull(new Left(EMPTY, l("foo bar"), l(null)).makePipe().asProcessor().process(null)); + assertNull(new Left(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); + assertNull(new Left(EMPTY, l(null), l(null)).makePipe().asProcessor().process(null)); + assertEquals("", new Left(EMPTY, l("foo bar"), l(-1)).makePipe().asProcessor().process(null)); + assertEquals("", new Left(EMPTY, l("foo bar"), l(0)).makePipe().asProcessor().process(null)); + assertEquals("", new Left(EMPTY, l('f'), l(0)).makePipe().asProcessor().process(null)); } public void testLeftFunctionInputValidation() { SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Left(EMPTY, l(5), l(3)).makeProcessorDefinition().asProcessor().process(null)); + () -> new Left(EMPTY, l(5), l(3)).makePipe().asProcessor().process(null)); assertEquals("A string/char is required; received [5]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, - () 
-> new Left(EMPTY, l("foo bar"), l("baz")).makeProcessorDefinition().asProcessor().process(null)); + () -> new Left(EMPTY, l("foo bar"), l("baz")).makePipe().asProcessor().process(null)); assertEquals("A number is required; received [baz]", siae.getMessage()); } public void testRightFunctionWithValidInput() { - assertEquals("bar", new Right(EMPTY, l("foo bar"), l(3)).makeProcessorDefinition().asProcessor().process(null)); - assertEquals("foo bar", new Right(EMPTY, l("foo bar"), l(7)).makeProcessorDefinition().asProcessor().process(null)); - assertEquals("foo bar", new Right(EMPTY, l("foo bar"), l(123)).makeProcessorDefinition().asProcessor().process(null)); - assertEquals("f", new Right(EMPTY, l('f'), l(1)).makeProcessorDefinition().asProcessor().process(null)); + assertEquals("bar", new Right(EMPTY, l("foo bar"), l(3)).makePipe().asProcessor().process(null)); + assertEquals("foo bar", new Right(EMPTY, l("foo bar"), l(7)).makePipe().asProcessor().process(null)); + assertEquals("foo bar", new Right(EMPTY, l("foo bar"), l(123)).makePipe().asProcessor().process(null)); + assertEquals("f", new Right(EMPTY, l('f'), l(1)).makePipe().asProcessor().process(null)); } public void testRightFunctionWithEdgeCases() { - assertNull(new Right(EMPTY, l("foo bar"), l(null)).makeProcessorDefinition().asProcessor().process(null)); - assertNull(new Right(EMPTY, l(null), l(3)).makeProcessorDefinition().asProcessor().process(null)); - assertNull(new Right(EMPTY, l(null), l(null)).makeProcessorDefinition().asProcessor().process(null)); - assertEquals("", new Right(EMPTY, l("foo bar"), l(-1)).makeProcessorDefinition().asProcessor().process(null)); - assertEquals("", new Right(EMPTY, l("foo bar"), l(0)).makeProcessorDefinition().asProcessor().process(null)); - assertEquals("", new Right(EMPTY, l('f'), l(0)).makeProcessorDefinition().asProcessor().process(null)); + assertNull(new Right(EMPTY, l("foo bar"), l(null)).makePipe().asProcessor().process(null)); + assertNull(new Right(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); + assertNull(new Right(EMPTY, l(null), l(null)).makePipe().asProcessor().process(null)); + assertEquals("", new Right(EMPTY, l("foo bar"), l(-1)).makePipe().asProcessor().process(null)); + assertEquals("", new Right(EMPTY, l("foo bar"), l(0)).makePipe().asProcessor().process(null)); + assertEquals("", new Right(EMPTY, l('f'), l(0)).makePipe().asProcessor().process(null)); } public void testRightFunctionInputValidation() { SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Right(EMPTY, l(5), l(3)).makeProcessorDefinition().asProcessor().process(null)); + () -> new Right(EMPTY, l(5), l(3)).makePipe().asProcessor().process(null)); assertEquals("A string/char is required; received [5]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Right(EMPTY, l("foo bar"), l("baz")).makeProcessorDefinition().asProcessor().process(null)); + () -> new Right(EMPTY, l("foo bar"), l("baz")).makePipe().asProcessor().process(null)); assertEquals("A number is required; received [baz]", siae.getMessage()); } public void testRepeatFunctionWithValidInput() { - assertEquals("foofoofoo", new Repeat(EMPTY, l("foo"), l(3)).makeProcessorDefinition().asProcessor().process(null)); - assertEquals("foo", new Repeat(EMPTY, l("foo"), l(1)).makeProcessorDefinition().asProcessor().process(null)); - assertEquals("fff", new Repeat(EMPTY, l('f'), l(3)).makeProcessorDefinition().asProcessor().process(null)); + assertEquals("foofoofoo", new 
Repeat(EMPTY, l("foo"), l(3)).makePipe().asProcessor().process(null)); + assertEquals("foo", new Repeat(EMPTY, l("foo"), l(1)).makePipe().asProcessor().process(null)); + assertEquals("fff", new Repeat(EMPTY, l('f'), l(3)).makePipe().asProcessor().process(null)); } public void testRepeatFunctionWithEdgeCases() { - assertNull(new Repeat(EMPTY, l("foo"), l(null)).makeProcessorDefinition().asProcessor().process(null)); - assertNull(new Repeat(EMPTY, l(null), l(3)).makeProcessorDefinition().asProcessor().process(null)); - assertNull(new Repeat(EMPTY, l(null), l(null)).makeProcessorDefinition().asProcessor().process(null)); - assertNull(new Repeat(EMPTY, l("foo"), l(-1)).makeProcessorDefinition().asProcessor().process(null)); - assertNull(new Repeat(EMPTY, l("foo"), l(0)).makeProcessorDefinition().asProcessor().process(null)); + assertNull(new Repeat(EMPTY, l("foo"), l(null)).makePipe().asProcessor().process(null)); + assertNull(new Repeat(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); + assertNull(new Repeat(EMPTY, l(null), l(null)).makePipe().asProcessor().process(null)); + assertNull(new Repeat(EMPTY, l("foo"), l(-1)).makePipe().asProcessor().process(null)); + assertNull(new Repeat(EMPTY, l("foo"), l(0)).makePipe().asProcessor().process(null)); } public void testRepeatFunctionInputsValidation() { SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Repeat(EMPTY, l(5), l(3)).makeProcessorDefinition().asProcessor().process(null)); + () -> new Repeat(EMPTY, l(5), l(3)).makePipe().asProcessor().process(null)); assertEquals("A string/char is required; received [5]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Repeat(EMPTY, l("foo bar"), l("baz")).makeProcessorDefinition().asProcessor().process(null)); + () -> new Repeat(EMPTY, l("foo bar"), l("baz")).makePipe().asProcessor().process(null)); assertEquals("A number is required; received [baz]", siae.getMessage()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringPipeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringPipeTests.java new file mode 100644 index 00000000000..65a52b2ee04 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringPipeTests.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
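
The rewritten assertions above pin down an easy-to-miss asymmetry: with a non-positive count, LEFT and RIGHT return the empty string while REPEAT returns null, and all three propagate null inputs. A plain-Java sketch of those rules (hypothetical helpers relying on String.repeat from Java 11+, not the SQL processors themselves):

    public class StringNumericSemanticsSketch {
        static String left(String s, Integer count) {
            if (s == null || count == null) return null;
            return count <= 0 ? "" : s.substring(0, Math.min(count, s.length()));
        }

        static String right(String s, Integer count) {
            if (s == null || count == null) return null;
            return count <= 0 ? "" : s.substring(s.length() - Math.min(count, s.length()));
        }

        static String repeat(String s, Integer count) {
            if (s == null || count == null || count <= 0) return null; // null, not ""
            return s.repeat(count);
        }

        public static void main(String[] args) {
            System.out.println(left("foo bar", 123)); // "foo bar": the count is clamped
            System.out.println(right("foo bar", 3));  // "bar"
            System.out.println(left("foo bar", -1));  // ""
            System.out.println(repeat("foo", 0));     // null
        }
    }
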
+ */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.string; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; +import org.elasticsearch.xpack.sql.tree.Location; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.function.Function; + +import static org.elasticsearch.xpack.sql.expression.Expressions.pipe; +import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; +import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; + +public class BinaryStringStringPipeTests + extends AbstractNodeTestCase { + + @Override + protected BinaryStringStringPipe randomInstance() { + return randomBinaryStringStringPipe(); + } + + private Expression randomBinaryStringStringExpression() { + return randomBinaryStringStringPipe().expression(); + } + + public static BinaryStringStringPipe randomBinaryStringStringPipe() { + List functions = new ArrayList<>(); + functions.add(new Position( + randomLocation(), + randomStringLiteral(), + randomStringLiteral() + ).makePipe()); + // if we decide to add DIFFERENCE(string,string) in the future, here we'd add it as well + return (BinaryStringStringPipe) randomFrom(functions); + } + + @Override + public void testTransform() { + // test transforming only the properties (location, expression), + // skipping the children (the two parameters of the binary function) which are tested separately + BinaryStringStringPipe b1 = randomInstance(); + Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomBinaryStringStringExpression()); + BinaryStringStringPipe newB = new BinaryStringStringPipe( + b1.location(), + newExpression, + b1.left(), + b1.right(), + b1.operation()); + assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class)); + + BinaryStringStringPipe b2 = randomInstance(); + Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation()); + newB = new BinaryStringStringPipe( + newLoc, + b2.expression(), + b2.left(), + b2.right(), + b2.operation()); + assertEquals(newB, + b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? 
newLoc : v, Location.class)); + } + + @Override + public void testReplaceChildren() { + BinaryStringStringPipe b = randomInstance(); + Pipe newLeft = pipe(((Expression) randomValueOtherThan(b.left(), () -> randomStringLiteral()))); + Pipe newRight = pipe(((Expression) randomValueOtherThan(b.right(), () -> randomStringLiteral()))); + BinaryStringStringPipe newB = + new BinaryStringStringPipe(b.location(), b.expression(), b.left(), b.right(), b.operation()); + + BinaryPipe transformed = newB.replaceChildren(newLeft, b.right()); + assertEquals(transformed.left(), newLeft); + assertEquals(transformed.location(), b.location()); + assertEquals(transformed.expression(), b.expression()); + assertEquals(transformed.right(), b.right()); + + transformed = newB.replaceChildren(b.left(), newRight); + assertEquals(transformed.left(), b.left()); + assertEquals(transformed.location(), b.location()); + assertEquals(transformed.expression(), b.expression()); + assertEquals(transformed.right(), newRight); + + transformed = newB.replaceChildren(newLeft, newRight); + assertEquals(transformed.left(), newLeft); + assertEquals(transformed.location(), b.location()); + assertEquals(transformed.expression(), b.expression()); + assertEquals(transformed.right(), newRight); + } + + @Override + protected BinaryStringStringPipe mutate(BinaryStringStringPipe instance) { + List> randoms = new ArrayList<>(); + randoms.add(f -> new BinaryStringStringPipe(f.location(), + f.expression(), + pipe(((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral()))), + f.right(), + f.operation())); + randoms.add(f -> new BinaryStringStringPipe(f.location(), + f.expression(), + f.left(), + pipe(((Expression) randomValueOtherThan(f.right(), () -> randomStringLiteral()))), + f.operation())); + randoms.add(f -> new BinaryStringStringPipe(f.location(), + f.expression(), + pipe(((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral()))), + pipe(((Expression) randomValueOtherThan(f.right(), () -> randomStringLiteral()))), + f.operation())); + + return randomFrom(randoms).apply(instance); + } + + @Override + protected BinaryStringStringPipe copy(BinaryStringStringPipe instance) { + return new BinaryStringStringPipe(instance.location(), + instance.expression(), + instance.left(), + instance.right(), + instance.operation()); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessorDefinitionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessorDefinitionTests.java deleted file mode 100644 index 77b1329bdea..00000000000 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessorDefinitionTests.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
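
Position, built into pipes in the test above and exercised directly in BinaryStringStringProcessorTests further below, follows SQL's 1-based convention: 4 for "bar" in "foobar", 0 when the pattern is absent, null on null input. That is String.indexOf shifted by one, as a sketch with an invented helper:

    public class PositionSemanticsSketch {
        // SQL POSITION: 1-based index of the first match, 0 if absent, null on null input
        static Integer position(String pattern, String source) {
            if (pattern == null || source == null) return null;
            return source.indexOf(pattern) + 1; // indexOf is 0-based / -1, so +1 maps to 1-based / 0
        }

        public static void main(String[] args) {
            System.out.println(position("bar", "foobar")); // 4
            System.out.println(position("foo", "bar"));    // 0
            System.out.println(position(null, "bar"));     // null
        }
    }
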
- */ - -package org.elasticsearch.xpack.sql.expression.function.scalar.string; - -import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.BinaryProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; -import org.elasticsearch.xpack.sql.tree.Location; - -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; -import java.util.function.Function; - -import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; -import static org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions.toProcessorDefinition; -import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; - -public class BinaryStringStringProcessorDefinitionTests - extends AbstractNodeTestCase { - - @Override - protected BinaryStringStringProcessorDefinition randomInstance() { - return randomBinaryStringStringProcessorDefinition(); - } - - private Expression randomBinaryStringStringExpression() { - return randomBinaryStringStringProcessorDefinition().expression(); - } - - public static BinaryStringStringProcessorDefinition randomBinaryStringStringProcessorDefinition() { - List functions = new ArrayList<>(); - functions.add(new Position( - randomLocation(), - randomStringLiteral(), - randomStringLiteral() - ).makeProcessorDefinition()); - // if we decide to add DIFFERENCE(string,string) in the future, here we'd add it as well - return (BinaryStringStringProcessorDefinition) randomFrom(functions); - } - - @Override - public void testTransform() { - // test transforming only the properties (location, expression), - // skipping the children (the two parameters of the binary function) which are tested separately - BinaryStringStringProcessorDefinition b1 = randomInstance(); - Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomBinaryStringStringExpression()); - BinaryStringStringProcessorDefinition newB = new BinaryStringStringProcessorDefinition( - b1.location(), - newExpression, - b1.left(), - b1.right(), - b1.operation()); - assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class)); - - BinaryStringStringProcessorDefinition b2 = randomInstance(); - Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation()); - newB = new BinaryStringStringProcessorDefinition( - newLoc, - b2.expression(), - b2.left(), - b2.right(), - b2.operation()); - assertEquals(newB, - b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? 
newLoc : v, Location.class)); - } - - @Override - public void testReplaceChildren() { - BinaryStringStringProcessorDefinition b = randomInstance(); - ProcessorDefinition newLeft = toProcessorDefinition((Expression) randomValueOtherThan(b.left(), () -> randomStringLiteral())); - ProcessorDefinition newRight = toProcessorDefinition((Expression) randomValueOtherThan(b.right(), () -> randomStringLiteral())); - BinaryStringStringProcessorDefinition newB = - new BinaryStringStringProcessorDefinition(b.location(), b.expression(), b.left(), b.right(), b.operation()); - - BinaryProcessorDefinition transformed = newB.replaceChildren(newLeft, b.right()); - assertEquals(transformed.left(), newLeft); - assertEquals(transformed.location(), b.location()); - assertEquals(transformed.expression(), b.expression()); - assertEquals(transformed.right(), b.right()); - - transformed = newB.replaceChildren(b.left(), newRight); - assertEquals(transformed.left(), b.left()); - assertEquals(transformed.location(), b.location()); - assertEquals(transformed.expression(), b.expression()); - assertEquals(transformed.right(), newRight); - - transformed = newB.replaceChildren(newLeft, newRight); - assertEquals(transformed.left(), newLeft); - assertEquals(transformed.location(), b.location()); - assertEquals(transformed.expression(), b.expression()); - assertEquals(transformed.right(), newRight); - } - - @Override - protected BinaryStringStringProcessorDefinition mutate(BinaryStringStringProcessorDefinition instance) { - List> randoms = new ArrayList<>(); - randoms.add(f -> new BinaryStringStringProcessorDefinition(f.location(), - f.expression(), - toProcessorDefinition((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral())), - f.right(), - f.operation())); - randoms.add(f -> new BinaryStringStringProcessorDefinition(f.location(), - f.expression(), - f.left(), - toProcessorDefinition((Expression) randomValueOtherThan(f.right(), () -> randomStringLiteral())), - f.operation())); - randoms.add(f -> new BinaryStringStringProcessorDefinition(f.location(), - f.expression(), - toProcessorDefinition((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral())), - toProcessorDefinition((Expression) randomValueOtherThan(f.right(), () -> randomStringLiteral())), - f.operation())); - - return randomFrom(randoms).apply(instance); - } - - @Override - protected BinaryStringStringProcessorDefinition copy(BinaryStringStringProcessorDefinition instance) { - return new BinaryStringStringProcessorDefinition(instance.location(), - instance.expression(), - instance.left(), - instance.right(), - instance.operation()); - } -} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessorTests.java index 3d0a1ebb1ab..bc5c2f57a7d 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessorTests.java @@ -11,8 +11,8 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; -import 
org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ConstantProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringStringProcessor.BinaryStringStringOperation; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; @@ -38,26 +38,26 @@ public class BinaryStringStringProcessorTests extends AbstractWireSerializingTes } public void testPositionFunctionWithValidInput() { - assertEquals(4, new Position(EMPTY, l("bar"), l("foobar")).makeProcessorDefinition().asProcessor().process(null)); - assertEquals(1, new Position(EMPTY, l("foo"), l("foobar")).makeProcessorDefinition().asProcessor().process(null)); - assertEquals(0, new Position(EMPTY, l("foo"), l("bar")).makeProcessorDefinition().asProcessor().process(null)); - assertEquals(3, new Position(EMPTY, l('r'), l("bar")).makeProcessorDefinition().asProcessor().process(null)); - assertEquals(0, new Position(EMPTY, l('z'), l("bar")).makeProcessorDefinition().asProcessor().process(null)); - assertEquals(1, new Position(EMPTY, l('b'), l('b')).makeProcessorDefinition().asProcessor().process(null)); + assertEquals(4, new Position(EMPTY, l("bar"), l("foobar")).makePipe().asProcessor().process(null)); + assertEquals(1, new Position(EMPTY, l("foo"), l("foobar")).makePipe().asProcessor().process(null)); + assertEquals(0, new Position(EMPTY, l("foo"), l("bar")).makePipe().asProcessor().process(null)); + assertEquals(3, new Position(EMPTY, l('r'), l("bar")).makePipe().asProcessor().process(null)); + assertEquals(0, new Position(EMPTY, l('z'), l("bar")).makePipe().asProcessor().process(null)); + assertEquals(1, new Position(EMPTY, l('b'), l('b')).makePipe().asProcessor().process(null)); } public void testPositionFunctionWithEdgeCases() { - assertNull(new Position(EMPTY, l("foo"), l(null)).makeProcessorDefinition().asProcessor().process(null)); - assertNull(new Position(EMPTY, l(null), l("foo")).makeProcessorDefinition().asProcessor().process(null)); - assertNull(new Position(EMPTY, l(null), l(null)).makeProcessorDefinition().asProcessor().process(null)); + assertNull(new Position(EMPTY, l("foo"), l(null)).makePipe().asProcessor().process(null)); + assertNull(new Position(EMPTY, l(null), l("foo")).makePipe().asProcessor().process(null)); + assertNull(new Position(EMPTY, l(null), l(null)).makePipe().asProcessor().process(null)); } public void testPositionFunctionInputsValidation() { SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Position(EMPTY, l(5), l("foo")).makeProcessorDefinition().asProcessor().process(null)); + () -> new Position(EMPTY, l(5), l("foo")).makePipe().asProcessor().process(null)); assertEquals("A string/char is required; received [5]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Position(EMPTY, l("foo bar"), l(3)).makeProcessorDefinition().asProcessor().process(null)); + () -> new Position(EMPTY, l("foo bar"), l(3)).makePipe().asProcessor().process(null)); assertEquals("A string/char is required; received [3]", siae.getMessage()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionPipeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionPipeTests.java new file mode 100644 index 
00000000000..b5e18f9f9f8
--- /dev/null
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionPipeTests.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.sql.expression.function.scalar.string;
+
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe;
+import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
+import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase;
+import org.elasticsearch.xpack.sql.tree.Location;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.function.Function;
+
+import static org.elasticsearch.xpack.sql.expression.Expressions.pipe;
+import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral;
+import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation;
+
+public class ConcatFunctionPipeTests extends AbstractNodeTestCase<ConcatFunctionPipe, Pipe> {
+
+    @Override
+    protected ConcatFunctionPipe randomInstance() {
+        return randomConcatFunctionPipe();
+    }
+
+    private Expression randomConcatFunctionExpression() {
+        return randomConcatFunctionPipe().expression();
+    }
+
+    public static ConcatFunctionPipe randomConcatFunctionPipe() {
+        return (ConcatFunctionPipe) new Concat(
+            randomLocation(),
+            randomStringLiteral(),
+            randomStringLiteral())
+            .makePipe();
+    }
+
+    @Override
+    public void testTransform() {
+        // test transforming only the properties (location, expression),
+        // skipping the children (the two parameters of the binary function) which are tested separately
+        ConcatFunctionPipe b1 = randomInstance();
+
+        Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomConcatFunctionExpression());
+        ConcatFunctionPipe newB = new ConcatFunctionPipe(
+            b1.location(),
+            newExpression,
+            b1.left(),
+            b1.right());
+        assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class));
+
+        ConcatFunctionPipe b2 = randomInstance();
+        Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation());
+        newB = new ConcatFunctionPipe(
+            newLoc,
+            b2.expression(),
+            b2.left(),
+            b2.right());
+        assertEquals(newB,
+            b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? newLoc : v, Location.class));
+    }
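+
+    // The three cases below replace the left child, then the right child,
+    // then both, asserting each time that only the swapped pipes change
+    // while the location and expression carry over untouched.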
+    @Override
+    public void testReplaceChildren() {
+        ConcatFunctionPipe b = randomInstance();
+        Pipe newLeft = pipe(((Expression) randomValueOtherThan(b.left(), () -> randomStringLiteral())));
+        Pipe newRight = pipe(((Expression) randomValueOtherThan(b.right(), () -> randomStringLiteral())));
+        ConcatFunctionPipe newB =
+            new ConcatFunctionPipe(b.location(), b.expression(), b.left(), b.right());
+        BinaryPipe transformed = newB.replaceChildren(newLeft, b.right());
+
+        assertEquals(transformed.left(), newLeft);
+        assertEquals(transformed.location(), b.location());
+        assertEquals(transformed.expression(), b.expression());
+        assertEquals(transformed.right(), b.right());
+
+        transformed = newB.replaceChildren(b.left(), newRight);
+        assertEquals(transformed.left(), b.left());
+        assertEquals(transformed.location(), b.location());
+        assertEquals(transformed.expression(), b.expression());
+        assertEquals(transformed.right(), newRight);
+
+        transformed = newB.replaceChildren(newLeft, newRight);
+        assertEquals(transformed.left(), newLeft);
+        assertEquals(transformed.location(), b.location());
+        assertEquals(transformed.expression(), b.expression());
+        assertEquals(transformed.right(), newRight);
+    }
+
+    @Override
+    protected ConcatFunctionPipe mutate(ConcatFunctionPipe instance) {
+        List<Function<ConcatFunctionPipe, ConcatFunctionPipe>> randoms = new ArrayList<>();
+        randoms.add(f -> new ConcatFunctionPipe(f.location(),
+            f.expression(),
+            pipe(((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral()))),
+            f.right()));
+        randoms.add(f -> new ConcatFunctionPipe(f.location(),
+            f.expression(),
+            f.left(),
+            pipe(((Expression) randomValueOtherThan(f.right(), () -> randomStringLiteral())))));
+        randoms.add(f -> new ConcatFunctionPipe(f.location(),
+            f.expression(),
+            pipe(((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral()))),
+            pipe(((Expression) randomValueOtherThan(f.right(), () -> randomStringLiteral())))));
+
+        return randomFrom(randoms).apply(instance);
+    }
+
+    @Override
+    protected ConcatFunctionPipe copy(ConcatFunctionPipe instance) {
+        return new ConcatFunctionPipe(instance.location(),
+            instance.expression(),
+            instance.left(),
+            instance.right());
+    }
+}
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionProcessorDefinitionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionProcessorDefinitionTests.java
deleted file mode 100644
index 4e14e15c195..00000000000
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionProcessorDefinitionTests.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */ - -package org.elasticsearch.xpack.sql.expression.function.scalar.string; - -import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.BinaryProcessorDefinition; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; -import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; -import org.elasticsearch.xpack.sql.tree.Location; - -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; -import java.util.function.Function; - -import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; -import static org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions.toProcessorDefinition; -import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; - -public class ConcatFunctionProcessorDefinitionTests extends AbstractNodeTestCase { - - @Override - protected ConcatFunctionProcessorDefinition randomInstance() { - return randomConcatFunctionProcessorDefinition(); - } - - private Expression randomConcatFunctionExpression() { - return randomConcatFunctionProcessorDefinition().expression(); - } - - public static ConcatFunctionProcessorDefinition randomConcatFunctionProcessorDefinition() { - return (ConcatFunctionProcessorDefinition) new Concat( - randomLocation(), - randomStringLiteral(), - randomStringLiteral()) - .makeProcessorDefinition(); - } - - @Override - public void testTransform() { - // test transforming only the properties (location, expression), - // skipping the children (the two parameters of the binary function) which are tested separately - ConcatFunctionProcessorDefinition b1 = randomInstance(); - - Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomConcatFunctionExpression()); - ConcatFunctionProcessorDefinition newB = new ConcatFunctionProcessorDefinition( - b1.location(), - newExpression, - b1.left(), - b1.right()); - assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class)); - - ConcatFunctionProcessorDefinition b2 = randomInstance(); - Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation()); - newB = new ConcatFunctionProcessorDefinition( - newLoc, - b2.expression(), - b2.left(), - b2.right()); - assertEquals(newB, - b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? 
newLoc : v, Location.class)); - } - - @Override - public void testReplaceChildren() { - ConcatFunctionProcessorDefinition b = randomInstance(); - ProcessorDefinition newLeft = toProcessorDefinition((Expression) randomValueOtherThan(b.left(), () -> randomStringLiteral())); - ProcessorDefinition newRight = toProcessorDefinition((Expression) randomValueOtherThan(b.right(), () -> randomStringLiteral())); - ConcatFunctionProcessorDefinition newB = - new ConcatFunctionProcessorDefinition(b.location(), b.expression(), b.left(), b.right()); - BinaryProcessorDefinition transformed = newB.replaceChildren(newLeft, b.right()); - - assertEquals(transformed.left(), newLeft); - assertEquals(transformed.location(), b.location()); - assertEquals(transformed.expression(), b.expression()); - assertEquals(transformed.right(), b.right()); - - transformed = newB.replaceChildren(b.left(), newRight); - assertEquals(transformed.left(), b.left()); - assertEquals(transformed.location(), b.location()); - assertEquals(transformed.expression(), b.expression()); - assertEquals(transformed.right(), newRight); - - transformed = newB.replaceChildren(newLeft, newRight); - assertEquals(transformed.left(), newLeft); - assertEquals(transformed.location(), b.location()); - assertEquals(transformed.expression(), b.expression()); - assertEquals(transformed.right(), newRight); - } - - @Override - protected ConcatFunctionProcessorDefinition mutate(ConcatFunctionProcessorDefinition instance) { - List> randoms = new ArrayList<>(); - randoms.add(f -> new ConcatFunctionProcessorDefinition(f.location(), - f.expression(), - toProcessorDefinition((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral())), - f.right())); - randoms.add(f -> new ConcatFunctionProcessorDefinition(f.location(), - f.expression(), - f.left(), - toProcessorDefinition((Expression) randomValueOtherThan(f.right(), () -> randomStringLiteral())))); - randoms.add(f -> new ConcatFunctionProcessorDefinition(f.location(), - f.expression(), - toProcessorDefinition((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral())), - toProcessorDefinition((Expression) randomValueOtherThan(f.right(), () -> randomStringLiteral())))); - - return randomFrom(randoms).apply(instance); - } - - @Override - protected ConcatFunctionProcessorDefinition copy(ConcatFunctionProcessorDefinition instance) { - return new ConcatFunctionProcessorDefinition(instance.location(), - instance.expression(), - instance.left(), - instance.right()); - } -} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatProcessorTests.java index 804f9a9d150..1a99d4ab27d 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatProcessorTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ConstantProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; import static 
org.elasticsearch.xpack.sql.tree.Location.EMPTY; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; @@ -36,22 +36,22 @@ public class ConcatProcessorTests extends AbstractWireSerializingTestCase new Concat(EMPTY, l(5), l("foo")).makeProcessorDefinition().asProcessor().process(null)); + () -> new Concat(EMPTY, l(5), l("foo")).makePipe().asProcessor().process(null)); assertEquals("A string/char is required; received [5]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Concat(EMPTY, l("foo bar"), l(3)).makeProcessorDefinition().asProcessor().process(null)); + () -> new Concat(EMPTY, l("foo bar"), l(3)).makePipe().asProcessor().process(null)); assertEquals("A string/char is required; received [3]", siae.getMessage()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionProcessorDefinitionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionPipeTests.java similarity index 50% rename from x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionProcessorDefinitionTests.java rename to x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionPipeTests.java index bf17962b083..c4ee5fd9ad7 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionProcessorDefinitionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionPipeTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.Combinations; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; import org.elasticsearch.xpack.sql.tree.Location; @@ -18,74 +18,74 @@ import java.util.List; import java.util.Objects; import java.util.function.Function; +import static org.elasticsearch.xpack.sql.expression.Expressions.pipe; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomIntLiteral; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; -import static org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions.toProcessorDefinition; import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; -public class InsertFunctionProcessorDefinitionTests extends AbstractNodeTestCase { +public class InsertFunctionPipeTests extends AbstractNodeTestCase { @Override - protected InsertFunctionProcessorDefinition randomInstance() { - return randomInsertFunctionProcessorDefinition(); + protected InsertFunctionPipe randomInstance() { + return randomInsertFunctionPipe(); } - private Expression randomInsertFunctionExpression() { - return randomInsertFunctionProcessorDefinition().expression(); + private Expression randomInsertFunctionExpression() { + return randomInsertFunctionPipe().expression(); } - public static InsertFunctionProcessorDefinition randomInsertFunctionProcessorDefinition() { - return 
(InsertFunctionProcessorDefinition) (new Insert(randomLocation(), - randomStringLiteral(), + public static InsertFunctionPipe randomInsertFunctionPipe() { + return (InsertFunctionPipe) (new Insert(randomLocation(), + randomStringLiteral(), randomIntLiteral(), randomIntLiteral(), randomStringLiteral()) - .makeProcessorDefinition()); + .makePipe()); } @Override public void testTransform() { - // test transforming only the properties (location, expression), + // test transforming only the properties (location, expression), // skipping the children (the two parameters of the binary function) which are tested separately - InsertFunctionProcessorDefinition b1 = randomInstance(); + InsertFunctionPipe b1 = randomInstance(); Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomInsertFunctionExpression()); - InsertFunctionProcessorDefinition newB = new InsertFunctionProcessorDefinition( - b1.location(), + InsertFunctionPipe newB = new InsertFunctionPipe( + b1.location(), newExpression, - b1.source(), + b1.source(), b1.start(), b1.length(), b1.replacement()); assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class)); - InsertFunctionProcessorDefinition b2 = randomInstance(); + InsertFunctionPipe b2 = randomInstance(); Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation()); - newB = new InsertFunctionProcessorDefinition( - newLoc, + newB = new InsertFunctionPipe( + newLoc, b2.expression(), - b2.source(), + b2.source(), b2.start(), b2.length(), b2.replacement()); - assertEquals(newB, + assertEquals(newB, b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? newLoc : v, Location.class)); } @Override public void testReplaceChildren() { - InsertFunctionProcessorDefinition b = randomInstance(); - ProcessorDefinition newSource = toProcessorDefinition((Expression) randomValueOtherThan(b.source(), () -> randomStringLiteral())); - ProcessorDefinition newStart = toProcessorDefinition((Expression) randomValueOtherThan(b.start(), () -> randomIntLiteral())); - ProcessorDefinition newLength = toProcessorDefinition((Expression) randomValueOtherThan(b.length(), () -> randomIntLiteral())); - ProcessorDefinition newR = toProcessorDefinition((Expression) randomValueOtherThan(b.replacement(), () -> randomStringLiteral())); - InsertFunctionProcessorDefinition newB = - new InsertFunctionProcessorDefinition(b.location(), b.expression(), b.source(), b.start(), b.length(), b.replacement()); - InsertFunctionProcessorDefinition transformed = null; + InsertFunctionPipe b = randomInstance(); + Pipe newSource = pipe(((Expression) randomValueOtherThan(b.source(), () -> randomStringLiteral()))); + Pipe newStart = pipe(((Expression) randomValueOtherThan(b.start(), () -> randomIntLiteral()))); + Pipe newLength = pipe(((Expression) randomValueOtherThan(b.length(), () -> randomIntLiteral()))); + Pipe newR = pipe(((Expression) randomValueOtherThan(b.replacement(), () -> randomStringLiteral()))); + InsertFunctionPipe newB = + new InsertFunctionPipe(b.location(), b.expression(), b.source(), b.start(), b.length(), b.replacement()); + InsertFunctionPipe transformed = null; // generate all the combinations of possible children modifications and test all of them for(int i = 1; i < 5; i++) { for(BitSet comb : new Combinations(4, i)) { - transformed = (InsertFunctionProcessorDefinition) newB.replaceChildren( + transformed = (InsertFunctionPipe) newB.replaceChildren( comb.get(0) ? newSource : b.source(), comb.get(1) ? 
newStart : b.start(), comb.get(2) ? newLength : b.length(), @@ -101,22 +101,22 @@ public class InsertFunctionProcessorDefinitionTests extends AbstractNodeTestCase } @Override - protected InsertFunctionProcessorDefinition mutate(InsertFunctionProcessorDefinition instance) { - List> randoms = new ArrayList<>(); + protected InsertFunctionPipe mutate(InsertFunctionPipe instance) { + List> randoms = new ArrayList<>(); for(int i = 1; i < 5; i++) { for(BitSet comb : new Combinations(4, i)) { - randoms.add(f -> new InsertFunctionProcessorDefinition( + randoms.add(f -> new InsertFunctionPipe( f.location(), - f.expression(), - comb.get(0) ? toProcessorDefinition((Expression) randomValueOtherThan(f.source(), - () -> randomStringLiteral())) : f.source(), - comb.get(1) ? toProcessorDefinition((Expression) randomValueOtherThan(f.start(), - () -> randomIntLiteral())) : f.start(), - comb.get(2) ? toProcessorDefinition((Expression) randomValueOtherThan(f.length(), - () -> randomIntLiteral())): f.length(), - comb.get(3) ? toProcessorDefinition((Expression) randomValueOtherThan(f.replacement(), - () -> randomStringLiteral())) : f.replacement())); + f.expression(), + comb.get(0) ? pipe(((Expression) randomValueOtherThan(f.source(), + () -> randomStringLiteral()))) : f.source(), + comb.get(1) ? pipe(((Expression) randomValueOtherThan(f.start(), + () -> randomIntLiteral()))) : f.start(), + comb.get(2) ? pipe(((Expression) randomValueOtherThan(f.length(), + () -> randomIntLiteral()))): f.length(), + comb.get(3) ? pipe(((Expression) randomValueOtherThan(f.replacement(), + () -> randomStringLiteral()))) : f.replacement())); } } @@ -124,8 +124,8 @@ public class InsertFunctionProcessorDefinitionTests extends AbstractNodeTestCase } @Override - protected InsertFunctionProcessorDefinition copy(InsertFunctionProcessorDefinition instance) { - return new InsertFunctionProcessorDefinition(instance.location(), + protected InsertFunctionPipe copy(InsertFunctionPipe instance) { + return new InsertFunctionPipe(instance.location(), instance.expression(), instance.source(), instance.start(), diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java index 851dc6f1aa8..1cafe1cfbd9 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ConstantProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; @@ -38,47 +38,47 @@ public class InsertProcessorTests extends AbstractWireSerializingTestCase new Insert(EMPTY, l(5), l(1), l(3), l("baz")).makeProcessorDefinition().asProcessor().process(null)); + () -> new Insert(EMPTY, l(5), l(1), l(3), l("baz")).makePipe().asProcessor().process(null)); assertEquals("A 
string/char is required; received [5]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Insert(EMPTY, l("foobar"), l(1), l(3), l(66)).makeProcessorDefinition().asProcessor().process(null)); + () -> new Insert(EMPTY, l("foobar"), l(1), l(3), l(66)).makePipe().asProcessor().process(null)); assertEquals("A string/char is required; received [66]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Insert(EMPTY, l("foobar"), l("c"), l(3), l("baz")).makeProcessorDefinition().asProcessor().process(null)); + () -> new Insert(EMPTY, l("foobar"), l("c"), l(3), l("baz")).makePipe().asProcessor().process(null)); assertEquals("A number is required; received [c]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Insert(EMPTY, l("foobar"), l(1), l('z'), l("baz")).makeProcessorDefinition().asProcessor().process(null)); + () -> new Insert(EMPTY, l("foobar"), l(1), l('z'), l("baz")).makePipe().asProcessor().process(null)); assertEquals("A number is required; received [z]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Insert(EMPTY, l("foobar"), l(1), l(-1), l("baz")).makeProcessorDefinition().asProcessor().process(null)); + () -> new Insert(EMPTY, l("foobar"), l(1), l(-1), l("baz")).makePipe().asProcessor().process(null)); assertEquals("A positive number is required for [length]; received [-1]", siae.getMessage()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinitionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionPipeTests.java similarity index 53% rename from x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinitionTests.java rename to x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionPipeTests.java index 4815c9c9528..25b361ab381 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinitionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionPipeTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.Combinations; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; import org.elasticsearch.xpack.sql.tree.Location; @@ -18,74 +18,74 @@ import java.util.List; import java.util.Objects; import java.util.function.Function; +import static org.elasticsearch.xpack.sql.expression.Expressions.pipe; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomIntLiteral; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; -import static org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions.toProcessorDefinition; import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; -public class LocateFunctionProcessorDefinitionTests extends 
AbstractNodeTestCase { +public class LocateFunctionPipeTests extends AbstractNodeTestCase { @Override - protected LocateFunctionProcessorDefinition randomInstance() { - return randomLocateFunctionProcessorDefinition(); + protected LocateFunctionPipe randomInstance() { + return randomLocateFunctionPipe(); } - private Expression randomLocateFunctionExpression() { - return randomLocateFunctionProcessorDefinition().expression(); + private Expression randomLocateFunctionExpression() { + return randomLocateFunctionPipe().expression(); } - public static LocateFunctionProcessorDefinition randomLocateFunctionProcessorDefinition() { - return (LocateFunctionProcessorDefinition) (new Locate(randomLocation(), - randomStringLiteral(), + public static LocateFunctionPipe randomLocateFunctionPipe() { + return (LocateFunctionPipe) (new Locate(randomLocation(), + randomStringLiteral(), randomStringLiteral(), randomFrom(true, false) ? randomIntLiteral() : null) - .makeProcessorDefinition()); + .makePipe()); } @Override public void testTransform() { - // test transforming only the properties (location, expression), + // test transforming only the properties (location, expression), // skipping the children (the two parameters of the binary function) which are tested separately - LocateFunctionProcessorDefinition b1 = randomInstance(); + LocateFunctionPipe b1 = randomInstance(); Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomLocateFunctionExpression()); - LocateFunctionProcessorDefinition newB = new LocateFunctionProcessorDefinition( - b1.location(), + LocateFunctionPipe newB = new LocateFunctionPipe( + b1.location(), newExpression, - b1.pattern(), + b1.pattern(), b1.source(), b1.start()); assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class)); - LocateFunctionProcessorDefinition b2 = randomInstance(); + LocateFunctionPipe b2 = randomInstance(); Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation()); - newB = new LocateFunctionProcessorDefinition( - newLoc, + newB = new LocateFunctionPipe( + newLoc, b2.expression(), - b2.pattern(), + b2.pattern(), b2.source(), b2.start()); - assertEquals(newB, + assertEquals(newB, b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? 
newLoc : v, Location.class)); } @Override public void testReplaceChildren() { - LocateFunctionProcessorDefinition b = randomInstance(); - ProcessorDefinition newPattern = toProcessorDefinition((Expression) randomValueOtherThan(b.pattern(), () -> randomStringLiteral())); - ProcessorDefinition newSource = toProcessorDefinition((Expression) randomValueOtherThan(b.source(), () -> randomStringLiteral())); - ProcessorDefinition newStart; + LocateFunctionPipe b = randomInstance(); + Pipe newPattern = pipe(((Expression) randomValueOtherThan(b.pattern(), () -> randomStringLiteral()))); + Pipe newSource = pipe(((Expression) randomValueOtherThan(b.source(), () -> randomStringLiteral()))); + Pipe newStart; - LocateFunctionProcessorDefinition newB = new LocateFunctionProcessorDefinition( + LocateFunctionPipe newB = new LocateFunctionPipe( b.location(), b.expression(), b.pattern(), b.source(), b.start()); - newStart = toProcessorDefinition((Expression) randomValueOtherThan(b.start(), () -> randomIntLiteral())); - LocateFunctionProcessorDefinition transformed = null; + newStart = pipe(((Expression) randomValueOtherThan(b.start(), () -> randomIntLiteral()))); + LocateFunctionPipe transformed = null; // generate all the combinations of possible children modifications and test all of them for(int i = 1; i < 4; i++) { for(BitSet comb : new Combinations(3, i)) { - transformed = (LocateFunctionProcessorDefinition) newB.replaceChildren( + transformed = (LocateFunctionPipe) newB.replaceChildren( comb.get(0) ? newPattern : b.pattern(), comb.get(1) ? newSource : b.source(), comb.get(2) ? newStart : b.start()); @@ -100,31 +100,31 @@ public class LocateFunctionProcessorDefinitionTests extends AbstractNodeTestCase } @Override - protected LocateFunctionProcessorDefinition mutate(LocateFunctionProcessorDefinition instance) { - List> randoms = new ArrayList<>(); + protected LocateFunctionPipe mutate(LocateFunctionPipe instance) { + List> randoms = new ArrayList<>(); if (instance.start() == null) { for(int i = 1; i < 3; i++) { for(BitSet comb : new Combinations(2, i)) { - randoms.add(f -> new LocateFunctionProcessorDefinition(f.location(), - f.expression(), - comb.get(0) ? toProcessorDefinition((Expression) randomValueOtherThan(f.pattern(), - () -> randomStringLiteral())) : f.pattern(), - comb.get(1) ? toProcessorDefinition((Expression) randomValueOtherThan(f.source(), - () -> randomStringLiteral())) : f.source(), + randoms.add(f -> new LocateFunctionPipe(f.location(), + f.expression(), + comb.get(0) ? pipe(((Expression) randomValueOtherThan(f.pattern(), + () -> randomStringLiteral()))) : f.pattern(), + comb.get(1) ? pipe(((Expression) randomValueOtherThan(f.source(), + () -> randomStringLiteral()))) : f.source(), null)); } } } else { for(int i = 1; i < 4; i++) { for(BitSet comb : new Combinations(3, i)) { - randoms.add(f -> new LocateFunctionProcessorDefinition(f.location(), - f.expression(), - comb.get(0) ? toProcessorDefinition((Expression) randomValueOtherThan(f.pattern(), - () -> randomStringLiteral())) : f.pattern(), - comb.get(1) ? toProcessorDefinition((Expression) randomValueOtherThan(f.source(), - () -> randomStringLiteral())) : f.source(), - comb.get(2) ? toProcessorDefinition((Expression) randomValueOtherThan(f.start(), - () -> randomIntLiteral())) : f.start())); + randoms.add(f -> new LocateFunctionPipe(f.location(), + f.expression(), + comb.get(0) ? pipe(((Expression) randomValueOtherThan(f.pattern(), + () -> randomStringLiteral()))) : f.pattern(), + comb.get(1) ? 
pipe(((Expression) randomValueOtherThan(f.source(), + () -> randomStringLiteral()))) : f.source(), + comb.get(2) ? pipe(((Expression) randomValueOtherThan(f.start(), + () -> randomIntLiteral()))) : f.start())); } } } @@ -133,8 +133,8 @@ public class LocateFunctionProcessorDefinitionTests extends AbstractNodeTestCase } @Override - protected LocateFunctionProcessorDefinition copy(LocateFunctionProcessorDefinition instance) { - return new LocateFunctionProcessorDefinition(instance.location(), + protected LocateFunctionPipe copy(LocateFunctionPipe instance) { + return new LocateFunctionPipe(instance.location(), instance.expression(), instance.pattern(), instance.source(), diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateProcessorTests.java index e03ef72b8b8..f000ab813ec 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateProcessorTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ConstantProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; @@ -41,31 +41,31 @@ public class LocateProcessorTests extends AbstractWireSerializingTestCase new Locate(EMPTY, l(5), l("foobarbar"), l(3)).makeProcessorDefinition().asProcessor().process(null)); + () -> new Locate(EMPTY, l(5), l("foobarbar"), l(3)).makePipe().asProcessor().process(null)); assertEquals("A string/char is required; received [5]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Locate(EMPTY, l("foo"), l(1), l(3)).makeProcessorDefinition().asProcessor().process(null)); + () -> new Locate(EMPTY, l("foo"), l(1), l(3)).makePipe().asProcessor().process(null)); assertEquals("A string/char is required; received [1]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Locate(EMPTY, l("foobarbar"), l("bar"), l('c')).makeProcessorDefinition().asProcessor().process(null)); + () -> new Locate(EMPTY, l("foobarbar"), l("bar"), l('c')).makePipe().asProcessor().process(null)); assertEquals("A number is required; received [c]", siae.getMessage()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionProcessorDefinitionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionPipeTests.java similarity index 54% rename from x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionProcessorDefinitionTests.java rename to x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionPipeTests.java index 14e9e51f471..bdc45f50ed2 100644 --- 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionProcessorDefinitionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionPipeTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.Combinations; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; import org.elasticsearch.xpack.sql.tree.Location; @@ -18,37 +18,37 @@ import java.util.List; import java.util.Objects; import java.util.function.Function; +import static org.elasticsearch.xpack.sql.expression.Expressions.pipe; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; -import static org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions.toProcessorDefinition; import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; -public class ReplaceFunctionProcessorDefinitionTests extends AbstractNodeTestCase { +public class ReplaceFunctionPipeTests extends AbstractNodeTestCase { @Override - protected ReplaceFunctionProcessorDefinition randomInstance() { - return randomReplaceFunctionProcessorDefinition(); + protected ReplaceFunctionPipe randomInstance() { + return randomReplaceFunctionPipe(); } private Expression randomReplaceFunctionExpression() { - return randomReplaceFunctionProcessorDefinition().expression(); + return randomReplaceFunctionPipe().expression(); } - public static ReplaceFunctionProcessorDefinition randomReplaceFunctionProcessorDefinition() { - return (ReplaceFunctionProcessorDefinition) (new Replace(randomLocation(), + public static ReplaceFunctionPipe randomReplaceFunctionPipe() { + return (ReplaceFunctionPipe) (new Replace(randomLocation(), randomStringLiteral(), randomStringLiteral(), randomStringLiteral()) - .makeProcessorDefinition()); + .makePipe()); } @Override public void testTransform() { // test transforming only the properties (location, expression), // skipping the children (the two parameters of the binary function) which are tested separately - ReplaceFunctionProcessorDefinition b1 = randomInstance(); + ReplaceFunctionPipe b1 = randomInstance(); Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomReplaceFunctionExpression()); - ReplaceFunctionProcessorDefinition newB = new ReplaceFunctionProcessorDefinition( + ReplaceFunctionPipe newB = new ReplaceFunctionPipe( b1.location(), newExpression, b1.source(), @@ -56,9 +56,9 @@ public class ReplaceFunctionProcessorDefinitionTests extends AbstractNodeTestCas b1.replacement()); assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? 
newExpression : v, Expression.class)); - ReplaceFunctionProcessorDefinition b2 = randomInstance(); + ReplaceFunctionPipe b2 = randomInstance(); Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation()); - newB = new ReplaceFunctionProcessorDefinition( + newB = new ReplaceFunctionPipe( newLoc, b2.expression(), b2.source(), @@ -70,18 +70,18 @@ public class ReplaceFunctionProcessorDefinitionTests extends AbstractNodeTestCas @Override public void testReplaceChildren() { - ReplaceFunctionProcessorDefinition b = randomInstance(); - ProcessorDefinition newSource = toProcessorDefinition((Expression) randomValueOtherThan(b.source(), () -> randomStringLiteral())); - ProcessorDefinition newPattern = toProcessorDefinition((Expression) randomValueOtherThan(b.pattern(), () -> randomStringLiteral())); - ProcessorDefinition newR = toProcessorDefinition((Expression) randomValueOtherThan(b.replacement(), () -> randomStringLiteral())); - ReplaceFunctionProcessorDefinition newB = - new ReplaceFunctionProcessorDefinition(b.location(), b.expression(), b.source(), b.pattern(), b.replacement()); - ReplaceFunctionProcessorDefinition transformed = null; + ReplaceFunctionPipe b = randomInstance(); + Pipe newSource = pipe(((Expression) randomValueOtherThan(b.source(), () -> randomStringLiteral()))); + Pipe newPattern = pipe(((Expression) randomValueOtherThan(b.pattern(), () -> randomStringLiteral()))); + Pipe newR = pipe(((Expression) randomValueOtherThan(b.replacement(), () -> randomStringLiteral()))); + ReplaceFunctionPipe newB = + new ReplaceFunctionPipe(b.location(), b.expression(), b.source(), b.pattern(), b.replacement()); + ReplaceFunctionPipe transformed = null; // generate all the combinations of possible children modifications and test all of them for(int i = 1; i < 4; i++) { for(BitSet comb : new Combinations(3, i)) { - transformed = (ReplaceFunctionProcessorDefinition) newB.replaceChildren( + transformed = (ReplaceFunctionPipe) newB.replaceChildren( comb.get(0) ? newSource : b.source(), comb.get(1) ? newPattern : b.pattern(), comb.get(2) ? newR : b.replacement()); @@ -96,19 +96,19 @@ public class ReplaceFunctionProcessorDefinitionTests extends AbstractNodeTestCas } @Override - protected ReplaceFunctionProcessorDefinition mutate(ReplaceFunctionProcessorDefinition instance) { - List> randoms = new ArrayList<>(); + protected ReplaceFunctionPipe mutate(ReplaceFunctionPipe instance) { + List> randoms = new ArrayList<>(); for(int i = 1; i < 4; i++) { for(BitSet comb : new Combinations(3, i)) { - randoms.add(f -> new ReplaceFunctionProcessorDefinition(f.location(), + randoms.add(f -> new ReplaceFunctionPipe(f.location(), f.expression(), - comb.get(0) ? toProcessorDefinition((Expression) randomValueOtherThan(f.source(), - () -> randomStringLiteral())) : f.source(), - comb.get(1) ? toProcessorDefinition((Expression) randomValueOtherThan(f.pattern(), - () -> randomStringLiteral())) : f.pattern(), - comb.get(2) ? toProcessorDefinition((Expression) randomValueOtherThan(f.replacement(), - () -> randomStringLiteral())) : f.replacement())); + comb.get(0) ? pipe(((Expression) randomValueOtherThan(f.source(), + () -> randomStringLiteral()))) : f.source(), + comb.get(1) ? pipe(((Expression) randomValueOtherThan(f.pattern(), + () -> randomStringLiteral()))) : f.pattern(), + comb.get(2) ? 
pipe(((Expression) randomValueOtherThan(f.replacement(), + () -> randomStringLiteral()))) : f.replacement())); } } @@ -116,8 +116,8 @@ public class ReplaceFunctionProcessorDefinitionTests extends AbstractNodeTestCas } @Override - protected ReplaceFunctionProcessorDefinition copy(ReplaceFunctionProcessorDefinition instance) { - return new ReplaceFunctionProcessorDefinition(instance.location(), + protected ReplaceFunctionPipe copy(ReplaceFunctionPipe instance) { + return new ReplaceFunctionPipe(instance.location(), instance.expression(), instance.source(), instance.pattern(), diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceProcessorTests.java index 77c84d5e7f4..49a7dd78fee 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceProcessorTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ConstantProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; @@ -38,29 +38,29 @@ public class ReplaceProcessorTests extends AbstractWireSerializingTestCase new Replace(EMPTY, l(5), l("bar"), l("baz")).makeProcessorDefinition().asProcessor().process(null)); + () -> new Replace(EMPTY, l(5), l("bar"), l("baz")).makePipe().asProcessor().process(null)); assertEquals("A string/char is required; received [5]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Replace(EMPTY, l("foobarbar"), l(4), l("baz")).makeProcessorDefinition().asProcessor().process(null)); + () -> new Replace(EMPTY, l("foobarbar"), l(4), l("baz")).makePipe().asProcessor().process(null)); assertEquals("A string/char is required; received [4]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Replace(EMPTY, l("foobarbar"), l("bar"), l(3)).makeProcessorDefinition().asProcessor().process(null)); + () -> new Replace(EMPTY, l("foobarbar"), l("bar"), l(3)).makePipe().asProcessor().process(null)); assertEquals("A string/char is required; received [3]", siae.getMessage()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionProcessorDefinitionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionPipeTests.java similarity index 50% rename from x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionProcessorDefinitionTests.java rename to x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionPipeTests.java index 250949123a9..3218339484e 100644 --- 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionProcessorDefinitionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionPipeTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.Combinations; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; import org.elasticsearch.xpack.sql.tree.Location; @@ -18,71 +18,71 @@ import java.util.List; import java.util.Objects; import java.util.function.Function; +import static org.elasticsearch.xpack.sql.expression.Expressions.pipe; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomIntLiteral; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; -import static org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions.toProcessorDefinition; import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; -public class SubstringFunctionProcessorDefinitionTests - extends AbstractNodeTestCase { +public class SubstringFunctionPipeTests + extends AbstractNodeTestCase { @Override - protected SubstringFunctionProcessorDefinition randomInstance() { - return randomSubstringFunctionProcessorDefinition(); + protected SubstringFunctionPipe randomInstance() { + return randomSubstringFunctionPipe(); } - private Expression randomSubstringFunctionExpression() { - return randomSubstringFunctionProcessorDefinition().expression(); + private Expression randomSubstringFunctionExpression() { + return randomSubstringFunctionPipe().expression(); } - public static SubstringFunctionProcessorDefinition randomSubstringFunctionProcessorDefinition() { - return (SubstringFunctionProcessorDefinition) (new Substring(randomLocation(), - randomStringLiteral(), + public static SubstringFunctionPipe randomSubstringFunctionPipe() { + return (SubstringFunctionPipe) (new Substring(randomLocation(), + randomStringLiteral(), randomIntLiteral(), randomIntLiteral()) - .makeProcessorDefinition()); + .makePipe()); } @Override public void testTransform() { - // test transforming only the properties (location, expression), + // test transforming only the properties (location, expression), // skipping the children (the two parameters of the binary function) which are tested separately - SubstringFunctionProcessorDefinition b1 = randomInstance(); + SubstringFunctionPipe b1 = randomInstance(); Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomSubstringFunctionExpression()); - SubstringFunctionProcessorDefinition newB = new SubstringFunctionProcessorDefinition( - b1.location(), + SubstringFunctionPipe newB = new SubstringFunctionPipe( + b1.location(), newExpression, - b1.source(), + b1.source(), b1.start(), b1.length()); assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? 
newExpression : v, Expression.class)); - SubstringFunctionProcessorDefinition b2 = randomInstance(); + SubstringFunctionPipe b2 = randomInstance(); Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation()); - newB = new SubstringFunctionProcessorDefinition( - newLoc, + newB = new SubstringFunctionPipe( + newLoc, b2.expression(), - b2.source(), + b2.source(), b2.start(), b2.length()); - assertEquals(newB, + assertEquals(newB, b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? newLoc : v, Location.class)); } @Override public void testReplaceChildren() { - SubstringFunctionProcessorDefinition b = randomInstance(); - ProcessorDefinition newSource = toProcessorDefinition((Expression) randomValueOtherThan(b.source(), () -> randomStringLiteral())); - ProcessorDefinition newStart = toProcessorDefinition((Expression) randomValueOtherThan(b.start(), () -> randomIntLiteral())); - ProcessorDefinition newLength = toProcessorDefinition((Expression) randomValueOtherThan(b.length(), () -> randomIntLiteral())); - SubstringFunctionProcessorDefinition newB = - new SubstringFunctionProcessorDefinition(b.location(), b.expression(), b.source(), b.start(), b.length()); - SubstringFunctionProcessorDefinition transformed = null; + SubstringFunctionPipe b = randomInstance(); + Pipe newSource = pipe(((Expression) randomValueOtherThan(b.source(), () -> randomStringLiteral()))); + Pipe newStart = pipe(((Expression) randomValueOtherThan(b.start(), () -> randomIntLiteral()))); + Pipe newLength = pipe(((Expression) randomValueOtherThan(b.length(), () -> randomIntLiteral()))); + SubstringFunctionPipe newB = + new SubstringFunctionPipe(b.location(), b.expression(), b.source(), b.start(), b.length()); + SubstringFunctionPipe transformed = null; // generate all the combinations of possible children modifications and test all of them for(int i = 1; i < 4; i++) { for(BitSet comb : new Combinations(3, i)) { - transformed = (SubstringFunctionProcessorDefinition) newB.replaceChildren( + transformed = (SubstringFunctionPipe) newB.replaceChildren( comb.get(0) ? newSource : b.source(), comb.get(1) ? newStart : b.start(), comb.get(2) ? newLength : b.length()); @@ -96,20 +96,20 @@ public class SubstringFunctionProcessorDefinitionTests } @Override - protected SubstringFunctionProcessorDefinition mutate(SubstringFunctionProcessorDefinition instance) { - List> randoms = new ArrayList<>(); + protected SubstringFunctionPipe mutate(SubstringFunctionPipe instance) { + List> randoms = new ArrayList<>(); for(int i = 1; i < 4; i++) { for(BitSet comb : new Combinations(3, i)) { - randoms.add(f -> new SubstringFunctionProcessorDefinition( + randoms.add(f -> new SubstringFunctionPipe( f.location(), - f.expression(), - comb.get(0) ? toProcessorDefinition((Expression) randomValueOtherThan(f.source(), - () -> randomStringLiteral())) : f.source(), - comb.get(1) ? toProcessorDefinition((Expression) randomValueOtherThan(f.start(), - () -> randomIntLiteral())) : f.start(), - comb.get(2) ? toProcessorDefinition((Expression) randomValueOtherThan(f.length(), - () -> randomIntLiteral())): f.length())); + f.expression(), + comb.get(0) ? pipe(((Expression) randomValueOtherThan(f.source(), + () -> randomStringLiteral()))) : f.source(), + comb.get(1) ? pipe(((Expression) randomValueOtherThan(f.start(), + () -> randomIntLiteral()))) : f.start(), + comb.get(2) ? 
pipe(((Expression) randomValueOtherThan(f.length(), + () -> randomIntLiteral()))): f.length())); } } @@ -117,8 +117,8 @@ public class SubstringFunctionProcessorDefinitionTests } @Override - protected SubstringFunctionProcessorDefinition copy(SubstringFunctionProcessorDefinition instance) { - return new SubstringFunctionProcessorDefinition(instance.location(), + protected SubstringFunctionPipe copy(SubstringFunctionPipe instance) { + return new SubstringFunctionPipe(instance.location(), instance.expression(), instance.source(), instance.start(), diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringProcessorTests.java index 8afc63a05da..bb22f005fe7 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringProcessorTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ConstantProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; @@ -37,39 +37,39 @@ public class SubstringProcessorTests extends AbstractWireSerializingTestCase new Substring(EMPTY, l(5), l(1), l(3)).makeProcessorDefinition().asProcessor().process(null)); + () -> new Substring(EMPTY, l(5), l(1), l(3)).makePipe().asProcessor().process(null)); assertEquals("A string/char is required; received [5]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Substring(EMPTY, l("foobarbar"), l(1), l("baz")).makeProcessorDefinition().asProcessor().process(null)); + () -> new Substring(EMPTY, l("foobarbar"), l(1), l("baz")).makePipe().asProcessor().process(null)); assertEquals("A number is required; received [baz]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Substring(EMPTY, l("foobarbar"), l("bar"), l(3)).makeProcessorDefinition().asProcessor().process(null)); + () -> new Substring(EMPTY, l("foobarbar"), l("bar"), l(3)).makePipe().asProcessor().process(null)); assertEquals("A number is required; received [bar]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, - () -> new Substring(EMPTY, l("foobarbar"), l(1), l(-3)).makeProcessorDefinition().asProcessor().process(null)); + () -> new Substring(EMPTY, l("foobarbar"), l(1), l(-3)).makePipe().asProcessor().process(null)); assertEquals("A positive number is required for [length]; received [-3]", siae.getMessage()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/AttributeInputTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AttributeInputTests.java similarity index 83% rename from 
x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/AttributeInputTests.java rename to x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AttributeInputTests.java index 7378675075d..6d3bcbbe25a 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/AttributeInputTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AttributeInputTests.java @@ -3,12 +3,14 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition; +package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.execution.search.FieldExtraction; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.AttributeInput; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.ReferenceInput; import static org.mockito.Mockito.mock; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/BinaryPipesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/BinaryPipesTests.java new file mode 100644 index 00000000000..e786b4345f8 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/BinaryPipesTests.java @@ -0,0 +1,163 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.expression.gen.pipeline; + +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; +import org.elasticsearch.xpack.sql.expression.Literal; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe.AttributeResolver; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; + +import java.util.List; + +import static java.util.Collections.emptyList; + +public class BinaryPipesTests extends ESTestCase { + public void testSupportedByAggsOnlyQuery() { + Pipe supported = new DummyPipe(true); + Pipe unsupported = new DummyPipe(false); + + assertFalse(new DummyBinaryPipe(unsupported, unsupported).supportedByAggsOnlyQuery()); + assertFalse(new DummyBinaryPipe(unsupported, supported).supportedByAggsOnlyQuery()); + assertFalse(new DummyBinaryPipe(supported, unsupported).supportedByAggsOnlyQuery()); + assertTrue(new DummyBinaryPipe(supported, supported).supportedByAggsOnlyQuery()); + } + + public void testResolveAttributes() { + Pipe needsNothing = new DummyPipe(randomBoolean()); + Pipe resolvesTo = new DummyPipe(randomBoolean()); + Pipe needsResolution = new DummyPipe(randomBoolean()) { + @Override + public Pipe resolveAttributes(AttributeResolver resolver) { + return resolvesTo; + } + }; + AttributeResolver resolver = a -> { + fail("not expected"); + return null; + }; + + Pipe d = new DummyBinaryPipe(needsNothing, needsNothing); + assertSame(d, d.resolveAttributes(resolver)); + + d = new DummyBinaryPipe(needsNothing, needsResolution); + Pipe expected = new DummyBinaryPipe(needsNothing, resolvesTo); + assertEquals(expected, d.resolveAttributes(resolver)); + + d = new DummyBinaryPipe(needsResolution, needsNothing); + expected = new DummyBinaryPipe(resolvesTo, needsNothing); + assertEquals(expected, d.resolveAttributes(resolver)); + } + + public void testCollectFields() { + DummyPipe wantsScore = new DummyPipe(randomBoolean()) { + @Override + public void collectFields(SqlSourceBuilder sourceBuilder) { + sourceBuilder.trackScores(); + } + }; + DummyPipe wantsNothing = new DummyPipe(randomBoolean()); + assertFalse(tracksScores(new DummyBinaryPipe(wantsNothing, wantsNothing))); + assertTrue(tracksScores(new DummyBinaryPipe(wantsScore, wantsNothing))); + assertTrue(tracksScores(new DummyBinaryPipe(wantsNothing, wantsScore))); + } + + /** + * Returns {@code true} if the processor definition builds a query that + * tracks scores, {@code false} otherwise. Used for testing + * {@link Pipe#collectFields(SqlSourceBuilder)}.
+ */ + static boolean tracksScores(Pipe d) { + SqlSourceBuilder b = new SqlSourceBuilder(); + d.collectFields(b); + SearchSourceBuilder source = new SearchSourceBuilder(); + b.build(source); + return source.trackScores(); + } + + public static BinaryPipe randomBinaryPipe() { + return new DummyBinaryPipe(randomUnaryPipe(), randomUnaryPipe()); + } + + public static Pipe randomUnaryPipe() { + return new ConstantInput(Location.EMPTY, Literal.of(Location.EMPTY, randomAlphaOfLength(16)), randomAlphaOfLength(16)); + } + + public static final class DummyBinaryPipe extends BinaryPipe { + public DummyBinaryPipe(Pipe left, Pipe right) { + this(Location.EMPTY, left, right); + } + + public DummyBinaryPipe(Location location, Pipe left, Pipe right) { + super(location, null, left, right); + } + + @Override + protected NodeInfo<DummyBinaryPipe> info() { + return NodeInfo.create(this, DummyBinaryPipe::new, left(), right()); + } + + @Override + public Processor asProcessor() { + return null; + } + + @Override + protected BinaryPipe replaceChildren(Pipe left, Pipe right) { + return new DummyBinaryPipe(location(), left, right); + } + } + + public static class DummyPipe extends Pipe { + private final boolean supportedByAggsOnlyQuery; + + public DummyPipe(boolean supportedByAggsOnlyQuery) { + this(Location.EMPTY, supportedByAggsOnlyQuery); + } + + public DummyPipe(Location location, boolean supportedByAggsOnlyQuery) { + super(location, null, emptyList()); + this.supportedByAggsOnlyQuery = supportedByAggsOnlyQuery; + } + + @Override + protected NodeInfo<DummyPipe> info() { + return NodeInfo.create(this, DummyPipe::new, supportedByAggsOnlyQuery); + } + + @Override + public Pipe replaceChildren(List<Pipe> newChildren) { + throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); + } + + @Override + public boolean supportedByAggsOnlyQuery() { + return supportedByAggsOnlyQuery; + } + + @Override + public boolean resolved() { + return true; + } + + @Override + public Processor asProcessor() { + return null; + } + + @Override + public Pipe resolveAttributes(AttributeResolver resolver) { + return this; + } + + @Override + public void collectFields(SqlSourceBuilder sourceBuilder) { + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/UnaryPipeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/UnaryPipeTests.java new file mode 100644 index 00000000000..f1c24ae2bd4 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/UnaryPipeTests.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License.
+ */ +package org.elasticsearch.xpack.sql.expression.gen.pipeline; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.UnaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipesTests.DummyPipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe.AttributeResolver; +import org.elasticsearch.xpack.sql.tree.Location; + +import static org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipesTests.tracksScores; + +public class UnaryPipeTests extends ESTestCase { + public void testSupportedByAggsOnlyQuery() { + Pipe supported = new DummyPipe(true); + Pipe unsupported = new DummyPipe(false); + + assertFalse(newUnaryProcessor(unsupported).supportedByAggsOnlyQuery()); + assertTrue(newUnaryProcessor(supported).supportedByAggsOnlyQuery()); + } + + public void testResolveAttributes() { + Pipe needsNothing = new DummyPipe(randomBoolean()); + Pipe resolvesTo = new DummyPipe(randomBoolean()); + Pipe needsResolution = new DummyPipe(randomBoolean()) { + @Override + public Pipe resolveAttributes(AttributeResolver resolver) { + return resolvesTo; + } + }; + AttributeResolver resolver = a -> { + fail("not expected"); + return null; + }; + + Pipe d = newUnaryProcessor(needsNothing); + assertSame(d, d.resolveAttributes(resolver)); + + d = newUnaryProcessor(needsResolution); + Pipe expected = newUnaryProcessor(resolvesTo); + assertEquals(expected, d.resolveAttributes(resolver)); + } + + public void testCollectFields() { + DummyPipe wantsScore = new DummyPipe(randomBoolean()) { + @Override + public void collectFields(SqlSourceBuilder sourceBuilder) { + sourceBuilder.trackScores(); + } + }; + DummyPipe wantsNothing = new DummyPipe(randomBoolean()); + assertFalse(tracksScores(newUnaryProcessor(wantsNothing))); + assertTrue(tracksScores(newUnaryProcessor(wantsScore))); + } + + private Pipe newUnaryProcessor(Pipe child) { + return new UnaryPipe(Location.EMPTY, null, child, null); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/ChainingProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/processor/ChainingProcessorTests.java similarity index 92% rename from x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/ChainingProcessorTests.java rename to x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/processor/ChainingProcessorTests.java index a7440ba5377..98ad0daa823 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/ChainingProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/processor/ChainingProcessorTests.java @@ -3,12 +3,13 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License.
*/ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime; +package org.elasticsearch.xpack.sql.expression.gen.processor; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; +import org.elasticsearch.xpack.sql.expression.gen.processor.ChainingProcessor; import java.io.IOException; import java.util.function.Supplier; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/ConstantProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/processor/ConstantProcessorTests.java similarity index 90% rename from x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/ConstantProcessorTests.java rename to x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/processor/ConstantProcessorTests.java index b26cf8edc1c..5354dbd9ed1 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/runtime/ConstantProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/processor/ConstantProcessorTests.java @@ -3,10 +3,11 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime; +package org.elasticsearch.xpack.sql.expression.gen.processor; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; import java.io.IOException; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextUtilsTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextUtilsTests.java index 7dd08e9c34c..058cfeabb2c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextUtilsTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextUtilsTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.expression.predicate.fulltext; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.expression.predicate.fulltext.FullTextUtils; import org.elasticsearch.xpack.sql.parser.ParsingException; import org.elasticsearch.xpack.sql.tree.Location; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/BinaryArithmeticProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessorTests.java similarity index 57% rename from x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/BinaryArithmeticProcessorTests.java rename to x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessorTests.java index 7baba683d74..ee021f056f8 100644 --- 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/BinaryArithmeticProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessorTests.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic; +package org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; @@ -11,8 +11,15 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ConstantProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Add; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Div; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mod; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mul; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Neg; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Sub; import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; @@ -40,34 +47,34 @@ public class BinaryArithmeticProcessorTests extends AbstractWireSerializingTestC } public void testAdd() { - Processor ba = new Add(EMPTY, l(7), l(3)).makeProcessorDefinition().asProcessor(); + Processor ba = new Add(EMPTY, l(7), l(3)).makePipe().asProcessor(); assertEquals(10, ba.process(null)); } public void testSub() { - Processor ba = new Sub(EMPTY, l(7), l(3)).makeProcessorDefinition().asProcessor(); + Processor ba = new Sub(EMPTY, l(7), l(3)).makePipe().asProcessor(); assertEquals(4, ba.process(null)); } public void testMul() { - Processor ba = new Mul(EMPTY, l(7), l(3)).makeProcessorDefinition().asProcessor(); + Processor ba = new Mul(EMPTY, l(7), l(3)).makePipe().asProcessor(); assertEquals(21, ba.process(null)); } public void testDiv() { - Processor ba = new Div(EMPTY, l(7), l(3)).makeProcessorDefinition().asProcessor(); + Processor ba = new Div(EMPTY, l(7), l(3)).makePipe().asProcessor(); assertEquals(2, ((Number) ba.process(null)).longValue()); - ba = new Div(EMPTY, l((double) 7), l(3)).makeProcessorDefinition().asProcessor(); + ba = new Div(EMPTY, l((double) 7), l(3)).makePipe().asProcessor(); assertEquals(2.33, ((Number) ba.process(null)).doubleValue(), 0.01d); } public void testMod() { - Processor ba = new Mod(EMPTY, l(7), l(3)).makeProcessorDefinition().asProcessor(); + Processor ba = new Mod(EMPTY, l(7), l(3)).makePipe().asProcessor(); assertEquals(1, ba.process(null)); } public void testNegate() { - Processor ba = new Neg(EMPTY, l(7)).asProcessorDefinition().asProcessor(); + Processor ba 
= new Neg(EMPTY, l(7)).asPipe().asProcessor(); assertEquals(-7, ba.process(null)); } @@ -79,17 +86,17 @@ public class BinaryArithmeticProcessorTests extends AbstractWireSerializingTestC Expression sub = new Sub(EMPTY, div, l(2)); Mod mod = new Mod(EMPTY, sub, l(2)); - Processor proc = mod.makeProcessorDefinition().asProcessor(); + Processor proc = mod.makePipe().asProcessor(); assertEquals(1, proc.process(null)); } public void testHandleNull() { - assertNull(new Add(EMPTY, l(null), l(3)).makeProcessorDefinition().asProcessor().process(null)); - assertNull(new Sub(EMPTY, l(null), l(3)).makeProcessorDefinition().asProcessor().process(null)); - assertNull(new Mul(EMPTY, l(null), l(3)).makeProcessorDefinition().asProcessor().process(null)); - assertNull(new Div(EMPTY, l(null), l(3)).makeProcessorDefinition().asProcessor().process(null)); - assertNull(new Mod(EMPTY, l(null), l(3)).makeProcessorDefinition().asProcessor().process(null)); - assertNull(new Neg(EMPTY, l(null)).makeProcessorDefinition().asProcessor().process(null)); + assertNull(new Add(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); + assertNull(new Sub(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); + assertNull(new Mul(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); + assertNull(new Div(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); + assertNull(new Mod(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); + assertNull(new Neg(EMPTY, l(null)).makePipe().asProcessor().process(null)); } private static Literal l(Object value) { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonProcessorTests.java new file mode 100644 index 00000000000..0761ec5f2fa --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonProcessorTests.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.sql.expression.Literal; +import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.LessThanOrEqual; + +import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; + +public class BinaryComparisonProcessorTests extends AbstractWireSerializingTestCase<BinaryComparisonProcessor> { + public static BinaryComparisonProcessor randomProcessor() { + return new BinaryComparisonProcessor( + new ConstantProcessor(randomLong()), + new ConstantProcessor(randomLong()), + randomFrom(BinaryComparisonProcessor.BinaryComparisonOperation.values())); + } + + @Override + protected BinaryComparisonProcessor createTestInstance() { + return randomProcessor(); + } + + @Override + protected Reader<BinaryComparisonProcessor> instanceReader() { + return BinaryComparisonProcessor::new; + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(Processors.getNamedWriteables()); + } + + public void testEq() { + assertEquals(true, new Equals(EMPTY, l(4), l(4)).makePipe().asProcessor().process(null)); + assertEquals(false, new Equals(EMPTY, l(3), l(4)).makePipe().asProcessor().process(null)); + } + + public void testGt() { + assertEquals(true, new GreaterThan(EMPTY, l(4), l(3)).makePipe().asProcessor().process(null)); + assertEquals(false, new GreaterThan(EMPTY, l(3), l(4)).makePipe().asProcessor().process(null)); + assertEquals(false, new GreaterThan(EMPTY, l(3), l(3)).makePipe().asProcessor().process(null)); + } + + public void testGte() { + assertEquals(true, new GreaterThanOrEqual(EMPTY, l(4), l(3)).makePipe().asProcessor().process(null)); + assertEquals(false, new GreaterThanOrEqual(EMPTY, l(3), l(4)).makePipe().asProcessor().process(null)); + assertEquals(true, new GreaterThanOrEqual(EMPTY, l(3), l(3)).makePipe().asProcessor().process(null)); + } + + public void testLt() { + assertEquals(false, new LessThan(EMPTY, l(4), l(3)).makePipe().asProcessor().process(null)); + assertEquals(true, new LessThan(EMPTY, l(3), l(4)).makePipe().asProcessor().process(null)); + assertEquals(false, new LessThan(EMPTY, l(3), l(3)).makePipe().asProcessor().process(null)); + } + + public void testLte() { + assertEquals(false, new LessThanOrEqual(EMPTY, l(4), l(3)).makePipe().asProcessor().process(null)); + assertEquals(true, new LessThanOrEqual(EMPTY, l(3), l(4)).makePipe().asProcessor().process(null)); + assertEquals(true, new LessThanOrEqual(EMPTY, l(3), l(3)).makePipe().asProcessor().process(null)); + } + + public void testHandleNull() { + assertNull(new Equals(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); + assertNull(new GreaterThan(EMPTY, l(null),
l(3)).makePipe().asProcessor().process(null)); + assertNull(new GreaterThanOrEqual(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); + assertNull(new LessThan(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); + assertNull(new LessThanOrEqual(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); + } + + private static Literal l(Object value) { + return Literal.of(EMPTY, value); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java index 07349008c07..cf1f64bd583 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java @@ -18,11 +18,6 @@ import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.sql.expression.function.aggregate.Count; import org.elasticsearch.xpack.sql.expression.function.scalar.Cast; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Add; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Div; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Mod; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Mul; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Sub; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfMonth; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfYear; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.MonthOfYear; @@ -35,18 +30,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.math.Abs; import org.elasticsearch.xpack.sql.expression.function.scalar.math.E; import org.elasticsearch.xpack.sql.expression.function.scalar.math.Floor; import org.elasticsearch.xpack.sql.expression.predicate.And; -import org.elasticsearch.xpack.sql.expression.predicate.Equals; -import org.elasticsearch.xpack.sql.expression.predicate.GreaterThan; -import org.elasticsearch.xpack.sql.expression.predicate.GreaterThanOrEqual; +import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.sql.expression.predicate.IsNotNull; -import org.elasticsearch.xpack.sql.expression.predicate.LessThan; -import org.elasticsearch.xpack.sql.expression.predicate.LessThanOrEqual; import org.elasticsearch.xpack.sql.expression.predicate.Not; import org.elasticsearch.xpack.sql.expression.predicate.Or; import org.elasticsearch.xpack.sql.expression.predicate.Range; -import org.elasticsearch.xpack.sql.expression.regex.Like; -import org.elasticsearch.xpack.sql.expression.regex.LikePattern; -import org.elasticsearch.xpack.sql.expression.regex.RLike; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Add; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Div; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mod; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mul; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Sub; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.Equals; +import 
org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.sql.expression.predicate.regex.Like; +import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern; +import org.elasticsearch.xpack.sql.expression.predicate.regex.RLike; import org.elasticsearch.xpack.sql.optimizer.Optimizer.BinaryComparisonSimplification; import org.elasticsearch.xpack.sql.optimizer.Optimizer.BooleanLiteralsOnTheRight; import org.elasticsearch.xpack.sql.optimizer.Optimizer.BooleanSimplification; @@ -237,12 +238,8 @@ public class OptimizerTests extends ESTestCase { Expression exp = new Add(EMPTY, TWO, THREE); assertTrue(exp.foldable()); - assertTrue(exp instanceof NamedExpression); - String n = Expressions.name(exp); - Expression result = new ConstantFolding().rule(exp); assertTrue(result instanceof Literal); - assertEquals(n, Expressions.name(result)); assertEquals(5, ((Literal) result).value()); // check now with an alias @@ -252,21 +249,23 @@ public class OptimizerTests extends ESTestCase { } public void testConstantFoldingBinaryComparison() { - assertEquals(Literal.FALSE, new ConstantFolding().rule(new GreaterThan(EMPTY, TWO, THREE))); - assertEquals(Literal.FALSE, new ConstantFolding().rule(new GreaterThanOrEqual(EMPTY, TWO, THREE))); - assertEquals(Literal.FALSE, new ConstantFolding().rule(new Equals(EMPTY, TWO, THREE))); - assertEquals(Literal.TRUE, new ConstantFolding().rule(new LessThanOrEqual(EMPTY, TWO, THREE))); - assertEquals(Literal.TRUE, new ConstantFolding().rule(new LessThan(EMPTY, TWO, THREE))); + assertEquals(Literal.FALSE, new ConstantFolding().rule(new GreaterThan(EMPTY, TWO, THREE)).canonical()); + assertEquals(Literal.FALSE, new ConstantFolding().rule(new GreaterThanOrEqual(EMPTY, TWO, THREE)).canonical()); + assertEquals(Literal.FALSE, new ConstantFolding().rule(new Equals(EMPTY, TWO, THREE)).canonical()); + assertEquals(Literal.TRUE, new ConstantFolding().rule(new LessThanOrEqual(EMPTY, TWO, THREE)).canonical()); + assertEquals(Literal.TRUE, new ConstantFolding().rule(new LessThan(EMPTY, TWO, THREE)).canonical()); } public void testConstantFoldingBinaryLogic() { - assertEquals(Literal.FALSE, new ConstantFolding().rule(new And(EMPTY, new GreaterThan(EMPTY, TWO, THREE), Literal.TRUE))); - assertEquals(Literal.TRUE, new ConstantFolding().rule(new Or(EMPTY, new GreaterThanOrEqual(EMPTY, TWO, THREE), Literal.TRUE))); + assertEquals(Literal.FALSE, + new ConstantFolding().rule(new And(EMPTY, new GreaterThan(EMPTY, TWO, THREE), Literal.TRUE)).canonical()); + assertEquals(Literal.TRUE, + new ConstantFolding().rule(new Or(EMPTY, new GreaterThanOrEqual(EMPTY, TWO, THREE), Literal.TRUE)).canonical()); } public void testConstantFoldingRange() { - assertEquals(Literal.TRUE, new ConstantFolding().rule(new Range(EMPTY, FIVE, FIVE, true, L(10), false))); - assertEquals(Literal.FALSE, new ConstantFolding().rule(new Range(EMPTY, FIVE, FIVE, false, L(10), false))); + assertEquals(true, new ConstantFolding().rule(new Range(EMPTY, FIVE, FIVE, true, L(10), false)).fold()); + assertEquals(false, new ConstantFolding().rule(new Range(EMPTY, FIVE, FIVE, false, L(10), false)).fold()); } public void testConstantIsNotNull() { @@ -281,9 +280,10 @@ public class OptimizerTests 
extends ESTestCase { public void testConstantFoldingLikes() { assertEquals(Literal.TRUE, - new ConstantFolding().rule(new Like(EMPTY, Literal.of(EMPTY, "test_emp"), new LikePattern(EMPTY, "test%", (char) 0)))); + new ConstantFolding().rule(new Like(EMPTY, Literal.of(EMPTY, "test_emp"), new LikePattern(EMPTY, "test%", (char) 0))) + .canonical()); assertEquals(Literal.TRUE, - new ConstantFolding().rule(new RLike(EMPTY, Literal.of(EMPTY, "test_emp"), Literal.of(EMPTY, "test.emp")))); + new ConstantFolding().rule(new RLike(EMPTY, Literal.of(EMPTY, "test_emp"), Literal.of(EMPTY, "test.emp"))).canonical()); } public void testConstantFoldingDatetime() { @@ -299,11 +299,11 @@ public class OptimizerTests extends ESTestCase { } public void testArithmeticFolding() { - assertEquals(10, foldFunction(new Add(EMPTY, L(7), THREE))); - assertEquals(4, foldFunction(new Sub(EMPTY, L(7), THREE))); - assertEquals(21, foldFunction(new Mul(EMPTY, L(7), THREE))); - assertEquals(2, foldFunction(new Div(EMPTY, L(7), THREE))); - assertEquals(1, foldFunction(new Mod(EMPTY, L(7), THREE))); + assertEquals(10, foldOperator(new Add(EMPTY, L(7), THREE))); + assertEquals(4, foldOperator(new Sub(EMPTY, L(7), THREE))); + assertEquals(21, foldOperator(new Mul(EMPTY, L(7), THREE))); + assertEquals(2, foldOperator(new Div(EMPTY, L(7), THREE))); + assertEquals(1, foldOperator(new Mod(EMPTY, L(7), THREE))); } public void testMathFolding() { @@ -319,6 +319,10 @@ public class OptimizerTests extends ESTestCase { return ((Literal) new ConstantFolding().rule(f)).value(); } + private static Object foldOperator(BinaryOperator b) { + return ((Literal) new ConstantFolding().rule(b)).value(); + } + // // Logical simplifications // diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java index 11ad24582ef..710689ca861 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java @@ -12,8 +12,8 @@ import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction; -import org.elasticsearch.xpack.sql.expression.regex.Like; -import org.elasticsearch.xpack.sql.expression.regex.LikePattern; +import org.elasticsearch.xpack.sql.expression.predicate.regex.Like; +import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.sql.plan.logical.Limit; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.With; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java index 466e749c9a3..122da8fa3a6 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java @@ -9,21 +9,26 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction; +import 
org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Add; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mul; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Neg; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Sub; import org.elasticsearch.xpack.sql.type.DataType; +import static org.hamcrest.core.StringStartsWith.startsWith; + public class ExpressionTests extends ESTestCase { private final SqlParser parser = new SqlParser(); - public void testTokenFunctionName() throws Exception { + public void testTokenFunctionName() { Expression lt = parser.createExpression("LEFT()"); assertEquals(UnresolvedFunction.class, lt.getClass()); UnresolvedFunction uf = (UnresolvedFunction) lt; assertEquals("LEFT", uf.functionName()); } - - public void testLiteralBoolean() throws Exception { + public void testLiteralBoolean() { Expression lt = parser.createExpression("TRUE"); assertEquals(Literal.class, lt.getClass()); Literal l = (Literal) lt; @@ -31,7 +36,7 @@ public class ExpressionTests extends ESTestCase { assertEquals(DataType.BOOLEAN, l.dataType()); } - public void testLiteralDouble() throws Exception { + public void testLiteralDouble() { Expression lt = parser.createExpression(String.valueOf(Double.MAX_VALUE)); assertEquals(Literal.class, lt.getClass()); Literal l = (Literal) lt; @@ -39,7 +44,7 @@ public class ExpressionTests extends ESTestCase { assertEquals(DataType.DOUBLE, l.dataType()); } - public void testLiteralDoubleNegative() throws Exception { + public void testLiteralDoubleNegative() { Expression lt = parser.createExpression(String.valueOf(Double.MIN_VALUE)); assertEquals(Literal.class, lt.getClass()); Literal l = (Literal) lt; @@ -47,7 +52,7 @@ public class ExpressionTests extends ESTestCase { assertEquals(DataType.DOUBLE, l.dataType()); } - public void testLiteralDoublePositive() throws Exception { + public void testLiteralDoublePositive() { Expression lt = parser.createExpression("+" + Double.MAX_VALUE); assertEquals(Literal.class, lt.getClass()); Literal l = (Literal) lt; @@ -55,7 +60,7 @@ public class ExpressionTests extends ESTestCase { assertEquals(DataType.DOUBLE, l.dataType()); } - public void testLiteralLong() throws Exception { + public void testLiteralLong() { Expression lt = parser.createExpression(String.valueOf(Long.MAX_VALUE)); assertEquals(Literal.class, lt.getClass()); Literal l = (Literal) lt; @@ -63,14 +68,14 @@ public class ExpressionTests extends ESTestCase { assertEquals(DataType.LONG, l.dataType()); } - public void testLiteralLongNegative() throws Exception { + public void testLiteralLongNegative() { Expression lt = parser.createExpression(String.valueOf(Long.MIN_VALUE)); assertTrue(lt.foldable()); assertEquals(Long.MIN_VALUE, lt.fold()); assertEquals(DataType.LONG, lt.dataType()); } - public void testLiteralLongPositive() throws Exception { + public void testLiteralLongPositive() { Expression lt = parser.createExpression("+" + String.valueOf(Long.MAX_VALUE)); assertEquals(Literal.class, lt.getClass()); Literal l = (Literal) lt; @@ -78,7 +83,7 @@ public class ExpressionTests extends ESTestCase { assertEquals(DataType.LONG, l.dataType()); } - public void testLiteralInteger() throws Exception { + public void testLiteralInteger() { Expression lt = parser.createExpression(String.valueOf(Integer.MAX_VALUE)); assertEquals(Literal.class, lt.getClass()); Literal l = (Literal) lt; @@ -86,29 +91,69 @@ public class ExpressionTests extends ESTestCase { 
assertEquals(DataType.INTEGER, l.dataType()); } - public void testLiteralIntegerWithShortValue() throws Exception { + public void testLiteralIntegerWithShortValue() { Expression lt = parser.createExpression(String.valueOf(Short.MAX_VALUE)); assertEquals(Literal.class, lt.getClass()); Literal l = (Literal) lt; - assertEquals(Integer.valueOf(Short.MAX_VALUE), l.value()); + assertEquals((int) Short.MAX_VALUE, l.value()); assertEquals(DataType.INTEGER, l.dataType()); } - public void testLiteralIntegerWithByteValue() throws Exception { + public void testLiteralIntegerWithByteValue() { Expression lt = parser.createExpression(String.valueOf(Byte.MAX_VALUE)); assertEquals(Literal.class, lt.getClass()); Literal l = (Literal) lt; - assertEquals(Integer.valueOf(Byte.MAX_VALUE), l.value()); + assertEquals((int) Byte.MAX_VALUE, l.value()); assertEquals(DataType.INTEGER, l.dataType()); } - public void testLiteralIntegerInvalid() throws Exception { + public void testLiteralIntegerInvalid() { ParsingException ex = expectThrows(ParsingException.class, () -> parser.createExpression("123456789098765432101")); assertEquals("Number [123456789098765432101] is too large", ex.getErrorMessage()); } - public void testLiteralDecimalTooBig() throws Exception { + public void testLiteralDecimalTooBig() { ParsingException ex = expectThrows(ParsingException.class, () -> parser.createExpression("1.9976931348623157e+308")); assertEquals("Number [1.9976931348623157e+308] is too large", ex.getErrorMessage()); } -} \ No newline at end of file + + public void testLiteralTimesLiteral() { + Expression expr = parser.createExpression("10*2"); + assertEquals(Mul.class, expr.getClass()); + Mul mul = (Mul) expr; + assertEquals("10 * 2", mul.name()); + assertEquals(DataType.INTEGER, mul.dataType()); + } + + public void testFunctionTimesLiteral() { + Expression expr = parser.createExpression("PI()*2"); + assertEquals(Mul.class, expr.getClass()); + Mul mul = (Mul) expr; + assertEquals("(PI) * 2", mul.name()); + } + + public void testComplexArithmetic() { + Expression expr = parser.createExpression("-(((a-2)-(-3))+b)"); + assertEquals(Neg.class, expr.getClass()); + Neg neg = (Neg) expr; + assertThat(neg.name(), startsWith("-(((a) - 2) - -3) + (b)#")); + assertEquals(1, neg.children().size()); + assertEquals(Add.class, neg.children().get(0).getClass()); + Add add = (Add) neg.children().get(0); + assertEquals("(((a) - 2) - -3) + (b)", add.name()); + assertEquals(2, add.children().size()); + assertEquals("?b", add.children().get(1).toString()); + assertEquals(Sub.class, add.children().get(0).getClass()); + Sub sub1 = (Sub) add.children().get(0); + assertEquals("((a) - 2) - -3", sub1.name()); + assertEquals(2, sub1.children().size()); + assertEquals(Literal.class, sub1.children().get(1).getClass()); + assertEquals("-3", ((Literal) sub1.children().get(1)).name()); + assertEquals(Sub.class, sub1.children().get(0).getClass()); + Sub sub2 = (Sub) sub1.children().get(0); + assertEquals(2, sub2.children().size()); + assertEquals("?a", sub2.children().get(0).toString()); + assertEquals(Literal.class, sub2.children().get(1).getClass()); + assertEquals("2", ((Literal) sub2.children().get(1)).name()); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/LikeEscapingParsingTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/LikeEscapingParsingTests.java index b2abf0b6800..890718737a4 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/LikeEscapingParsingTests.java 
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/LikeEscapingParsingTests.java @@ -7,8 +7,8 @@ package org.elasticsearch.xpack.sql.parser; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.regex.Like; -import org.elasticsearch.xpack.sql.expression.regex.LikePattern; +import org.elasticsearch.xpack.sql.expression.predicate.regex.Like; +import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.type.DataType; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java index 3e7e562e599..aab25349a1d 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.sql.parser; +import com.google.common.base.Joiner; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.NamedExpression; import org.elasticsearch.xpack.sql.expression.Order; @@ -22,6 +23,7 @@ import org.elasticsearch.xpack.sql.plan.logical.Project; import java.util.ArrayList; import java.util.List; +import static java.util.Collections.nCopies; import static java.util.stream.Collectors.toList; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasSize; @@ -136,6 +138,88 @@ public class SqlParserTests extends ESTestCase { assertThat(mmqp.optionMap(), hasEntry("fuzzy_rewrite", "scoring_boolean")); } + public void testLimitToPreventStackOverflowFromLargeUnaryBooleanExpression() { + // Create expression in the form of NOT(NOT(NOT ... (b) ...) + + // 40 elements is ok + new SqlParser().createExpression( + Joiner.on("").join(nCopies(40, "NOT(")).concat("b").concat(Joiner.on("").join(nCopies(40, ")")))); + + // 100 elements parser's "circuit breaker" is triggered + ParsingException e = expectThrows(ParsingException.class, () -> new SqlParser().createExpression( + Joiner.on("").join(nCopies(100, "NOT(")).concat("b").concat(Joiner.on("").join(nCopies(100, ")"))))); + assertEquals("expression is too large to parse, (tree's depth exceeds 100)", e.getErrorMessage()); + } + + public void testLimitToPreventStackOverflowFromLargeBinaryBooleanExpression() { + // Create expression in the form of a = b OR a = b OR ... a = b + + // 50 elements is ok + new SqlParser().createExpression(Joiner.on(" OR ").join(nCopies(50, "a = b"))); + + // 100 elements parser's "circuit breaker" is triggered + ParsingException e = expectThrows(ParsingException.class, () -> + new SqlParser().createExpression(Joiner.on(" OR ").join(nCopies(100, "a = b")))); + assertEquals("expression is too large to parse, (tree's depth exceeds 100)", e.getErrorMessage()); + } + + public void testLimitToPreventStackOverflowFromLargeUnaryArithmeticExpression() { + // Create expression in the form of abs(abs(abs ... (i) ...) 
+ + // 50 elements is ok + new SqlParser().createExpression( + Joiner.on("").join(nCopies(50, "abs(")).concat("i").concat(Joiner.on("").join(nCopies(50, ")")))); + + // 101 elements parser's "circuit breaker" is triggered + ParsingException e = expectThrows(ParsingException.class, () -> new SqlParser().createExpression( + Joiner.on("").join(nCopies(101, "abs(")).concat("i").concat(Joiner.on("").join(nCopies(101, ")"))))); + assertEquals("expression is too large to parse, (tree's depth exceeds 100)", e.getErrorMessage()); + } + + public void testLimitToPreventStackOverflowFromLargeBinaryArithmeticExpression() { + // Create expression in the form of a + a + a + ... + a + + // 100 elements is ok + new SqlParser().createExpression(Joiner.on(" + ").join(nCopies(100, "a"))); + + // 101 elements parser's "circuit breaker" is triggered + ParsingException e = expectThrows(ParsingException.class, () -> + new SqlParser().createExpression(Joiner.on(" + ").join(nCopies(101, "a")))); + assertEquals("expression is too large to parse, (tree's depth exceeds 100)", e.getErrorMessage()); + } + + public void testLimitToPreventStackOverflowFromLargeSubselectTree() { + // Test with queries in the form of `SELECT * FROM (SELECT * FROM (... t) ...) + + // 100 elements is ok + new SqlParser().createStatement( + Joiner.on(" (").join(nCopies(100, "SELECT * FROM")) + .concat("t") + .concat(Joiner.on("").join(nCopies(99, ")")))); + + // 101 elements parser's "circuit breaker" is triggered + ParsingException e = expectThrows(ParsingException.class, () -> new SqlParser().createStatement( + Joiner.on(" (").join(nCopies(101, "SELECT * FROM")) + .concat("t") + .concat(Joiner.on("").join(nCopies(100, ")"))))); + assertEquals("expression is too large to parse, (tree's depth exceeds 100)", e.getErrorMessage()); + } + + public void testLimitToPreventStackOverflowFromLargeComplexSubselectTree() { + // Test with queries in the form of `SELECT true OR true OR .. FROM (SELECT true OR true OR... FROM (... t) ...) + + new SqlParser().createStatement( + Joiner.on(" (").join(nCopies(20, "SELECT ")). + concat(Joiner.on(" OR ").join(nCopies(50, "true"))).concat(" FROM") + .concat("t").concat(Joiner.on("").join(nCopies(19, ")")))); + + ParsingException e = expectThrows(ParsingException.class, () -> new SqlParser().createStatement( + Joiner.on(" (").join(nCopies(20, "SELECT ")). 
+ concat(Joiner.on(" OR ").join(nCopies(100, "true"))).concat(" FROM") + .concat("t").concat(Joiner.on("").join(nCopies(19, ")"))))); + assertEquals("expression is too large to parse, (tree's depth exceeds 100)", e.getErrorMessage()); + } + private LogicalPlan parseStatement(String sql) { return new SqlParser().createStatement(sql); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java index 96c641d4fbb..90fd7392960 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.sql.tree; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.test.ESTestCase; @@ -17,9 +16,13 @@ import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.sql.expression.function.aggregate.Avg; import org.elasticsearch.xpack.sql.expression.function.aggregate.InnerAggregate; -import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.AggExtractorInput; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.AggExtractorInput; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipesTests; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.FullTextPredicate; -import org.elasticsearch.xpack.sql.expression.regex.LikePattern; +import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.sql.tree.NodeTests.ChildrenAreAProperty; import org.elasticsearch.xpack.sql.tree.NodeTests.Dummy; import org.elasticsearch.xpack.sql.tree.NodeTests.NoChildren; @@ -414,7 +417,7 @@ public class NodeSubclassTests<T extends B, B extends Node<B>> extends ESTestCas } } else if (toBuildClass == ChildrenAreAProperty.class) { /* - * While any subclass of Dummy will do here we want to prevent + * While any subclass of DummyFunction will do here we want to prevent * stack overflow so we use the one without children. */ if (argClass == Dummy.class) { @@ -454,6 +457,23 @@ public class NodeSubclassTests<T extends B, B extends Node<B>> extends ESTestCas */ return UnresolvedAttributeTests.randomUnresolvedAttribute(); } + + if (Pipe.class == argClass) { + /* + * Similar to expressions, mock pipes to avoid + * stackoverflow errors while building the tree. + */ + return BinaryPipesTests.randomUnaryPipe(); + } + + if (Processor.class == argClass) { + /* + * Similar to expressions, mock processors to avoid + * stackoverflow errors while building the tree.
+ */ + return new ConstantProcessor(randomAlphaOfLength(16)); + } + if (Node.class.isAssignableFrom(argClass)) { /* * Rather than attempting to mock subclasses of node diff --git a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index 31d6312f662..b020109ca29 100644 --- a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.test.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - import org.apache.http.HttpStatus; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.client.Request; diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.follow_index.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.follow.json similarity index 80% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.follow_index.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.follow.json index 749aae48d91..ea6c2256794 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.follow_index.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.follow.json @@ -1,7 +1,7 @@ { - "ccr.follow_index": { + "ccr.follow": { "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current", - "methods": [ "POST" ], + "methods": [ "PUT" ], "url": { "path": "/{index}/_ccr/follow", "paths": [ "/{index}/_ccr/follow" ], @@ -9,7 +9,7 @@ "index": { "type": "string", "required": true, - "description": "The name of the follower index." + "description": "The name of the follower index" } } }, diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json new file mode 100644 index 00000000000..c8826dc5f1b --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json @@ -0,0 +1,16 @@ +{ + "ccr.get_auto_follow_pattern": { + "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current", + "methods": [ "GET" ], + "url": { + "path": "/_ccr/auto_follow/{leader_cluster_alias}", + "paths": [ "/_ccr/auto_follow", "/_ccr/auto_follow/{leader_cluster_alias}" ], + "parts": { + "leader_cluster_alias": { + "type": "string", + "description": "The name of the leader cluster alias." + } + } + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.pause_follow.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.pause_follow.json new file mode 100644 index 00000000000..b6cfbe9fd0c --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.pause_follow.json @@ -0,0 +1,17 @@ +{ + "ccr.pause_follow": { + "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current", + "methods": [ "POST" ], + "url": { + "path": "/{index}/_ccr/pause_follow", + "paths": [ "/{index}/_ccr/pause_follow" ], + "parts": { + "index": { + "type": "string", + "required": true, + "description": "The name of the follower index that should pause following its leader index." 
+ } + } + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.create_and_follow_index.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.resume_follow.json similarity index 66% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.create_and_follow_index.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.resume_follow.json index 46ff872a1a4..b4f806e8b7f 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.create_and_follow_index.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.resume_follow.json @@ -1,15 +1,15 @@ { - "ccr.create_and_follow_index": { + "ccr.resume_follow": { "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current", "methods": [ "POST" ], "url": { - "path": "/{index}/_ccr/create_and_follow", - "paths": [ "/{index}/_ccr/create_and_follow" ], + "path": "/{index}/_ccr/resume_follow", + "paths": [ "/{index}/_ccr/resume_follow" ], "parts": { "index": { "type": "string", "required": true, - "description": "The name of the follower index" + "description": "The name of the follow index to resume following." } } }, diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.stats.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.stats.json index 7f5cda09f25..aa9e9a7fec3 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.stats.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.stats.json @@ -4,7 +4,7 @@ "methods": [ "GET" ], "url": { "path": "/_ccr/stats", - "paths": [ "/_ccr/stats", "/_ccr/stats/{index}" ], + "paths": [ "/_ccr/stats", "/{index}/_ccr/stats" ], "parts": { "index": { "type": "list", diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.unfollow_index.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.unfollow.json similarity index 86% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.unfollow_index.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.unfollow.json index 5e9a111496a..41be574421f 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.unfollow_index.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.unfollow.json @@ -1,5 +1,5 @@ { - "ccr.unfollow_index": { + "ccr.unfollow": { "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current", "methods": [ "POST" ], "url": { @@ -9,7 +9,7 @@ "index": { "type": "string", "required": true, - "description": "The name of the follower index that should stop following its leader index." + "description": "The name of the follower index that should be turned into a regular index." 
} } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.find_file_structure.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.find_file_structure.json index 20a5c8e0c2a..fd1cbb986a6 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.find_file_structure.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.find_file_structure.json @@ -8,7 +8,13 @@ "params": { "lines_to_sample": { "type": "int", - "description": "Optional parameter to specify how many lines of the file to include in the analysis" + "description": "How many lines of the file should be included in the analysis", + "default": 1000 + }, + "timeout": { + "type": "time", + "description": "Timeout after which the analysis will be aborted", + "default": "25s" }, "charset": { "type": "string", @@ -49,11 +55,12 @@ }, "timestamp_format": { "type": "string", - "description": "Optional parameter to specify the timestamp format in the file" + "description": "Optional parameter to specify the timestamp format in the file - may be either a Joda or Java time format" }, "explain": { "type": "boolean", - "description": "Optional parameter to include a commentary on how the structure was derived" + "description": "Whether to include a commentary on how the structure was derived", + "default": false } } }, diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml index 1f6964b9193..6a0414fe9dd 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml @@ -7,6 +7,7 @@ Content-Type: "application/json" xpack.ml.find_file_structure: lines_to_sample: 3 + timeout: 10s body: - airline: AAL responsetime: 132.2046 @@ -27,7 +28,8 @@ - match: { has_byte_order_marker: false } - match: { format: json } - match: { timestamp_field: time } - - match: { timestamp_formats.0: UNIX } + - match: { joda_timestamp_formats.0: UNIX } + - match: { java_timestamp_formats.0: UNIX } - match: { need_client_timezone: false } - match: { mappings.airline.type: keyword } - match: { mappings.responsetime.type: double } @@ -38,6 +40,11 @@ - match: { field_stats.airline.cardinality: 2 } - match: { field_stats.responsetime.count: 3 } - match: { field_stats.responsetime.cardinality: 3 } + - match: { field_stats.responsetime.min_value: 132.2046 } + - match: { field_stats.responsetime.max_value: 990.4628 } + # Not asserting on field_stats.responsetime.mean as it's a recurring decimal + # so its representation in the response could cause spurious failures + - match: { field_stats.responsetime.median_value: 134.2046 } - match: { field_stats.sourcetype.count: 3 } - match: { field_stats.sourcetype.cardinality: 1 } - match: { field_stats.time.count: 3 } @@ -78,7 +85,8 @@ - match: { has_byte_order_marker: false } - match: { format: json } - match: { timestamp_field: time } - - match: { timestamp_formats.0: UNIX } + - match: { joda_timestamp_formats.0: UNIX } + - match: { java_timestamp_formats.0: UNIX } - match: { need_client_timezone: false } - match: { mappings.airline.type: keyword } - match: { mappings.responsetime.type: double } @@ -89,6 +97,11 @@ - match: { field_stats.airline.cardinality: 2 } - match: { field_stats.responsetime.count: 3 } - match: { field_stats.responsetime.cardinality: 3 } + - match: { field_stats.responsetime.min_value: 132.2046 } + - match: { 
field_stats.responsetime.max_value: 990.4628 } + # Not asserting on field_stats.responsetime.mean as it's a recurring decimal + # so its representation in the response could cause spurious failures + - match: { field_stats.responsetime.median_value: 134.2046 } - match: { field_stats.sourcetype.count: 3 } - match: { field_stats.sourcetype.cardinality: 1 } - match: { field_stats.time.count: 3 } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml index 7a539edcc67..bd49f2c3389 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml @@ -288,6 +288,109 @@ setup: - agg: "max" - agg: "sum" +--- +"Verify job caps by rollup index comma delimited list": + +- skip: + version: " - 6.99.99" + reason: "comma delimited index support was fixed in 7.0" + +- do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + xpack.rollup.put_job: + id: foo2 + body: > + { + "index_pattern": "foo2", + "rollup_index": "foo_rollup", + "cron": "*/30 * * * * ?", + "page_size" :10, + "groups" : { + "date_histogram": { + "field": "the_field", + "interval": "1h" + } + }, + "metrics": [ + { + "field": "value_field", + "metrics": ["min", "max", "sum"] + } + ] + } +- do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + xpack.rollup.put_job: + id: foo3 + body: > + { + "index_pattern": "foo3", + "rollup_index": "foo_rollup2", + "cron": "*/30 * * * * ?", + "page_size" :10, + "groups" : { + "date_histogram": { + "field": "the_field", + "interval": "1h" + } + }, + "metrics": [ + { + "field": "value_field", + "metrics": ["min", "max", "sum"] + } + ] + } + +- do: + xpack.rollup.get_rollup_index_caps: + index: "foo_rollup2,foo_rollup" + +- match: + $body: + foo_rollup: + rollup_jobs: + - job_id: "foo" + rollup_index: "foo_rollup" + index_pattern: "foo" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + - job_id: "foo2" + rollup_index: "foo_rollup" + index_pattern: "foo2" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + foo_rollup2: + rollup_jobs: + - job_id: "foo3" + rollup_index: "foo_rollup2" + index_pattern: "foo3" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + --- "Verify index pattern": diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/usage/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/usage/10_basic.yml index 7a22ad322bf..432308581f6 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/usage/10_basic.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/usage/10_basic.yml @@ -1,8 +1,5 @@ --- "Test watcher usage stats output": - - skip: - version: "all" - reason: AwaitsFix at https://github.com/elastic/elasticsearch/issues/33326 - do: catch: missing xpack.watcher.delete_watch: diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeTasksIT.java 
b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeTasksIT.java index 7605af041e6..1cc6ce8e547 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeTasksIT.java +++ b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeTasksIT.java @@ -88,7 +88,7 @@ public class IndexUpgradeTasksIT extends ESIntegTestCase { @Override public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { - return new MockScriptEngine(pluginScriptLang(), pluginScripts()); + return new MockScriptEngine(pluginScriptLang(), pluginScripts(), Collections.emptyMap()); } public String pluginScriptLang() { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java index 32d492b78a7..eaf64e6ef8f 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java @@ -48,7 +48,6 @@ import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.SearchScript; @@ -106,6 +105,7 @@ import org.elasticsearch.xpack.watcher.condition.CompareCondition; import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.condition.NeverCondition; import org.elasticsearch.xpack.watcher.condition.ScriptCondition; +import org.elasticsearch.xpack.watcher.condition.WatcherConditionScript; import org.elasticsearch.xpack.watcher.execution.AsyncTriggerEventConsumer; import org.elasticsearch.xpack.watcher.execution.ExecutionService; import org.elasticsearch.xpack.watcher.execution.InternalWatchExecutor; @@ -152,6 +152,7 @@ import org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry; import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateService; import org.elasticsearch.xpack.watcher.transform.script.ScriptTransform; import org.elasticsearch.xpack.watcher.transform.script.ScriptTransformFactory; +import org.elasticsearch.xpack.watcher.transform.script.WatcherTransformScript; import org.elasticsearch.xpack.watcher.transform.search.SearchTransform; import org.elasticsearch.xpack.watcher.transform.search.SearchTransformFactory; import org.elasticsearch.xpack.watcher.transport.actions.ack.TransportAckWatchAction; @@ -225,9 +226,6 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa public static final ScriptContext SCRIPT_SEARCH_CONTEXT = new ScriptContext<>("xpack", SearchScript.Factory.class); - // TODO: remove this context when each xpack script use case has their own contexts - public static final ScriptContext SCRIPT_EXECUTABLE_CONTEXT - = new ScriptContext<>("xpack_executable", ExecutableScript.Factory.class); public static final ScriptContext SCRIPT_TEMPLATE_CONTEXT = new ScriptContext<>("xpack_template", TemplateScript.Factory.class); @@ -315,7 +313,7 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa final Map transformFactories = new HashMap<>(); transformFactories.put(ScriptTransform.TYPE, new ScriptTransformFactory(settings, scriptService)); 
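The Watcher.java hunk above deletes the catch-all `xpack_executable` context in favor of the dedicated `watcher_condition` and `watcher_transform` contexts added later in this diff. As a sketch of the pattern those classes follow (this class is illustrative, not part of the change): a context is just a name paired with a factory interface, and the abstract script class pins down the return type so a condition can no longer return an arbitrary object:

```java
import java.util.Map;

import org.elasticsearch.script.ScriptContext;

// Illustrative stand-in for the WatcherConditionScript pattern used in this
// diff: the factory interface defines how instances are created, and the
// typed execute() replaces the old runtime "must return a boolean" check.
public abstract class ExampleConditionScript {
    // Names of the execute() arguments; empty here because everything is
    // passed through the constructor instead.
    public static final String[] PARAMETERS = {};

    private final Map<String, Object> params;

    public ExampleConditionScript(Map<String, Object> params) {
        this.params = params;
    }

    public abstract boolean execute();

    public Map<String, Object> getParams() {
        return params;
    }

    public interface Factory {
        ExampleConditionScript newInstance(Map<String, Object> params);
    }

    public static final ScriptContext<Factory> CONTEXT =
        new ScriptContext<>("example_condition", Factory.class);
}
```

Compilation then goes through `scriptService.compile(script, ExampleConditionScript.CONTEXT)`, mirroring how the updated ScriptCondition below compiles against WatcherConditionScript.CONTEXT.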
transformFactories.put(SearchTransform.TYPE, new SearchTransformFactory(settings, client, xContentRegistry, scriptService)); - final TransformRegistry transformRegistry = new TransformRegistry(settings, Collections.unmodifiableMap(transformFactories)); + final TransformRegistry transformRegistry = new TransformRegistry(Collections.unmodifiableMap(transformFactories)); // actions final Map actionFactoryMap = new HashMap<>(); @@ -673,7 +671,8 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa @Override public List> getContexts() { - return Arrays.asList(Watcher.SCRIPT_SEARCH_CONTEXT, Watcher.SCRIPT_EXECUTABLE_CONTEXT, Watcher.SCRIPT_TEMPLATE_CONTEXT); + return Arrays.asList(Watcher.SCRIPT_SEARCH_CONTEXT, WatcherTransformScript.CONTEXT, + WatcherConditionScript.CONTEXT, Watcher.SCRIPT_TEMPLATE_CONTEXT); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java index 599287bb50a..75fd13915de 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java @@ -27,12 +27,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.upgrade.UpgradeField; import org.elasticsearch.xpack.core.watcher.execution.TriggeredWatchStoreField; import org.elasticsearch.xpack.core.watcher.watch.Watch; @@ -63,7 +63,6 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; import static org.elasticsearch.xpack.core.ClientHelper.WATCHER_ORIGIN; -import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.core.watcher.support.Exceptions.illegalState; import static org.elasticsearch.xpack.core.watcher.watch.Watch.INDEX; @@ -92,7 +91,7 @@ public class WatcherService extends AbstractComponent { this.scrollSize = settings.getAsInt("xpack.watcher.watch.scroll.size", 100); this.defaultSearchTimeout = settings.getAsTime("xpack.watcher.internal.ops.search.default_timeout", TimeValue.timeValueSeconds(30)); this.parser = parser; - this.client = client; + this.client = ClientHelper.clientWithOrigin(client, WATCHER_ORIGIN); this.executor = executor; } @@ -184,6 +183,10 @@ public class WatcherService extends AbstractComponent { // changes processedClusterStateVersion.set(state.getVersion()); + triggerService.pauseExecution(); + int cancelledTaskCount = executionService.clearExecutionsAndQueue(); + logger.info("reloading watcher, reason [{}], cancelled [{}] queued tasks", reason, cancelledTaskCount); + executor.execute(wrapWatcherService(() -> reloadInner(state, reason, false), e -> logger.error("error reloading watcher", 
e))); } @@ -232,10 +235,6 @@ public class WatcherService extends AbstractComponent { // also this is the place where we pause the trigger service execution and clear the current execution service, so that we make sure // that existing executions finish, but no new ones are executed if (processedClusterStateVersion.get() == state.getVersion()) { - triggerService.pauseExecution(); - int cancelledTaskCount = executionService.clearExecutionsAndQueue(); - logger.info("reloading watcher, reason [{}], cancelled [{}] queued tasks", reason, cancelledTaskCount); - executionService.unPause(); triggerService.start(watches); if (triggeredWatches.isEmpty() == false) { @@ -273,7 +272,7 @@ public class WatcherService extends AbstractComponent { SearchResponse response = null; List watches = new ArrayList<>(); - try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { + try { RefreshResponse refreshResponse = client.admin().indices().refresh(new RefreshRequest(INDEX)) .actionGet(TimeValue.timeValueSeconds(5)); if (refreshResponse.getSuccessfulShards() < indexMetaData.getNumberOfShards()) { @@ -357,11 +356,9 @@ public class WatcherService extends AbstractComponent { } } finally { if (response != null) { - try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { - ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); - clearScrollRequest.addScrollId(response.getScrollId()); - client.clearScroll(clearScrollRequest).actionGet(scrollTimeout); - } + ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); + clearScrollRequest.addScrollId(response.getScrollId()); + client.clearScroll(clearScrollRequest).actionGet(scrollTimeout); } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/email/ExecutableEmailAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/email/ExecutableEmailAction.java index f737d89c128..fcc4eb0e942 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/email/ExecutableEmailAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/email/ExecutableEmailAction.java @@ -41,7 +41,7 @@ public class ExecutableEmailAction extends ExecutableAction { } public Action.Result execute(String actionId, WatchExecutionContext ctx, Payload payload) throws Exception { - Map model = Variables.createCtxModel(ctx, payload); + Map model = Variables.createCtxParamsMap(ctx, payload); Map attachments = new HashMap<>(); DataAttachment dataAttachment = action.getDataAttachment(); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/hipchat/ExecutableHipChatAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/hipchat/ExecutableHipChatAction.java index 5772d1ffa6f..176de8b945d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/hipchat/ExecutableHipChatAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/hipchat/ExecutableHipChatAction.java @@ -39,7 +39,7 @@ public class ExecutableHipChatAction extends ExecutableAction { // watch/action were created. 
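A recurring theme in the WatcherService hunks above: instead of stashing the thread context with `stashWithOrigin` around every refresh, scroll, and clear-scroll call, the constructor now wraps the client once via `ClientHelper.clientWithOrigin(client, WATCHER_ORIGIN)`. A compile-level sketch of what a call site looks like after the change; the class and method here are illustrative, while `clientWithOrigin` itself comes from the diff:

```java
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.core.ClientHelper;

import static org.elasticsearch.xpack.core.ClientHelper.WATCHER_ORIGIN;

class OriginClientSketch {
    private final Client client;

    OriginClientSketch(Client rawClient) {
        // Wrap once: every request issued through this client carries the
        // watcher origin, replacing the per-call stashWithOrigin try-blocks.
        this.client = ClientHelper.clientWithOrigin(rawClient, WATCHER_ORIGIN);
    }

    void clearScroll(String scrollId, TimeValue scrollTimeout) {
        ClearScrollRequest request = new ClearScrollRequest();
        request.addScrollId(scrollId);
        // No ThreadContext.StoredContext block needed here anymore.
        client.clearScroll(request).actionGet(scrollTimeout);
    }
}
```

Centralizing the origin in one place removes a whole class of bugs where a single call site forgets to stash the context.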
account.validateParsedTemplate(ctx.id().watchId(), actionId, action.message); - Map model = Variables.createCtxModel(ctx, payload); + Map model = Variables.createCtxParamsMap(ctx, payload); HipChatMessage message = account.render(ctx.id().watchId(), actionId, templateEngine, action.message, model); if (ctx.simulateAction(actionId)) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/jira/ExecutableJiraAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/jira/ExecutableJiraAction.java index acb0c8ce591..89f9af8e1d5 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/jira/ExecutableJiraAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/jira/ExecutableJiraAction.java @@ -42,7 +42,7 @@ public class ExecutableJiraAction extends ExecutableAction { throw new IllegalStateException("account [" + action.account + "] was not found. perhaps it was deleted"); } - final Function render = s -> engine.render(new TextTemplate(s), Variables.createCtxModel(ctx, payload)); + final Function render = s -> engine.render(new TextTemplate(s), Variables.createCtxParamsMap(ctx, payload)); Map fields = new HashMap<>(); // Apply action fields diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/logging/ExecutableLoggingAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/logging/ExecutableLoggingAction.java index b1cb723949d..83fdb4e7c1f 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/logging/ExecutableLoggingAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/logging/ExecutableLoggingAction.java @@ -40,7 +40,7 @@ public class ExecutableLoggingAction extends ExecutableAction { @Override public Action.Result execute(String actionId, WatchExecutionContext ctx, Payload payload) throws Exception { - Map model = Variables.createCtxModel(ctx, payload); + Map model = Variables.createCtxParamsMap(ctx, payload); String loggedText = templateEngine.render(action.text, model); if (ctx.simulateAction(actionId)) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/pagerduty/ExecutablePagerDutyAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/pagerduty/ExecutablePagerDutyAction.java index 59381dc3336..0c995ce94ef 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/pagerduty/ExecutablePagerDutyAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/pagerduty/ExecutablePagerDutyAction.java @@ -40,7 +40,7 @@ public class ExecutablePagerDutyAction extends ExecutableAction throw new IllegalStateException("account [" + action.event.account + "] was not found. 
perhaps it was deleted"); } - Map model = Variables.createCtxModel(ctx, payload); + Map model = Variables.createCtxParamsMap(ctx, payload); IncidentEvent event = action.event.render(ctx.watch().id(), actionId, templateEngine, model, account.getDefaults()); if (ctx.simulateAction(actionId)) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/slack/ExecutableSlackAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/slack/ExecutableSlackAction.java index 9ab4a028ca1..b904b05ada7 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/slack/ExecutableSlackAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/slack/ExecutableSlackAction.java @@ -40,7 +40,7 @@ public class ExecutableSlackAction extends ExecutableAction { throw new IllegalStateException("account [" + action.account + "] was not found. perhaps it was deleted"); } - Map model = Variables.createCtxModel(ctx, payload); + Map model = Variables.createCtxParamsMap(ctx, payload); SlackMessage message = action.message.render(ctx.id().watchId(), actionId, templateEngine, model, account.getMessageDefaults()); if (ctx.simulateAction(actionId)) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/webhook/ExecutableWebhookAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/webhook/ExecutableWebhookAction.java index 7313d529b4a..ec1f5774b13 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/webhook/ExecutableWebhookAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/webhook/ExecutableWebhookAction.java @@ -31,7 +31,7 @@ public class ExecutableWebhookAction extends ExecutableAction { @Override public Action.Result execute(String actionId, WatchExecutionContext ctx, Payload payload) throws Exception { - Map model = Variables.createCtxModel(ctx, payload); + Map model = Variables.createCtxParamsMap(ctx, payload); HttpRequest request = action.requestTemplate.render(templateEngine, model); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java index 7b87a9e87a5..d2159fd572f 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java @@ -35,6 +35,11 @@ public class TextTemplateEngine extends AbstractComponent { String mediaType = compileParams(detectContentType(template)); template = trimContentType(textTemplate); + int indexStartMustacheExpression = template.indexOf("{{"); + if (indexStartMustacheExpression == -1) { + return template; + } + Map mergedModel = new HashMap<>(); if (textTemplate.getParams() != null) { mergedModel.putAll(textTemplate.getParams()); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/AbstractCompareCondition.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/AbstractCompareCondition.java index 555f9d46545..81e3eb464e6 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/AbstractCompareCondition.java +++ 
b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/AbstractCompareCondition.java @@ -36,7 +36,7 @@ abstract class AbstractCompareCondition implements ExecutableCondition { @Override public final Result execute(WatchExecutionContext ctx) { Map resolvedValues = new HashMap<>(); - Map model = Variables.createCtxModel(ctx, ctx.payload()); + Map model = Variables.createCtxParamsMap(ctx, ctx.payload()); return doExecute(model, resolvedValues); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/ScriptCondition.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/ScriptCondition.java index e2befe9a24e..19307569ce0 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/ScriptCondition.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/ScriptCondition.java @@ -8,19 +8,15 @@ package org.elasticsearch.xpack.watcher.condition; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.xpack.core.watcher.condition.ExecutableCondition; import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext; -import org.elasticsearch.xpack.watcher.Watcher; import org.elasticsearch.xpack.watcher.support.Variables; import java.io.IOException; import java.util.Map; -import static org.elasticsearch.xpack.core.watcher.support.Exceptions.illegalState; - /** * This class executes a script against the ctx payload and returns a boolean */ @@ -29,20 +25,17 @@ public final class ScriptCondition implements ExecutableCondition { private static final Result MET = new Result(null, TYPE, true); private static final Result UNMET = new Result(null, TYPE, false); - private final ScriptService scriptService; private final Script script; - private final ExecutableScript.Factory scriptFactory; + private final WatcherConditionScript.Factory scriptFactory; public ScriptCondition(Script script) { this.script = script; - scriptService = null; - scriptFactory = null; + this.scriptFactory = null; } ScriptCondition(Script script, ScriptService scriptService) { - this.scriptService = scriptService; this.script = script; - scriptFactory = scriptService.compile(script, Watcher.SCRIPT_EXECUTABLE_CONTEXT); + this.scriptFactory = scriptService.compile(script, WatcherConditionScript.CONTEXT); } public Script getScript() { @@ -65,17 +58,12 @@ public final class ScriptCondition implements ExecutableCondition { } public Result doExecute(WatchExecutionContext ctx) { - Map parameters = Variables.createCtxModel(ctx, ctx.payload()); + Map parameters = Variables.createCtxParamsMap(ctx, ctx.payload()); if (script.getParams() != null && !script.getParams().isEmpty()) { parameters.putAll(script.getParams()); } - ExecutableScript executable = scriptFactory.newInstance(parameters); - Object value = executable.run(); - if (value instanceof Boolean) { - return (Boolean) value ? MET : UNMET; - } - throw illegalState("condition [{}] must return a boolean value (true|false) but instead returned [{}]", type(), ctx.watch().id(), - script, value); + WatcherConditionScript conditionScript = scriptFactory.newInstance(script.getParams(), ctx); + return conditionScript.execute() ? 
MET : UNMET; } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/WatcherConditionScript.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/WatcherConditionScript.java new file mode 100644 index 00000000000..1148cc6a58e --- /dev/null +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/WatcherConditionScript.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.watcher.condition; + +import org.elasticsearch.script.ParameterMap; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext; +import org.elasticsearch.xpack.watcher.support.Variables; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +/** + * A script to determine whether a watch should be run. + */ +public abstract class WatcherConditionScript { + public static final String[] PARAMETERS = {}; + + private static final Map DEPRECATIONS; + + static { + Map deprecations = new HashMap<>(); + deprecations.put( + "ctx", + "Accessing variable [ctx] via [params.ctx] from within a watcher_condition script " + + "is deprecated in favor of directly accessing [ctx]." + ); + DEPRECATIONS = Collections.unmodifiableMap(deprecations); + } + + private final Map params; + // TODO: ctx should have its members extracted into execute parameters, but it needs to be a member for bwc access in params + private final Map ctx; + + public WatcherConditionScript(Map params, WatchExecutionContext watcherContext) { + Map paramsWithCtx = new HashMap<>(params); + Map ctx = Variables.createCtx(watcherContext, watcherContext.payload()); + paramsWithCtx.put("ctx", ctx); + this.params = new ParameterMap(Collections.unmodifiableMap(paramsWithCtx), DEPRECATIONS); + this.ctx = ctx; + } + + public abstract boolean execute(); + + public Map getParams() { + return params; + } + + public Map getCtx() { + return ctx; + } + + public interface Factory { + WatcherConditionScript newInstance(Map params, WatchExecutionContext watcherContext); + } + + public static ScriptContext CONTEXT = new ScriptContext<>("watcher_condition", Factory.class); +} diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/http/ExecutableHttpInput.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/http/ExecutableHttpInput.java index 1bc7ab309f0..5d738772f21 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/http/ExecutableHttpInput.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/http/ExecutableHttpInput.java @@ -44,7 +44,7 @@ public class ExecutableHttpInput extends ExecutableInput model = Variables.createCtxModel(ctx, payload); + Map model = Variables.createCtxParamsMap(ctx, payload); request = input.getRequest().render(templateEngine, model); return doExecute(ctx, request); } catch (Exception e) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/DataAttachmentParser.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/DataAttachmentParser.java index 9164e1db7ea..34730c88ce0 100644 --- 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/DataAttachmentParser.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/DataAttachmentParser.java @@ -57,7 +57,7 @@ public class DataAttachmentParser implements EmailAttachmentParser model = Variables.createCtxModel(ctx, payload); + Map model = Variables.createCtxParamsMap(ctx, payload); return attachment.getDataAttachment().create(attachment.id(), model); } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/HttpEmailAttachementParser.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/HttpEmailAttachementParser.java index 7c5d68a126b..076c57c832f 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/HttpEmailAttachementParser.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/HttpEmailAttachementParser.java @@ -79,7 +79,7 @@ public class HttpEmailAttachementParser implements EmailAttachmentParser model = Variables.createCtxModel(context, payload); + Map model = Variables.createCtxParamsMap(context, payload); HttpRequest httpRequest = attachment.getRequestTemplate().render(templateEngine, model); HttpResponse response = httpClient.execute(httpRequest); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParser.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParser.java index 28a8c194b57..f6026c0efce 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParser.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParser.java @@ -91,7 +91,7 @@ public class ReportingAttachmentParser implements EmailAttachmentParser model = Variables.createCtxModel(context, payload); + Map model = Variables.createCtxParamsMap(context, payload); String initialUrl = templateEngine.render(new TextTemplate(attachment.url()), model); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyAccount.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyAccount.java index b2498a749d7..fdc5ca07b84 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyAccount.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyAccount.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.watcher.notification.pagerduty; -import org.apache.logging.log4j.Logger; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; @@ -29,15 +28,13 @@ public class PagerDutyAccount { private final String serviceKey; private final HttpClient httpClient; private final IncidentEventDefaults eventDefaults; - private final Logger logger; - PagerDutyAccount(String name, Settings accountSettings, Settings serviceSettings, HttpClient httpClient, Logger logger) { + PagerDutyAccount(String name, Settings accountSettings, Settings serviceSettings, HttpClient 
httpClient) { this.name = name; this.serviceKey = getServiceKey(name, accountSettings, serviceSettings); this.httpClient = httpClient; this.eventDefaults = new IncidentEventDefaults(accountSettings.getAsSettings(TRIGGER_DEFAULTS_SETTING)); - this.logger = logger; } public String getName() { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java index 32a6dcb91aa..c10bcf4782f 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java @@ -51,7 +51,7 @@ public class PagerDutyService extends NotificationService { @Override protected PagerDutyAccount createAccount(String name, Settings accountSettings) { - return new PagerDutyAccount(name, accountSettings, accountSettings, httpClient, logger); + return new PagerDutyAccount(name, accountSettings, accountSettings, httpClient); } public static List> getSettings() { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/Variables.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/Variables.java index 858f6707f29..cf3c9f81039 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/Variables.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/Variables.java @@ -22,7 +22,15 @@ public final class Variables { public static final String METADATA = "metadata"; public static final String VARS = "vars"; - public static Map createCtxModel(WatchExecutionContext ctx, Payload payload) { + /** Creates a ctx map and puts it into the returned map as "ctx". */ + public static Map createCtxParamsMap(WatchExecutionContext ctx, Payload payload) { + Map model = new HashMap<>(); + model.put(CTX, createCtx(ctx, payload)); + return model; + } + + /** Creates a ctx map. */ + public static Map createCtx(WatchExecutionContext ctx, Payload payload) { Map ctxModel = new HashMap<>(); ctxModel.put(ID, ctx.id().value()); ctxModel.put(WATCH_ID, ctx.id().watchId()); @@ -33,10 +41,6 @@ public final class Variables { } ctxModel.put(METADATA, ctx.watch().metadata()); ctxModel.put(VARS, ctx.vars()); - Map model = new HashMap<>(); - model.put(CTX, ctxModel); - return model; + return ctxModel; } - - } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateService.java index 9df4f5f8b52..2208aab428a 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateService.java @@ -45,7 +45,7 @@ public class WatcherSearchTemplateService extends AbstractComponent { public String renderTemplate(Script source, WatchExecutionContext ctx, Payload payload) throws IOException { // Due the inconsistency with templates in ES 1.x, we maintain our own template format. 
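The Variables change above splits the old createCtxModel into two helpers: createCtx builds the watch-execution map itself, while createCtxParamsMap nests that map under a single "ctx" key, which is the form all of the action and attachment call sites in this diff now request. In miniature, with placeholder values (the key names follow the constants shown in Variables; the id value is made up):

```java
import java.util.HashMap;
import java.util.Map;

class CtxShapeSketch {
    public static void main(String[] args) {
        // Shape returned by createCtx(ctx, payload): the inner map only.
        // (The real map carries more keys, e.g. the trigger and payload.)
        Map<String, Object> ctx = new HashMap<>();
        ctx.put("id", "my-watch_0-2018-09-12T00:00:00.000Z");
        ctx.put("watch_id", "my-watch");
        ctx.put("metadata", new HashMap<>());
        ctx.put("vars", new HashMap<>());

        // Shape returned by createCtxParamsMap(ctx, payload): the same data
        // nested under "ctx", so templates keep addressing it as ctx.*.
        Map<String, Object> model = new HashMap<>();
        model.put("ctx", ctx);

        System.out.println(model);
    }
}
```

Exposing createCtx on its own is what lets the new WatcherConditionScript and WatcherTransformScript hand scripts a bare ctx map instead of forcing access through params.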
// This template format we use now, will become the template structure in ES 2.0 - Map watcherContextParams = Variables.createCtxModel(ctx, payload); + Map watcherContextParams = Variables.createCtxParamsMap(ctx, payload); // Here we convert watcher template into a ES core templates. Due to the different format we use, we // convert to the template format used in ES core if (source.getParams() != null) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/script/ExecutableScriptTransform.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/script/ExecutableScriptTransform.java index e2b1cf882cc..20cd8e7b6ef 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/script/ExecutableScriptTransform.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/script/ExecutableScriptTransform.java @@ -8,19 +8,16 @@ package org.elasticsearch.xpack.watcher.transform.script; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext; import org.elasticsearch.xpack.core.watcher.transform.ExecutableTransform; import org.elasticsearch.xpack.core.watcher.watch.Payload; -import org.elasticsearch.xpack.watcher.Watcher; import java.io.IOException; import java.util.HashMap; import java.util.Map; -import static org.elasticsearch.xpack.watcher.support.Variables.createCtxModel; import static org.elasticsearch.xpack.watcher.transform.script.ScriptTransform.TYPE; public class ExecutableScriptTransform extends ExecutableTransform { @@ -32,7 +29,7 @@ public class ExecutableScriptTransform extends ExecutableTransform model = new HashMap<>(); - if (script.getParams() != null) { - model.putAll(script.getParams()); - } - model.putAll(createCtxModel(ctx, payload)); - ExecutableScript.Factory factory = scriptService.compile(script, Watcher.SCRIPT_EXECUTABLE_CONTEXT); - ExecutableScript executable = factory.newInstance(model); - Object value = executable.run(); + WatcherTransformScript.Factory factory = scriptService.compile(script, WatcherTransformScript.CONTEXT); + WatcherTransformScript transformScript = factory.newInstance(script.getParams(), ctx, payload); + Object value = transformScript.execute(); // TODO: deprecate one of these styles (returning a map or returning an opaque value below) if (value instanceof Map) { return new ScriptTransform.Result(new Payload.Simple((Map) value)); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/script/WatcherTransformScript.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/script/WatcherTransformScript.java new file mode 100644 index 00000000000..6d84c32578b --- /dev/null +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/script/WatcherTransformScript.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.watcher.transform.script; + +import org.elasticsearch.script.ParameterMap; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext; +import org.elasticsearch.xpack.core.watcher.watch.Payload; +import org.elasticsearch.xpack.watcher.support.Variables; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +/** + * A script to transform the results of a watch execution. + */ +public abstract class WatcherTransformScript { + public static final String[] PARAMETERS = {}; + + private static final Map DEPRECATIONS; + + static { + Map deprecations = new HashMap<>(); + deprecations.put( + "ctx", + "Accessing variable [ctx] via [params.ctx] from within a watcher_transform script " + + "is deprecated in favor of directly accessing [ctx]." + ); + DEPRECATIONS = Collections.unmodifiableMap(deprecations); + } + + private final Map params; + // TODO: ctx should have its members extracted into execute parameters, but it needs to be a member for bwc access in params + private final Map ctx; + + public WatcherTransformScript(Map params, WatchExecutionContext watcherContext, Payload payload) { + Map paramsWithCtx = new HashMap<>(params); + Map ctx = Variables.createCtx(watcherContext, payload); + paramsWithCtx.put("ctx", ctx); + this.params = new ParameterMap(Collections.unmodifiableMap(paramsWithCtx), DEPRECATIONS); + this.ctx = ctx; + } + + public abstract Object execute(); + + public Map getParams() { + return params; + } + + public Map getCtx() { + return ctx; + } + + public interface Factory { + WatcherTransformScript newInstance(Map params, WatchExecutionContext watcherContext, Payload payload); + } + + public static ScriptContext CONTEXT = new ScriptContext<>("watcher_transform", Factory.class); +} diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java index 4c10f794880..bd0204766af 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java @@ -22,6 +22,7 @@ import org.joda.time.DateTime; import java.time.Clock; import java.util.ArrayList; import java.util.Collection; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -49,14 +50,23 @@ public class TickerScheduleTriggerEngine extends ScheduleTriggerEngine { @Override public synchronized void start(Collection jobs) { long startTime = clock.millis(); - Map schedules = new ConcurrentHashMap<>(); + Map schedules = new HashMap<>(jobs.size()); for (Watch job : jobs) { if (job.trigger() instanceof ScheduleTrigger) { ScheduleTrigger trigger = (ScheduleTrigger) job.trigger(); schedules.put(job.id(), new ActiveSchedule(job.id(), trigger.getSchedule(), startTime)); } } - this.schedules = schedules; + // Why are we calling putAll() here instead of assigning a brand + new concurrent hash map? This requires some explanation of how + TriggerEngine.start() is invoked: a reload is done by the cluster + state listener, and if the watches index does not exist and a new + document is stored, then the creation of
that index will trigger a reload which calls + // this method. The index operation however will run at the same time + // as the reload, so if we clean out the old data structure here, + // that can lead to that one watch not being triggered + this.schedules.putAll(schedules); } @Override diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java index 73f9271e3ef..f1c711ae00a 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java @@ -6,19 +6,21 @@ package org.elasticsearch.xpack.watcher; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.ClearScrollResponse; +import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponseSections; +import org.elasticsearch.action.search.SearchScrollAction; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.client.AdminClient; import org.elasticsearch.client.Client; -import org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -42,7 +44,6 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.watcher.trigger.Trigger; import org.elasticsearch.xpack.core.watcher.watch.Watch; import org.elasticsearch.xpack.core.watcher.watch.WatchStatus; @@ -55,6 +56,7 @@ import org.elasticsearch.xpack.watcher.trigger.TriggerService; import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; +import org.junit.Before; import org.mockito.ArgumentCaptor; import java.util.Collections; @@ -67,6 +69,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; @@ -76,6 +79,16 @@ public class WatcherServiceTests extends ESTestCase { private final ExecutorService executorService = EsExecutors.newDirectExecutorService(); + private final Client client = mock(Client.class); + + @Before + public void configureMockClient() { + when(client.settings()).thenReturn(Settings.EMPTY); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + 
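The TickerScheduleTriggerEngine.start() change above is subtle: the local map becomes a plain HashMap, and the long-lived concurrent map is merged into rather than replaced, so a watch stored while the reload is in flight is not lost. Reduced to its essentials (the schedule value type is simplified to a String here, standing in for ActiveSchedule):

```java
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class SchedulePublishSketch {
    // Long-lived map the ticker thread reads; never reassigned.
    private final Map<String, String> schedules = new ConcurrentHashMap<>();

    synchronized void start(Map<String, String> jobs) {
        // Build the new entries in a cheap local map...
        Map<String, String> fresh = new HashMap<>(jobs);
        // ...then merge instead of swapping the reference: an entry added
        // concurrently by the reload that index creation itself triggers
        // survives, whereas `this.schedules = fresh` could drop it.
        schedules.putAll(fresh);
    }
}
```

The trade-off, as the comment in the diff implies, is that stale entries are no longer cleaned out by start() itself, which is accepted here in exchange for never losing a freshly stored watch.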
when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + } + public void testValidateStartWithClosedIndex() { TriggerService triggerService = mock(TriggerService.class); TriggeredWatchStore triggeredWatchStore = mock(TriggeredWatchStore.class); @@ -83,7 +96,7 @@ public class WatcherServiceTests extends ESTestCase { WatchParser parser = mock(WatchParser.class); WatcherService service = new WatcherService(Settings.EMPTY, triggerService, triggeredWatchStore, - executionService, parser, mock(Client.class), executorService) { + executionService, parser, client, executorService) { @Override void stopExecutor() { } @@ -102,18 +115,11 @@ public class WatcherServiceTests extends ESTestCase { } public void testLoadOnlyActiveWatches() throws Exception { - // this is just, so we dont have to add any mocking to the threadpool - Settings settings = Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), false).build(); - TriggerService triggerService = mock(TriggerService.class); TriggeredWatchStore triggeredWatchStore = mock(TriggeredWatchStore.class); ExecutionService executionService = mock(ExecutionService.class); WatchParser parser = mock(WatchParser.class); - Client client = mock(Client.class); - ThreadPool threadPool = mock(ThreadPool.class); - when(client.threadPool()).thenReturn(threadPool); - when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); - WatcherService service = new WatcherService(settings, triggerService, triggeredWatchStore, + WatcherService service = new WatcherService(Settings.EMPTY, triggerService, triggeredWatchStore, executionService, parser, client, executorService) { @Override void stopExecutor() { @@ -150,21 +156,21 @@ public class WatcherServiceTests extends ESTestCase { RefreshResponse refreshResponse = mock(RefreshResponse.class); when(refreshResponse.getSuccessfulShards()) .thenReturn(clusterState.getMetaData().getIndices().get(Watch.INDEX).getNumberOfShards()); - AdminClient adminClient = mock(AdminClient.class); - IndicesAdminClient indicesAdminClient = mock(IndicesAdminClient.class); - when(client.admin()).thenReturn(adminClient); - when(adminClient.indices()).thenReturn(indicesAdminClient); - PlainActionFuture refreshFuture = new PlainActionFuture<>(); - when(indicesAdminClient.refresh(any(RefreshRequest.class))).thenReturn(refreshFuture); - refreshFuture.onResponse(refreshResponse); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(refreshResponse); + return null; + }).when(client).execute(eq(RefreshAction.INSTANCE), any(RefreshRequest.class), any(ActionListener.class)); // empty scroll response, no further scrolling needed SearchResponseSections scrollSearchSections = new SearchResponseSections(SearchHits.empty(), null, null, false, false, null, 1); SearchResponse scrollSearchResponse = new SearchResponse(scrollSearchSections, "scrollId", 1, 1, 0, 10, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); - PlainActionFuture searchScrollResponseFuture = new PlainActionFuture<>(); - when(client.searchScroll(any(SearchScrollRequest.class))).thenReturn(searchScrollResponseFuture); - searchScrollResponseFuture.onResponse(scrollSearchResponse); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(scrollSearchResponse); + return null; + }).when(client).execute(eq(SearchScrollAction.INSTANCE), any(SearchScrollRequest.class), any(ActionListener.class)); // 
one search response containing active and inactive watches int count = randomIntBetween(2, 200); @@ -192,13 +198,17 @@ public class WatcherServiceTests extends ESTestCase { SearchResponseSections sections = new SearchResponseSections(searchHits, null, null, false, false, null, 1); SearchResponse searchResponse = new SearchResponse(sections, "scrollId", 1, 1, 0, 10, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); - PlainActionFuture searchResponseFuture = new PlainActionFuture<>(); - when(client.search(any(SearchRequest.class))).thenReturn(searchResponseFuture); - searchResponseFuture.onResponse(searchResponse); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(searchResponse); + return null; + }).when(client).execute(eq(SearchAction.INSTANCE), any(SearchRequest.class), any(ActionListener.class)); - PlainActionFuture clearScrollFuture = new PlainActionFuture<>(); - when(client.clearScroll(any(ClearScrollRequest.class))).thenReturn(clearScrollFuture); - clearScrollFuture.onResponse(new ClearScrollResponse(true, 1)); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(new ClearScrollResponse(true, 1)); + return null; + }).when(client).execute(eq(ClearScrollAction.INSTANCE), any(ClearScrollRequest.class), any(ActionListener.class)); service.start(clusterState, () -> {}); @@ -228,7 +238,7 @@ public class WatcherServiceTests extends ESTestCase { assertThat(triggerService.count(), is(1L)); WatcherService service = new WatcherService(Settings.EMPTY, triggerService, mock(TriggeredWatchStore.class), - mock(ExecutionService.class), mock(WatchParser.class), mock(Client.class), executorService) { + mock(ExecutionService.class), mock(WatchParser.class), client, executorService) { @Override void stopExecutor() { } @@ -245,7 +255,7 @@ public class WatcherServiceTests extends ESTestCase { ExecutionService executionService = mock(ExecutionService.class); TriggerService triggerService = mock(TriggerService.class); WatcherService service = new WatcherService(Settings.EMPTY, triggerService, mock(TriggeredWatchStore.class), - executionService, mock(WatchParser.class), mock(Client.class), executorService) { + executionService, mock(WatchParser.class), client, executorService) { @Override void stopExecutor() { } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java index 0e084af23e1..002d833c209 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java @@ -21,6 +21,7 @@ import org.junit.Before; import java.util.Collections; import java.util.HashMap; +import java.util.Locale; import java.util.Map; import static java.util.Collections.singletonMap; @@ -31,7 +32,10 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; import static org.mockito.Mockito.when; public class TextTemplateTests extends ESTestCase { @@ -47,7 +51,7 
@@ public class TextTemplateTests extends ESTestCase { } public void testRender() throws Exception { - String templateText = "_template"; + String templateText = "{{_template}}"; Map params = singletonMap("param_key", "param_val"); Map model = singletonMap("model_key", "model_val"); Map merged = new HashMap<>(params); @@ -72,7 +76,7 @@ public class TextTemplateTests extends ESTestCase { } public void testRenderOverridingModel() throws Exception { - String templateText = "_template"; + String templateText = "{{_template}}"; Map params = singletonMap("key", "param_val"); Map model = singletonMap("key", "model_val"); ScriptType type = randomFrom(ScriptType.values()); @@ -94,7 +98,7 @@ public class TextTemplateTests extends ESTestCase { } public void testRenderDefaults() throws Exception { - String templateText = "_template"; + String templateText = "{{_template}}"; Map model = singletonMap("key", "model_val"); TemplateScript.Factory compiledTemplate = templateParams -> @@ -113,6 +117,39 @@ public class TextTemplateTests extends ESTestCase { assertThat(engine.render(template, model), is("rendered_text")); } + public void testDontInvokeScriptServiceOnNonMustacheText() { + assertNoCompilation("this is my text"); + assertScriptServiceInvoked("}}{{"); + assertScriptServiceInvoked("}}{{ctx.payload}}"); + } + + private void assertNoCompilation(String input) { + String output = engine.render(new TextTemplate(input), Collections.emptyMap()); + assertThat(input, is(output)); + verifyZeroInteractions(service); + } + + private void assertScriptServiceInvoked(final String input) { + ScriptService scriptService = mock(ScriptService.class); + TextTemplateEngine e = new TextTemplateEngine(Settings.EMPTY, scriptService); + + TemplateScript.Factory compiledTemplate = templateParams -> + new TemplateScript(templateParams) { + @Override + public String execute() { + return input.toUpperCase(Locale.ROOT); + } + }; + + when(scriptService.compile(new Script(ScriptType.INLINE, lang, input, + Collections.singletonMap("content_type", "text/plain"), Collections.emptyMap()), Watcher.SCRIPT_TEMPLATE_CONTEXT)) + .thenReturn(compiledTemplate); + + String output = e.render(new TextTemplate(input), Collections.emptyMap()); + verify(scriptService).compile(any(), any()); + assertThat(output, is(input.toUpperCase(Locale.ROOT))); + } + public void testParser() throws Exception { ScriptType type = randomScriptType(); TextTemplate template = diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/AlwaysConditionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/AlwaysConditionTests.java index 8ae0025066e..20bf83ef4ed 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/AlwaysConditionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/AlwaysConditionTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.watcher.condition; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.condition.ExecutableCondition; @@ -53,7 +54,8 @@ public class AlwaysConditionTests extends ESTestCase { String type = randomFrom(ScriptCondition.TYPE, InternalAlwaysCondition.TYPE, CompareCondition.TYPE, 
ArrayCompareCondition.TYPE); switch (type) { case ScriptCondition.TYPE: - return new ScriptCondition(mockScript("_script"), scriptService); + Script mockScript = mockScript("_script"); + return new ScriptCondition(mockScript, scriptService); case CompareCondition.TYPE: return new CompareCondition("_path", randomFrom(CompareCondition.Op.values()), randomFrom(5, "3"), Clock.systemUTC()); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java index c7b7f2c63cd..fc6161cb927 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java @@ -13,15 +13,12 @@ import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.script.GeneralScriptException; -import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.ScriptMetaData; import org.elasticsearch.script.ScriptService; @@ -30,9 +27,12 @@ import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.condition.ExecutableCondition; import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext; +import org.elasticsearch.xpack.core.watcher.execution.Wid; +import org.elasticsearch.xpack.core.watcher.trigger.TriggerEvent; import org.elasticsearch.xpack.core.watcher.watch.Payload; -import org.elasticsearch.xpack.watcher.Watcher; +import org.elasticsearch.xpack.core.watcher.watch.Watch; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; +import org.elasticsearch.xpack.watcher.test.WatcherMockScriptPlugin; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.junit.Before; @@ -51,6 +51,8 @@ import static org.elasticsearch.xpack.core.watcher.support.Exceptions.illegalArg import static org.elasticsearch.xpack.watcher.test.WatcherTestUtils.mockExecutionContext; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class ScriptConditionTests extends ESTestCase { @@ -77,15 +79,13 @@ public class ScriptConditionTests extends ESTestCase { return total > 1; }); - scripts.put("ctx.payload.hits.total > threshold", vars -> { + scripts.put("ctx.payload.hits.total > params.threshold", vars -> { int total = (int) XContentMapValues.extractValue("ctx.payload.hits.total", vars); - int threshold = (int) XContentMapValues.extractValue("threshold", vars); + int threshold = (int) XContentMapValues.extractValue("params.threshold", vars); return total > threshold; }); - ScriptEngine engine = new MockScriptEngine(MockScriptEngine.NAME, scripts); - scriptService = new 
ScriptService(Settings.EMPTY, Collections.singletonMap(engine.getType(), engine), - Collections.singletonMap(Watcher.SCRIPT_EXECUTABLE_CONTEXT.name, Watcher.SCRIPT_EXECUTABLE_CONTEXT)); + scriptService = WatcherMockScriptPlugin.newMockScriptService(scripts); ClusterState.Builder clusterState = new ClusterState.Builder(new ClusterName("_name")); clusterState.metaData(MetaData.builder().putCustom(ScriptMetaData.TYPE, new ScriptMetaData.Builder(null).build())); @@ -102,7 +102,8 @@ public class ScriptConditionTests extends ESTestCase { } public void testExecuteMergedParams() throws Exception { - Script script = new Script(ScriptType.INLINE, "mockscript", "ctx.payload.hits.total > threshold", singletonMap("threshold", 1)); + Script script = new Script(ScriptType.INLINE, "mockscript", + "ctx.payload.hits.total > params.threshold", singletonMap("threshold", 1)); ScriptCondition executable = new ScriptCondition(script, scriptService); SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); @@ -190,16 +191,6 @@ public class ScriptConditionTests extends ESTestCase { assertThat(exception.getMessage(), containsString("Error evaluating null.foo")); } - public void testScriptConditionReturnObjectThrowsException() throws Exception { - ScriptCondition condition = new ScriptCondition(mockScript("return new Object()"), scriptService); - SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY); - WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response)); - Exception exception = expectThrows(IllegalStateException.class, () -> condition.execute(ctx)); - assertThat(exception.getMessage(), - containsString("condition [script] must return a boolean value (true|false) but instead returned [_name]")); - } - public void testScriptConditionAccessCtx() throws Exception { ScriptCondition condition = new ScriptCondition(mockScript("ctx.trigger.scheduled_time.getMillis() < new Date().time"), scriptService); @@ -210,6 +201,23 @@ public class ScriptConditionTests extends ESTestCase { assertThat(condition.execute(ctx).met(), is(true)); } + public void testParamsCtxDeprecated() throws Exception { + WatchExecutionContext watcherContext = mock(WatchExecutionContext.class); + when(watcherContext.id()).thenReturn(mock(Wid.class)); + when(watcherContext.watch()).thenReturn(mock(Watch.class)); + when(watcherContext.triggerEvent()).thenReturn(mock(TriggerEvent.class)); + WatcherConditionScript watcherScript = new WatcherConditionScript(Collections.emptyMap(), watcherContext) { + @Override + public boolean execute() { + assertThat(getParams().get("ctx"), is(getCtx())); + return true; + } + }; + watcherScript.execute(); + assertWarnings("Accessing variable [ctx] via [params.ctx] from within a watcher_condition script " + + "is deprecated in favor of directly accessing [ctx]."); + } + private static XContentBuilder createConditionContent(String script, String scriptLang, ScriptType scriptType) throws IOException { XContentBuilder builder = jsonBuilder(); if (scriptType == null) { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java index 9093a6f86ae..78fc9c290ed 100644 --- 
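testParamsCtxDeprecated verifies that reading ctx through params keeps working but emits a deprecation warning. One way to satisfy that contract (sketched here with illustrative names, not the production class) is to hand scripts a params view that intercepts the ctx key:

    import java.util.AbstractMap;
    import java.util.Map;
    import java.util.Set;
    import org.elasticsearch.common.logging.DeprecationLogger;

    class CtxDeprecatingParams extends AbstractMap<String, Object> {
        private final Map<String, Object> delegate;
        private final DeprecationLogger deprecationLogger;

        CtxDeprecatingParams(Map<String, Object> delegate, DeprecationLogger deprecationLogger) {
            this.delegate = delegate;
            this.deprecationLogger = deprecationLogger;
        }

        @Override
        public Object get(Object key) {
            if ("ctx".equals(key)) {
                deprecationLogger.deprecated("Accessing variable [ctx] via [params.ctx] from within a "
                        + "watcher_condition script is deprecated in favor of directly accessing [ctx].");
            }
            return delegate.get(key);
        }

        @Override
        public Set<Entry<String, Object>> entrySet() {
            return delegate.entrySet();
        }
    }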
a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.condition.NeverCondition; import org.elasticsearch.xpack.watcher.condition.ScriptCondition; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; +import org.elasticsearch.xpack.watcher.test.WatcherMockScriptPlugin; import java.util.ArrayList; import java.util.Arrays; @@ -66,7 +67,7 @@ public class HistoryActionConditionTests extends AbstractWatcherIntegrationTestC return types; } - public static class CustomScriptPlugin extends MockScriptPlugin { + public static class CustomScriptPlugin extends WatcherMockScriptPlugin { @Override protected Map, Object>> pluginScripts() { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/transform/TransformInputTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/transform/TransformInputTests.java index aee5a5e07f0..0ac5932586e 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/transform/TransformInputTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/transform/TransformInputTests.java @@ -13,8 +13,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; import org.elasticsearch.test.ESTestCase; @@ -25,7 +23,7 @@ import org.elasticsearch.xpack.core.watcher.transform.ExecutableTransform; import org.elasticsearch.xpack.core.watcher.transform.TransformFactory; import org.elasticsearch.xpack.core.watcher.transform.TransformRegistry; import org.elasticsearch.xpack.core.watcher.watch.Payload; -import org.elasticsearch.xpack.watcher.Watcher; +import org.elasticsearch.xpack.watcher.test.WatcherMockScriptPlugin; import org.elasticsearch.xpack.watcher.test.WatcherTestUtils; import org.elasticsearch.xpack.watcher.transform.script.ExecutableScriptTransform; import org.elasticsearch.xpack.watcher.transform.script.ScriptTransform; @@ -33,7 +31,6 @@ import org.elasticsearch.xpack.watcher.transform.script.ScriptTransformFactory; import org.junit.Before; import java.util.Collections; -import java.util.HashMap; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -46,13 +43,7 @@ public class TransformInputTests extends ESTestCase { @Before public void setupScriptService() { - Map engines = new HashMap<>(); - engines.put(MockScriptEngine.NAME, new MockScriptEngine(MockScriptEngine.NAME, Collections.singletonMap("1", s -> "2"))); - Map> contexts = new HashMap<>(); - contexts.put(Watcher.SCRIPT_TEMPLATE_CONTEXT.name, Watcher.SCRIPT_TEMPLATE_CONTEXT); - contexts.put(Watcher.SCRIPT_SEARCH_CONTEXT.name, Watcher.SCRIPT_SEARCH_CONTEXT); - contexts.put(Watcher.SCRIPT_EXECUTABLE_CONTEXT.name, Watcher.SCRIPT_EXECUTABLE_CONTEXT); - scriptService = new ScriptService(Settings.EMPTY, engines, contexts); + scriptService = 
WatcherMockScriptPlugin.newMockScriptService(Collections.singletonMap("1", s -> "2")); } public void testExecute() { @@ -72,7 +63,7 @@ public class TransformInputTests extends ESTestCase { public void testParserValid() throws Exception { Map transformFactories = Collections.singletonMap("script", new ScriptTransformFactory(Settings.EMPTY, scriptService)); - TransformRegistry registry = new TransformRegistry(Settings.EMPTY, transformFactories); + TransformRegistry registry = new TransformRegistry(transformFactories); TransformInputFactory factory = new TransformInputFactory(Settings.EMPTY, registry); // { "script" : { "lang" : "mockscript", "source" : "1" } } @@ -96,7 +87,7 @@ public class TransformInputTests extends ESTestCase { Map transformFactories = Collections.singletonMap("script", new ScriptTransformFactory(Settings.EMPTY, scriptService)); - TransformRegistry registry = new TransformRegistry(Settings.EMPTY, transformFactories); + TransformRegistry registry = new TransformRegistry(transformFactories); TransformInputFactory factory = new TransformInputFactory(Settings.EMPTY, registry); XContentParser parser = createParser(jsonBuilder); @@ -115,7 +106,7 @@ public class TransformInputTests extends ESTestCase { public void testTransformInputToXContentIsSameAsParsing() throws Exception { Map transformFactories = Collections.singletonMap("script", new ScriptTransformFactory(Settings.EMPTY, scriptService)); - TransformRegistry registry = new TransformRegistry(Settings.EMPTY, transformFactories); + TransformRegistry registry = new TransformRegistry(transformFactories); TransformInputFactory factory = new TransformInputFactory(Settings.EMPTY, registry); XContentBuilder jsonBuilder = jsonBuilder().startObject().startObject("script") diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/VariablesTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/VariablesTests.java index 74396a32906..4ab5b7b7b87 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/VariablesTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/VariablesTests.java @@ -40,7 +40,7 @@ public class VariablesTests extends ESTestCase { .metadata(metatdata) .buildMock(); - Map model = Variables.createCtxModel(ctx, payload); + Map model = Variables.createCtxParamsMap(ctx, payload); assertThat(model, notNullValue()); assertThat(model.size(), is(1)); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index 3461c530b44..8cce6fd6663 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.watcher.test; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; @@ -181,7 +180,7 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase public void _setup() throws Exception { if (timeWarped()) { timeWarp = new 
TimeWarp(internalCluster().getInstances(ScheduleTriggerEngineMock.class), - (ClockMock)getInstanceFromMaster(Clock.class), logger); + (ClockMock)getInstanceFromMaster(Clock.class)); } if (internalCluster().size() > 0) { @@ -541,12 +540,10 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase private final List schedulers; private final ClockMock clock; - private final Logger logger; - TimeWarp(Iterable schedulers, ClockMock clock, Logger logger) { + TimeWarp(Iterable schedulers, ClockMock clock) { this.schedulers = StreamSupport.stream(schedulers.spliterator(), false).collect(Collectors.toList()); this.clock = clock; - this.logger = logger; } public void trigger(String jobName) { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherMockScriptPlugin.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherMockScriptPlugin.java new file mode 100644 index 00000000000..2908dbaa6cc --- /dev/null +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherMockScriptPlugin.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.watcher.test; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.script.MockScriptEngine; +import org.elasticsearch.script.MockScriptPlugin; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.xpack.watcher.Watcher; +import org.elasticsearch.xpack.watcher.condition.WatcherConditionScript; +import org.elasticsearch.xpack.watcher.transform.script.WatcherTransformScript; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +/** + * Provides a mock script engine with mock versions of watcher scripts. 
+ */ +public abstract class WatcherMockScriptPlugin extends MockScriptPlugin { + public static final Map, MockScriptEngine.ContextCompiler> CONTEXT_COMPILERS; + static { + Map, MockScriptEngine.ContextCompiler> compilers = new HashMap<>(); + compilers.put(WatcherConditionScript.CONTEXT, (script, options) -> + (WatcherConditionScript.Factory) (params, watcherContext) -> + new WatcherConditionScript(params, watcherContext) { + @Override + public boolean execute() { + Map vars = new HashMap<>(); + vars.put("params", getParams()); + vars.put("ctx", getCtx()); + return (boolean) script.apply(vars); + } + }); + compilers.put(WatcherTransformScript.CONTEXT, (script, options) -> + (WatcherTransformScript.Factory) (params, watcherContext, payload) -> + new WatcherTransformScript(params, watcherContext, payload) { + @Override + public Object execute() { + Map vars = new HashMap<>(); + vars.put("params", getParams()); + vars.put("ctx", getCtx()); + return script.apply(vars); + } + }); + CONTEXT_COMPILERS = Collections.unmodifiableMap(compilers); + } + + public static final List> CONTEXTS = Collections.unmodifiableList(Arrays.asList( + WatcherConditionScript.CONTEXT, WatcherTransformScript.CONTEXT, Watcher.SCRIPT_TEMPLATE_CONTEXT, Watcher.SCRIPT_SEARCH_CONTEXT + )); + + @Override + protected Map, MockScriptEngine.ContextCompiler> pluginContextCompilers() { + return CONTEXT_COMPILERS; + } + + public static ScriptService newMockScriptService(Map, Object>> scripts) { + Map engines = new HashMap<>(); + engines.put(MockScriptEngine.NAME, + new MockScriptEngine(MockScriptEngine.NAME, scripts, CONTEXT_COMPILERS)); + Map> contexts = CONTEXTS.stream().collect(Collectors.toMap(o -> o.name, Function.identity())); + return new ScriptService(Settings.EMPTY, engines, contexts); + } +} diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/ExecutionVarsIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/ExecutionVarsIntegrationTests.java index 0b3c0fc28ec..90b14233b8d 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/ExecutionVarsIntegrationTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/ExecutionVarsIntegrationTests.java @@ -9,13 +9,13 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; -import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.xpack.core.watcher.client.WatcherClient; import org.elasticsearch.xpack.core.watcher.support.xcontent.ObjectPath; import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource; import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchResponse; import org.elasticsearch.xpack.watcher.condition.ScriptCondition; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; +import org.elasticsearch.xpack.watcher.test.WatcherMockScriptPlugin; import org.hamcrest.Matcher; import java.util.HashMap; @@ -46,7 +46,7 @@ public class ExecutionVarsIntegrationTests extends AbstractWatcherIntegrationTes return types; } - public static class CustomScriptPlugin extends MockScriptPlugin { + public static class CustomScriptPlugin extends WatcherMockScriptPlugin { @Override @SuppressWarnings("unchecked") diff --git 
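With WatcherMockScriptPlugin in place, a test needs a single line to obtain a ScriptService that understands all four watcher script contexts; for instance, registering one mock script keyed "1" that yields "2", as TransformInputTests does above:

    ScriptService scriptService =
            WatcherMockScriptPlugin.newMockScriptService(Collections.singletonMap("1", s -> "2"));

Routing context-specific compilation through MockScriptEngine.ContextCompiler means individual tests no longer re-implement the WatcherConditionScript and WatcherTransformScript adapters by hand.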
a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java index 6d7f4bef213..b66a70c23af 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java @@ -42,6 +42,7 @@ import org.elasticsearch.xpack.watcher.input.search.SearchInputFactory; import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateRequest; import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateService; import org.elasticsearch.xpack.watcher.test.WatcherTestUtils; +import org.elasticsearch.xpack.watcher.transform.script.WatcherTransformScript; import org.junit.Before; import org.mockito.ArgumentCaptor; @@ -76,7 +77,7 @@ public class SearchInputTests extends ESTestCase { Map> contexts = new HashMap<>(); contexts.put(Watcher.SCRIPT_TEMPLATE_CONTEXT.name, Watcher.SCRIPT_TEMPLATE_CONTEXT); contexts.put(Watcher.SCRIPT_SEARCH_CONTEXT.name, Watcher.SCRIPT_SEARCH_CONTEXT); - contexts.put(Watcher.SCRIPT_EXECUTABLE_CONTEXT.name, Watcher.SCRIPT_EXECUTABLE_CONTEXT); + contexts.put(WatcherTransformScript.CONTEXT.name, WatcherTransformScript.CONTEXT); scriptService = new ScriptService(Settings.EMPTY, engines, contexts); ThreadPool threadPool = mock(ThreadPool.class); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchTransformTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchTransformTests.java index 10c61677a4c..72bc71cdb08 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchTransformTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchTransformTests.java @@ -70,7 +70,7 @@ public class SearchTransformTests extends ESTestCase { XContentParser parser = createParser(builder); parser.nextToken(); - final MockScriptEngine engine = new MockScriptEngine("mock", Collections.emptyMap()); + final MockScriptEngine engine = new MockScriptEngine("mock", Collections.emptyMap(), Collections.emptyMap()); Map engines = Collections.singletonMap(engine.getType(), engine); ScriptService scriptService = new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java new file mode 100644 index 00000000000..2109f2a2d95 --- /dev/null +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+package org.elasticsearch.xpack.watcher.test.integration;
+
+import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
+import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
+import org.elasticsearch.xpack.core.watcher.watch.Watch;
+import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
+import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule;
+import org.elasticsearch.xpack.watcher.watch.WatchStoreUtils;
+
+import java.util.concurrent.TimeUnit;
+
+import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.xpack.watcher.actions.ActionBuilders.loggingAction;
+import static org.elasticsearch.xpack.watcher.client.WatchSourceBuilders.watchBuilder;
+import static org.elasticsearch.xpack.watcher.input.InputBuilders.simpleInput;
+import static org.elasticsearch.xpack.watcher.trigger.TriggerBuilders.schedule;
+import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.interval;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.is;
+
+@ClusterScope(scope = SUITE, numClientNodes = 0, transportClientRatio = 0, maxNumDataNodes = 1, supportsDedicatedMasters = false)
+public class SingleNodeTests extends AbstractWatcherIntegrationTestCase {
+
+    @Override
+    protected boolean timeWarped() {
+        return false;
+    }
+
+    // this is the standard setup when starting watcher in a regular cluster
+    // the index does not exist, a watch gets added
+    // the watch should be executed properly, despite the index being created and the cluster state listener being reloaded
+    public void testThatLoadingWithNonExistingIndexWorks() throws Exception {
+        stopWatcher();
+        ClusterStateResponse clusterStateResponse = client().admin().cluster().prepareState().get();
+        IndexMetaData metaData = WatchStoreUtils.getConcreteIndex(Watch.INDEX, clusterStateResponse.getState().metaData());
+        String watchIndexName = metaData.getIndex().getName();
+        assertAcked(client().admin().indices().prepareDelete(watchIndexName));
+        startWatcher();
+
+        String watchId = randomAlphaOfLength(20);
+        // now we start with an empty set up, store a watch and expect it to be executed
+        PutWatchResponse putWatchResponse = watcherClient().preparePutWatch(watchId)
+            .setSource(watchBuilder()
+                .trigger(schedule(interval(1, IntervalSchedule.Interval.Unit.SECONDS)))
+                .input(simpleInput())
+                .addAction("_logger", loggingAction("logging of watch _name")))
+            .get();
+        assertThat(putWatchResponse.isCreated(), is(true));
+
+        assertBusy(() -> {
+            client().admin().indices().prepareRefresh(".watcher-history*").get();
+            SearchResponse searchResponse = client().prepareSearch(".watcher-history*").setSize(0).get();
+            assertThat(searchResponse.getHits().getTotalHits(), is(greaterThanOrEqualTo(1L)));
+        }, 5, TimeUnit.SECONDS);
+    }
+
+}
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java
index d629c54934f..044c3d3061e 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java
+++
b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateRequest; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; +import org.elasticsearch.xpack.watcher.test.WatcherMockScriptPlugin; import java.io.IOException; import java.io.UncheckedIOException; @@ -78,7 +79,7 @@ public class TransformIntegrationTests extends AbstractWatcherIntegrationTestCas return config; } - public static class CustomScriptPlugin extends MockScriptPlugin { + public static class CustomScriptPlugin extends WatcherMockScriptPlugin { @Override protected Map, Object>> pluginScripts() { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/chain/ChainTransformTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/chain/ChainTransformTests.java index f3493c9c354..edf05fac338 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/chain/ChainTransformTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/chain/ChainTransformTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.transform.chain; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -113,10 +112,9 @@ public class ChainTransformTests extends ESTestCase { } public void testParser() throws Exception { - TransformRegistry registry = new TransformRegistry(Settings.EMPTY, - singletonMap("named", new NamedExecutableTransform.Factory(logger))); + TransformRegistry registry = new TransformRegistry(singletonMap("named", new NamedExecutableTransform.Factory(logger))); - ChainTransformFactory transformParser = new ChainTransformFactory(Settings.EMPTY, registry); + ChainTransformFactory transformParser = new ChainTransformFactory(registry); XContentBuilder builder = jsonBuilder().startArray() .startObject().startObject("named").field("name", "name1").endObject().endObject() diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/script/ScriptTransformTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/script/ScriptTransformTests.java index bc79561f726..752b753f028 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/script/ScriptTransformTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/script/ScriptTransformTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.transform.script; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptException; @@ -17,10 +16,12 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext; +import org.elasticsearch.xpack.core.watcher.execution.Wid; import org.elasticsearch.xpack.core.watcher.transform.Transform; +import org.elasticsearch.xpack.core.watcher.trigger.TriggerEvent; import org.elasticsearch.xpack.core.watcher.watch.Payload; +import org.elasticsearch.xpack.core.watcher.watch.Watch; import org.elasticsearch.xpack.watcher.Watcher; -import org.elasticsearch.xpack.watcher.support.Variables; import java.util.Collections; import java.util.HashMap; @@ -49,21 +50,19 @@ public class ScriptTransformTests extends ESTestCase { ScriptType type = randomFrom(ScriptType.values()); Map params = Collections.emptyMap(); Script script = new Script(type, type == ScriptType.STORED ? null : "_lang", "_script", params); - ExecutableScript.Factory factory = mock(ExecutableScript.Factory.class); - when(service.compile(script, Watcher.SCRIPT_EXECUTABLE_CONTEXT)).thenReturn(factory); + WatcherTransformScript.Factory factory = mock(WatcherTransformScript.Factory.class); + when(service.compile(script, WatcherTransformScript.CONTEXT)).thenReturn(factory); ExecutableScriptTransform transform = new ExecutableScriptTransform(new ScriptTransform(script), logger, service); WatchExecutionContext ctx = mockExecutionContext("_name", Payload.EMPTY); Payload payload = new Payload.Simple("key", "value"); - Map model = Variables.createCtxModel(ctx, payload); - Map transformed = singletonMap("key", "value"); - ExecutableScript executable = mock(ExecutableScript.class); - when(executable.run()).thenReturn(transformed); - when(factory.newInstance(model)).thenReturn(executable); + WatcherTransformScript executable = mock(WatcherTransformScript.class); + when(executable.execute()).thenReturn(transformed); + when(factory.newInstance(params, ctx, payload)).thenReturn(executable); Transform.Result result = transform.execute(ctx, payload); assertThat(result, notNullValue()); @@ -77,19 +76,17 @@ public class ScriptTransformTests extends ESTestCase { ScriptType type = randomFrom(ScriptType.values()); Map params = Collections.emptyMap(); Script script = new Script(type, type == ScriptType.STORED ? 
null : "_lang", "_script", params); - ExecutableScript.Factory factory = mock(ExecutableScript.Factory.class); - when(service.compile(script, Watcher.SCRIPT_EXECUTABLE_CONTEXT)).thenReturn(factory); + WatcherTransformScript.Factory factory = mock(WatcherTransformScript.Factory.class); + when(service.compile(script, WatcherTransformScript.CONTEXT)).thenReturn(factory); ExecutableScriptTransform transform = new ExecutableScriptTransform(new ScriptTransform(script), logger, service); WatchExecutionContext ctx = mockExecutionContext("_name", Payload.EMPTY); Payload payload = new Payload.Simple("key", "value"); - Map model = Variables.createCtxModel(ctx, payload); - - ExecutableScript executable = mock(ExecutableScript.class); - when(executable.run()).thenThrow(new RuntimeException("_error")); - when(factory.newInstance(model)).thenReturn(executable); + WatcherTransformScript executable = mock(WatcherTransformScript.class); + when(executable.execute()).thenThrow(new RuntimeException("_error")); + when(factory.newInstance(params, ctx, payload)).thenReturn(executable); Transform.Result result = transform.execute(ctx, payload); assertThat(result, notNullValue()); @@ -103,20 +100,18 @@ public class ScriptTransformTests extends ESTestCase { ScriptType type = randomFrom(ScriptType.values()); Map params = Collections.emptyMap(); Script script = new Script(type, type == ScriptType.STORED ? null : "_lang", "_script", params); - ExecutableScript.Factory factory = mock(ExecutableScript.Factory.class); - when(service.compile(script, Watcher.SCRIPT_EXECUTABLE_CONTEXT)).thenReturn(factory); + WatcherTransformScript.Factory factory = mock(WatcherTransformScript.Factory.class); + when(service.compile(script, WatcherTransformScript.CONTEXT)).thenReturn(factory); ExecutableScriptTransform transform = new ExecutableScriptTransform(new ScriptTransform(script), logger, service); WatchExecutionContext ctx = mockExecutionContext("_name", Payload.EMPTY); Payload payload = new Payload.Simple("key", "value"); - Map model = Variables.createCtxModel(ctx, payload); - - ExecutableScript executable = mock(ExecutableScript.class); + WatcherTransformScript executable = mock(WatcherTransformScript.class); Object value = randomFrom("value", 1, new String[] { "value" }, Collections.singletonList("value"), singleton("value")); - when(executable.run()).thenReturn(value); - when(factory.newInstance(model)).thenReturn(executable); + when(executable.execute()).thenReturn(value); + when(factory.newInstance(params, ctx, payload)).thenReturn(executable); Transform.Result result = transform.execute(ctx, payload); assertThat(result, notNullValue()); @@ -158,7 +153,7 @@ public class ScriptTransformTests extends ESTestCase { String errorMessage = "expected error message"; ScriptException scriptException = new ScriptException(errorMessage, new RuntimeException("foo"), Collections.emptyList(), "whatever", "whatever"); - when(scriptService.compile(anyObject(), eq(Watcher.SCRIPT_EXECUTABLE_CONTEXT))).thenThrow(scriptException); + when(scriptService.compile(anyObject(), eq(WatcherTransformScript.CONTEXT))).thenThrow(scriptException); ScriptTransformFactory transformFactory = new ScriptTransformFactory(Settings.builder().build(), scriptService); @@ -191,6 +186,23 @@ public class ScriptTransformTests extends ESTestCase { assertThat(e.getMessage(), containsString("script_lang not supported [not_a_valid_lang]")); } + public void testParamsCtxDeprecated() throws Exception { + WatchExecutionContext watcherContext = mock(WatchExecutionContext.class); + 
when(watcherContext.id()).thenReturn(mock(Wid.class)); + when(watcherContext.watch()).thenReturn(mock(Watch.class)); + when(watcherContext.triggerEvent()).thenReturn(mock(TriggerEvent.class)); + Payload payload = mock(Payload.class); + WatcherTransformScript watcherScript = new WatcherTransformScript(Collections.emptyMap(), watcherContext, payload) { + @Override + public Object execute() { + return getParams().get("ctx"); + } + }; + assertThat(watcherScript.execute(), is(watcherScript.getCtx())); + assertWarnings("Accessing variable [ctx] via [params.ctx] from within a watcher_transform script " + + "is deprecated in favor of directly accessing [ctx]."); + } + static String scriptTypeField(ScriptType type) { switch (type) { case INLINE: return "source"; @@ -205,7 +217,7 @@ public class ScriptTransformTests extends ESTestCase { .put("path.home", createTempDir()) .build(); Map contexts = new HashMap<>(ScriptModule.CORE_CONTEXTS); - contexts.put(Watcher.SCRIPT_EXECUTABLE_CONTEXT.name, Watcher.SCRIPT_EXECUTABLE_CONTEXT); + contexts.put(WatcherTransformScript.CONTEXT.name, WatcherTransformScript.CONTEXT); contexts.put(Watcher.SCRIPT_TEMPLATE_CONTEXT.name, Watcher.SCRIPT_TEMPLATE_CONTEXT); return new ScriptService(settings, Collections.emptyMap(), Collections.emptyMap()); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java index 6680b38ab94..db1d3767b59 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java @@ -35,9 +35,7 @@ import java.util.function.Consumer; import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.daily; import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.interval; import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.weekly; -import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.startsWith; import static org.joda.time.DateTimeZone.UTC; import static org.mockito.Mockito.mock; @@ -110,40 +108,6 @@ public class TickerScheduleEngineTests extends ESTestCase { assertThat(bits.cardinality(), is(count)); } - public void testStartClearsExistingSchedules() throws Exception { - final CountDownLatch latch = new CountDownLatch(1); - List firedWatchIds = new ArrayList<>(); - engine.register(new Consumer>() { - @Override - public void accept(Iterable events) { - for (TriggerEvent event : events) { - firedWatchIds.add(event.jobName()); - } - latch.countDown(); - } - }); - - int count = randomIntBetween(2, 5); - List watches = new ArrayList<>(); - for (int i = 0; i < count; i++) { - watches.add(createWatch(String.valueOf(i), interval("1s"))); - } - engine.start(watches); - - watches.clear(); - for (int i = 0; i < count; i++) { - watches.add(createWatch("another_id" + i, interval("1s"))); - } - engine.start(watches); - - advanceClockIfNeeded(new DateTime(clock.millis(), UTC).plusMillis(1100)); - if (!latch.await(3 * count, TimeUnit.SECONDS)) { - fail("waiting too long for all watches to be triggered"); - } - - assertThat(firedWatchIds, everyItem(startsWith("another_id"))); - } - public void testAddHourly() throws Exception { final String name = "job_name"; 
final CountDownLatch latch = new CountDownLatch(1); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java index ae3066a3ee6..fff07cfa010 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java @@ -236,8 +236,7 @@ public class WatchTests extends ESTestCase { TriggerService triggerService = new TriggerService(Settings.EMPTY, Collections.emptySet()) { @Override public Trigger parseTrigger(String jobName, XContentParser parser) throws IOException { - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { } return new ScheduleTrigger(randomSchedule()); @@ -571,7 +570,7 @@ public class WatchTests extends ESTestCase { Map factories = new HashMap<>(); factories.put(ScriptTransform.TYPE, new ScriptTransformFactory(settings, scriptService)); factories.put(SearchTransform.TYPE, new SearchTransformFactory(settings, client, xContentRegistry(), scriptService)); - return new TransformRegistry(Settings.EMPTY, unmodifiableMap(factories)); + return new TransformRegistry(unmodifiableMap(factories)); } private List randomActions() { diff --git a/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolIT.java b/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolIT.java index 4ac927c6646..3581bf2fda7 100644 --- a/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolIT.java +++ b/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolIT.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.security; import joptsimple.OptionParser; import joptsimple.OptionSet; -import org.elasticsearch.action.search.SearchResponse; + import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; @@ -125,6 +125,6 @@ public class MigrateToolIT extends MigrateToolTestCase { .waitForEvents(Priority.LANGUID) .waitForNoRelocatingShards(true)) .actionGet(); - SearchResponse searchResp = client.filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("index1").get(); + client.filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("index1").get(); } } diff --git a/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolTestCase.java b/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolTestCase.java index 2987c1afc8d..0111aeff4cc 100644 --- a/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolTestCase.java +++ b/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolTestCase.java @@ -6,11 +6,11 @@ package org.elasticsearch.xpack.security; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.logging.ESLoggerFactory; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.xpack.client.PreBuiltXPackTransportClient; @@ -58,7 +58,7 @@ public abstract class MigrateToolTestCase extends LuceneTestCase { */ public static final String TESTS_CLUSTER_DEFAULT = "localhost:9300"; - protected static final Logger logger = ESLoggerFactory.getLogger(MigrateToolTestCase.class.getName()); + protected static final Logger logger = LogManager.getLogger(MigrateToolTestCase.class); private static final AtomicInteger counter = new AtomicInteger(); private static Client client; @@ -129,7 +129,6 @@ public abstract class MigrateToolTestCase extends LuceneTestCase { @BeforeClass public static void initializeSettings() throws UnknownHostException { - String port = System.getProperty("integ.http.port"); clusterAddresses = System.getProperty(TESTS_CLUSTER); clusterHttpAddresses = System.getProperty(TESTS_HTTP_CLUSTER); if (clusterAddresses == null || clusterAddresses.isEmpty()) { diff --git a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java index 17fbf0769fd..25b19aeea3b 100644 --- a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java +++ b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java @@ -59,20 +59,20 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { String state = objectPath.evaluate("stats.0.watcher_state"); switch (state) { - case "stopped": - Response startResponse = adminClient().performRequest(new Request("POST", "/_xpack/watcher/_start")); - String body = EntityUtils.toString(startResponse.getEntity()); - assertThat(body, containsString("\"acknowledged\":true")); - break; - case "stopping": - throw new AssertionError("waiting until stopping state reached stopped state to start again"); - case "starting": - throw new AssertionError("waiting until starting state reached started state"); - case "started": - // all good here, we are done - break; - default: - throw new AssertionError("unknown state[" + state + "]"); + case "stopped": + Response startResponse = adminClient().performRequest(new Request("POST", "/_xpack/watcher/_start")); + Map responseMap = entityAsMap(startResponse); + assertThat(responseMap, hasEntry("acknowledged", true)); + break; + case "stopping": + throw new AssertionError("waiting until stopping state reached stopped state to start again"); + case "starting": + throw new AssertionError("waiting until starting state reached started state"); + case "started": + // all good here, we are done + break; + default: + throw new AssertionError("unknown state[" + state + "]"); } } catch (IOException e) { throw new AssertionError(e); @@ -135,7 +135,6 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33320") public void testSearchInputHasPermissions() throws Exception { try (XContentBuilder builder = jsonBuilder()) { builder.startObject(); @@ -159,7 +158,6 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { assertThat(conditionMet, is(true)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29893") public void testSearchInputWithInsufficientPrivileges() 
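The reindented switch above also swaps a brittle substring check for an assertion over the parsed body; entityAsMap decodes the JSON response, so the test no longer depends on key order or formatting:

    Response startResponse = adminClient().performRequest(new Request("POST", "/_xpack/watcher/_start"));
    Map<String, Object> responseMap = entityAsMap(startResponse);
    assertThat(responseMap, hasEntry("acknowledged", true));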
throws Exception { String indexName = "index_not_allowed_to_read"; try (XContentBuilder builder = jsonBuilder()) { @@ -186,7 +184,6 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { assertThat(conditionMet, is(false)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33320") public void testSearchTransformHasPermissions() throws Exception { try (XContentBuilder builder = jsonBuilder()) { builder.startObject(); @@ -216,7 +213,6 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { assertThat(value, is("15")); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33291") public void testSearchTransformInsufficientPermissions() throws Exception { try (XContentBuilder builder = jsonBuilder()) { builder.startObject(); @@ -244,7 +240,6 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { assertThat(response.getStatusLine().getStatusCode(), is(404)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30777") public void testIndexActionHasPermissions() throws Exception { try (XContentBuilder builder = jsonBuilder()) { builder.startObject(); @@ -269,7 +264,6 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { assertThat(spam, is("eggs")); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33320") public void testIndexActionInsufficientPrivileges() throws Exception { try (XContentBuilder builder = jsonBuilder()) { builder.startObject(); @@ -299,6 +293,8 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase { Response response = client().performRequest(request); Map responseMap = entityAsMap(response); assertThat(responseMap, hasEntry("_id", watchId)); + assertThat(responseMap, hasEntry("created", true)); + assertThat(responseMap, hasEntry("_version", 1)); } private ObjectPath getWatchHistoryEntry(String watchId) throws Exception { diff --git a/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcDocCsvSpecIT.java b/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcDocCsvSpecIT.java index 017fc4b5238..aca0e37cd2d 100644 --- a/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcDocCsvSpecIT.java +++ b/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcDocCsvSpecIT.java @@ -74,7 +74,7 @@ public class JdbcDocCsvSpecIT extends SpecBaseIntegrationTestCase { @Override protected boolean logEsResultSet() { - return true; + return false; } @Override diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ShowTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ShowTestCase.java index 8dbd4b187f7..f0697f553ae 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ShowTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ShowTestCase.java @@ -49,16 +49,18 @@ public abstract class ShowTestCase extends CliIntegrationTestCase { assertThat(readLine(), RegexMatcher.matches("\\s*LOG\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*LOG10\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*LCASE\\s*\\|\\s*SCALAR\\s*")); - assertThat(readLine(), RegexMatcher.matches("\\s*LENGTH\\s*\\|\\s*SCALAR\\s*")); - assertThat(readLine(), RegexMatcher.matches("\\s*LTRIM\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*LEFT\\s*\\|\\s*SCALAR\\s*")); + 
assertThat(readLine(), RegexMatcher.matches("\\s*LENGTH\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*LOCATE\\s*\\|\\s*SCALAR\\s*")); + assertThat(readLine(), RegexMatcher.matches("\\s*LTRIM\\s*\\|\\s*SCALAR\\s*")); assertEquals("", readLine()); } public void testShowFunctionsLikeInfix() throws IOException { assertThat(command("SHOW FUNCTIONS LIKE '%DAY%'"), RegexMatcher.matches("\\s*name\\s*\\|\\s*type\\s*")); assertThat(readLine(), containsString("----------")); + assertThat(readLine(), RegexMatcher.matches("\\s*DAY_NAME\\s*\\|\\s*SCALAR\\s*")); + assertThat(readLine(), RegexMatcher.matches("\\s*DAYNAME\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*DAY_OF_MONTH\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*DAYOFMONTH\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*DAY\\s*\\|\\s*SCALAR\\s*")); @@ -68,8 +70,6 @@ public abstract class ShowTestCase extends CliIntegrationTestCase { assertThat(readLine(), RegexMatcher.matches("\\s*DAYOFYEAR\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*HOUR_OF_DAY\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*MINUTE_OF_DAY\\s*\\|\\s*SCALAR\\s*")); - assertThat(readLine(), RegexMatcher.matches("\\s*DAY_NAME\\s*\\|\\s*SCALAR\\s*")); - assertThat(readLine(), RegexMatcher.matches("\\s*DAYNAME\\s*\\|\\s*SCALAR\\s*")); assertEquals("", readLine()); } } diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvSpecTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvSpecTestCase.java index 4aa599290e6..a8ce308f323 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvSpecTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvSpecTestCase.java @@ -38,6 +38,7 @@ public abstract class CsvSpecTestCase extends SpecBaseIntegrationTestCase { tests.addAll(readScriptSpec("/nulls.csv-spec", parser)); tests.addAll(readScriptSpec("/nested.csv-spec", parser)); tests.addAll(readScriptSpec("/functions.csv-spec", parser)); + tests.addAll(readScriptSpec("/math.csv-spec", parser)); return tests; } diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcAssert.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcAssert.java index 133006c66a8..b0a0d36fba4 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcAssert.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcAssert.java @@ -14,10 +14,8 @@ import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Types; import java.util.ArrayList; -import java.util.Calendar; import java.util.List; import java.util.Locale; -import java.util.TimeZone; import static java.lang.String.format; import static java.sql.Types.BIGINT; @@ -35,8 +33,6 @@ import static org.junit.Assert.fail; * Utility class for doing JUnit-style asserts over JDBC. 
*/ public class JdbcAssert { - private static final Calendar UTC_CALENDAR = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT); - public static void assertResultSets(ResultSet expected, ResultSet actual) throws SQLException { assertResultSets(expected, actual, null); } @@ -133,7 +129,7 @@ public class JdbcAssert { doAssertResultSetData(ex, ac, logger, lenient); } } - + private static void doAssertResultSetData(ResultSet expected, ResultSet actual, Logger logger, boolean lenient) throws SQLException { ResultSetMetaData metaData = expected.getMetaData(); int columns = metaData.getColumnCount(); @@ -172,7 +168,7 @@ public class JdbcAssert { } catch (ClassNotFoundException cnfe) { throw new SQLException(cnfe); } - + Object expectedObject = expected.getObject(column); Object actualObject = lenient ? actual.getObject(column, expectedColumnClass) : actual.getObject(column); diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcIntegrationTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcIntegrationTestCase.java index 301e15c8efb..c6594d72051 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcIntegrationTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcIntegrationTestCase.java @@ -61,7 +61,8 @@ public abstract class JdbcIntegrationTestCase extends ESRestTestCase { // tag::connect-dm String address = "jdbc:es://" + elasticsearchAddress; // <1> Properties connectionProperties = connectionProperties(); // <2> - Connection connection = DriverManager.getConnection(address, connectionProperties); + Connection connection = + DriverManager.getConnection(address, connectionProperties); // end::connect-dm assertNotNull("The timezone should be specified", connectionProperties.getProperty(JdbcConfiguration.TIME_ZONE)); return connection; diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ResultSetTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ResultSetTestCase.java index 447fc4f17e1..80580f3461a 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ResultSetTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ResultSetTestCase.java @@ -24,13 +24,13 @@ import java.sql.Blob; import java.sql.Clob; import java.sql.Connection; import java.sql.DriverManager; -import java.sql.JDBCType; import java.sql.NClob; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; +import java.sql.SQLType; import java.sql.Timestamp; import java.sql.Types; import java.util.Arrays; @@ -64,7 +64,7 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase { static final Set fieldsNames = Stream.of("test_byte", "test_integer", "test_long", "test_short", "test_double", "test_float", "test_keyword") .collect(Collectors.toCollection(HashSet::new)); - static final Map,JDBCType> dateTimeTestingFields = new HashMap,JDBCType>(); + static final Map, SQLType> dateTimeTestingFields = new HashMap<>(); static final String SELECT_ALL_FIELDS = "SELECT test_boolean, test_byte, test_integer," + "test_long, test_short, test_double, test_float, test_keyword, test_date FROM test"; static final String SELECT_WILDCARD = "SELECT * FROM test"; @@ -193,17 +193,17 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase { assertEquals(format(Locale.ROOT, "Numeric %s out of 
range", Double.toString(floatNotByte)), sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getByte("test_keyword")); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Byte", randomString), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Byte", randomString), sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getObject("test_keyword", Byte.class)); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Byte", randomString), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [VARCHAR] to a Byte", randomString), sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getByte("test_date")); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Byte", randomDate), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Byte", randomDate), sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Byte.class)); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Byte", randomDate), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [TIMESTAMP] to a Byte", randomDate), sqle.getMessage()); }); } @@ -249,7 +249,7 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase { doWithQuery(SELECT_WILDCARD, (results) -> { results.next(); for(Entry e : map.entrySet()) { - short actual = (short) results.getObject(e.getKey(), Short.class); + short actual = results.getObject(e.getKey(), Short.class); if (e.getValue() instanceof Double) { assertEquals("For field " + e.getKey(), Math.round(e.getValue().doubleValue()), results.getShort(e.getKey())); assertEquals("For field " + e.getKey(), Math.round(e.getValue().doubleValue()), actual); @@ -590,7 +590,7 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase { results.next(); for(Entry e : map.entrySet()) { assertEquals("For field " + e.getKey(), e.getValue().doubleValue(), results.getDouble(e.getKey()), 0.0d); - assertEquals("For field " + e.getKey(), + assertEquals("For field " + e.getKey(), e.getValue().doubleValue(), results.getObject(e.getKey(), Double.class), 0.0d); } }); @@ -673,7 +673,7 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase { results.next(); for(Entry e : map.entrySet()) { assertEquals("For field " + e.getKey(), e.getValue().floatValue(), results.getFloat(e.getKey()), 0.0f); - assertEquals("For field " + e.getKey(), + assertEquals("For field " + e.getKey(), e.getValue().floatValue(), results.getObject(e.getKey(), Float.class), 0.0f); } }); @@ -746,7 +746,7 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase { builder.field("test_integer", randomValueOtherThan(0, () -> randomInt())); builder.field("test_long", randomValueOtherThan(0L, () -> randomLong())); builder.field("test_short", randomValueOtherThan((short) 0, () -> randomShort())); - builder.field("test_double", randomValueOtherThanMany(i -> i < 1.0d && i > -1.0d && i < Double.MAX_VALUE + builder.field("test_double", randomValueOtherThanMany(i -> i < 1.0d && i > -1.0d && i < Double.MAX_VALUE && i > Double.MIN_VALUE, () -> randomDouble() * randomInt())); builder.field("test_float", randomValueOtherThanMany(i -> i < 1.0f && i > -1.0f && i < Float.MAX_VALUE && i > Float.MIN_VALUE, @@ -820,9 +820,9 @@ public class ResultSetTestCase extends 
JdbcIntegrationTestCase { assertEquals(results.getDate("test_date"), new java.sql.Date(connCalendar.getTimeInMillis())); assertEquals(results.getDate(9), new java.sql.Date(connCalendar.getTimeInMillis())); - assertEquals(results.getObject("test_date", java.sql.Date.class), + assertEquals(results.getObject("test_date", java.sql.Date.class), new java.sql.Date(randomLongDate - (randomLongDate % 86400000L))); - assertEquals(results.getObject(9, java.sql.Date.class), + assertEquals(results.getObject(9, java.sql.Date.class), new java.sql.Date(randomLongDate - (randomLongDate % 86400000L))); // bulk validation for all fields which are not of type date @@ -889,9 +889,9 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase { assertEquals(results.getTime("test_date"), new java.sql.Time(c.getTimeInMillis())); assertEquals(results.getTime(9), new java.sql.Time(c.getTimeInMillis())); - assertEquals(results.getObject("test_date", java.sql.Time.class), + assertEquals(results.getObject("test_date", java.sql.Time.class), new java.sql.Time(randomLongDate % 86400000L)); - assertEquals(results.getObject(9, java.sql.Time.class), + assertEquals(results.getObject(9, java.sql.Time.class), new java.sql.Time(randomLongDate % 86400000L)); validateErrorsForDateTimeTestsWithoutCalendar(results::getTime); @@ -1126,7 +1126,7 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase { assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getRowId("test"), "RowId not supported"); assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getRowId(1), "RowId not supported"); assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getSQLXML("test"), "SQLXML not supported"); - assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getSQLXML(1), "SQLXML not supported"); + assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getSQLXML(1), "SQLXML not supported"); assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getURL("test"), "URL not supported"); assertThrowsUnsupportedAndExpectErrorMessage(() -> r.getURL(1), "URL not supported"); } @@ -1425,7 +1425,7 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase { * It returns a map containing the field name and its randomly generated value to be later used in checking the returned values. 
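The expected getDate/getTime values above rely on plain epoch arithmetic: 86,400,000 ms is one day, so stripping the remainder gives UTC midnight for java.sql.Date while the remainder alone is the java.sql.Time time-of-day:

    long millisInDay = 86_400_000L;
    long datePart = randomLongDate - (randomLongDate % millisInDay); // UTC midnight of that day
    long timePart = randomLongDate % millisInDay;                    // milliseconds since midnight
    assertEquals(randomLongDate, datePart + timePart);               // the two parts partition the instant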
*/ private Map<String, Number> createTestDataForNumericValueTypes(Supplier<Number> randomGenerator) throws Exception, IOException { - Map<String, Number> map = new HashMap<String, Number>(); + Map<String, Number> map = new HashMap<>(); createIndex("test"); updateMappingForNumericValuesTests("test"); @@ -1482,20 +1482,20 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase { private void validateErrorsForDateTimeTestsWithoutCalendar(CheckedFunction<String, Object, SQLException> method) { SQLException sqle; - for(Entry<Tuple<String,Object>,JDBCType> field : dateTimeTestingFields.entrySet()) { + for (Entry<Tuple<String, Object>, SQLType> field : dateTimeTestingFields.entrySet()) { sqle = expectThrows(SQLException.class, () -> method.apply(field.getKey().v1())); assertEquals( - format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Long", + format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Long", field.getKey().v2(), field.getValue()), sqle.getMessage()); } } private void validateErrorsForDateTimeTestsWithCalendar(Calendar c, CheckedBiFunction<String, Calendar, Object, SQLException> method) { SQLException sqle; - for(Entry<Tuple<String,Object>,JDBCType> field : dateTimeTestingFields.entrySet()) { + for (Entry<Tuple<String, Object>, SQLType> field : dateTimeTestingFields.entrySet()) { sqle = expectThrows(SQLException.class, () -> method.apply(field.getKey().v1(), c)); assertEquals( - format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Long", + format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Long", field.getKey().v2(), field.getValue()), sqle.getMessage()); } } diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SimpleExampleTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SimpleExampleTestCase.java index f5d559d9bf0..35f2dba7779 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SimpleExampleTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SimpleExampleTestCase.java @@ -10,6 +10,8 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; +import static org.hamcrest.Matchers.containsString; + public class SimpleExampleTestCase extends JdbcIntegrationTestCase { public void testSimpleExample() throws Exception { index("library", builder -> { @@ -20,13 +22,17 @@ public class SimpleExampleTestCase extends JdbcIntegrationTestCase { // tag::simple_example try (Statement statement = connection.createStatement(); ResultSet results = statement.executeQuery( - "SELECT name, page_count FROM library ORDER BY page_count DESC LIMIT 1")) { + " SELECT name, page_count" + + " FROM library" + + " ORDER BY page_count DESC" + + " LIMIT 1")) { assertTrue(results.next()); assertEquals("Don Quixote", results.getString(1)); assertEquals(1072, results.getInt(2)); - SQLException e = expectThrows(SQLException.class, () -> results.getInt(1)); - assertTrue(e.getMessage(), - e.getMessage().contains("Unable to convert value [Don Quixote] of type [VARCHAR] to an Integer")); + SQLException e = expectThrows(SQLException.class, () -> + results.getInt(1)); + assertThat(e.getMessage(), containsString("Unable to convert " + + "value [Don Quixote] of type [VARCHAR] to an Integer")); assertFalse(results.next()); } // end::simple_example diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/rest/RestSqlTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/rest/RestSqlTestCase.java index 7403bee5448..4c5141dc9f6 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/rest/RestSqlTestCase.java +++
b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/rest/RestSqlTestCase.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.qa.sql.rest; import com.fasterxml.jackson.core.io.JsonStringEncoder; + import org.apache.http.HttpEntity; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; @@ -28,7 +29,9 @@ import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.sql.JDBCType; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; @@ -415,6 +418,109 @@ public abstract class RestSqlTestCase extends ESRestTestCase implements ErrorsTe assertEquals("foo", matchQuery.get("query")); } + public void testTranslateQueryWithGroupByAndHaving() throws IOException { + index("{\"salary\":100}", + "{\"age\":20}"); + + Map<String, Object> response = runSql("", + new StringEntity("{\"query\":\"SELECT avg(salary) FROM test GROUP BY abs(age) HAVING avg(salary) > 50 LIMIT 10\"}", + ContentType.APPLICATION_JSON), "/translate/" + ); + + assertEquals(0, response.get("size")); + assertEquals(false, response.get("_source")); + assertEquals("_none_", response.get("stored_fields")); + + @SuppressWarnings("unchecked") + Map<String, Object> aggregations = (Map<String, Object>) response.get("aggregations"); + assertNotNull(aggregations); + assertEquals(1, aggregations.size()); + + @SuppressWarnings("unchecked") + Map<String, Object> groupby = (Map<String, Object>) aggregations.get("groupby"); + assertEquals(2, groupby.size()); + + @SuppressWarnings("unchecked") + Map<String, Object> composite = (Map<String, Object>) groupby.get("composite"); + assertEquals(2, composite.size()); + assertEquals(10, composite.get("size")); + + @SuppressWarnings("unchecked") + List<Object> sources = (List<Object>) composite.get("sources"); + assertEquals(1, sources.size()); + + @SuppressWarnings("unchecked") + Map<String, Object> sourcesListMap = + (Map<String, Object>) ((Map<String, Object>) sources.get(0)).values().iterator().next(); + assertEquals(1, sourcesListMap.size()); + + @SuppressWarnings("unchecked") + Map<String, Object> terms = (Map<String, Object>) sourcesListMap.get("terms"); + assertEquals(4, terms.size()); + assertEquals("long", terms.get("value_type")); + assertEquals(true, terms.get("missing_bucket")); + assertEquals("asc", terms.get("order")); + + @SuppressWarnings("unchecked") + Map<String, Object> termsScript = (Map<String, Object>) terms.get("script"); + assertEquals(3, termsScript.size()); + assertEquals("Math.abs(doc[params.v0].value)", termsScript.get("source")); + assertEquals("painless", termsScript.get("lang")); + + @SuppressWarnings("unchecked") + Map<String, Object> termsScriptParams = (Map<String, Object>) termsScript.get("params"); + assertEquals(1, termsScriptParams.size()); + assertEquals("age", termsScriptParams.get("v0")); + + @SuppressWarnings("unchecked") + Map<String, Object> aggregations2 = (Map<String, Object>) groupby.get("aggregations"); + assertEquals(3, aggregations2.size()); + + List<Integer> aggKeys = new ArrayList<>(2); + String aggFilterKey = null; + for (Map.Entry<String, Object> entry : aggregations2.entrySet()) { + String key = entry.getKey(); + if (key.startsWith("having")) { + aggFilterKey = key; + } else { + aggKeys.add(Integer.valueOf(key)); + @SuppressWarnings("unchecked") + Map<String, Object> aggr = (Map<String, Object>) entry.getValue(); + assertEquals(1, aggr.size()); + @SuppressWarnings("unchecked") + Map<String, Object> avg = (Map<String, Object>) aggr.get("avg"); + assertEquals(1, avg.size()); + assertEquals("salary", avg.get("field")); + } + } + Collections.sort(aggKeys); + assertEquals("having" + aggKeys.get(1), aggFilterKey); + + @SuppressWarnings("unchecked") + Map<String, Object> having = (Map<String, Object>) aggregations2.get(aggFilterKey); + assertEquals(1, having.size()); + +
@SuppressWarnings("unchecked") + Map<String, Object> bucketSelector = (Map<String, Object>) having.get("bucket_selector"); + assertEquals(3, bucketSelector.size()); + assertEquals("skip", bucketSelector.get("gap_policy")); + + @SuppressWarnings("unchecked") + Map<String, Object> bucketsPath = (Map<String, Object>) bucketSelector.get("buckets_path"); + assertEquals(1, bucketsPath.size()); + assertEquals(aggKeys.get(1).toString(), bucketsPath.get("a0")); + + @SuppressWarnings("unchecked") + Map<String, Object> filterScript = (Map<String, Object>) bucketSelector.get("script"); + assertEquals(3, filterScript.size()); + assertEquals("(params.a0) > (params.v0)", filterScript.get("source")); + assertEquals("painless", filterScript.get("lang")); + @SuppressWarnings("unchecked") + Map<String, Object> filterScriptParams = (Map<String, Object>) filterScript.get("params"); + assertEquals(1, filterScriptParams.size()); + assertEquals(50, filterScriptParams.get("v0")); + } + public void testBasicQueryText() throws IOException { index("{\"test\":\"test\"}", "{\"test\":\"test\"}"); diff --git a/x-pack/qa/sql/src/main/resources/agg.sql-spec b/x-pack/qa/sql/src/main/resources/agg.sql-spec index e2213caa597..a61c825623c 100644 --- a/x-pack/qa/sql/src/main/resources/agg.sql-spec +++ b/x-pack/qa/sql/src/main/resources/agg.sql-spec @@ -370,7 +370,7 @@ SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_ aggHavingWithMultipleScalarFunctionsBasedOnAliasFromGroupBy SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY languages HAVING d - ma % mi > 0 AND AVG(salary) > 30000 ORDER BY languages; aggHavingWithMultipleScalarFunctionsBasedOnAliasFromGroupByAndAggNotInGroupBy -SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY languages HAVING ROUND(d - ABS(ma % mi)) + AVG(salary) > 0 AND AVG(salary) > 30000 ORDER BY languages; +SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY languages HAVING ROUND((d - ABS(ma % mi))) + AVG(salary) > 0 AND AVG(salary) > 30000 ORDER BY languages; aggHavingScalarOnAggFunctionsWithoutAliasesInAndNotInGroupBy SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY languages HAVING MAX(salary) % MIN(salary) + AVG(salary) > 3000 ORDER BY languages; @@ -385,7 +385,7 @@ SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_ aggMultiGroupByHavingWithMultipleScalarFunctionsBasedOnAliasFromGroupBy SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY gender, languages HAVING d - ma % mi > 0 AND AVG(salary) > 30000 ORDER BY gender, languages; aggMultiGroupByHavingWithMultipleScalarFunctionsBasedOnAliasFromGroupByAndAggNotInGroupBy -SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY gender, languages HAVING ROUND(d - ABS(ma % mi)) + AVG(salary) > 0 AND AVG(salary) > 30000 ORDER BY gender, languages; +SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY gender, languages HAVING ROUND((d - ABS(ma % mi))) + AVG(salary) > 0 AND AVG(salary) > 30000 ORDER BY gender, languages; aggMultiGroupByHavingScalarOnAggFunctionsWithoutAliasesInAndNotInGroupBy SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY gender, languages HAVING MAX(salary) % MIN(salary) + AVG(salary) > 3000 ORDER BY gender, languages;
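The testTranslateQueryWithGroupByAndHaving test above pins down what the SQL translate API produces for a GROUP BY/HAVING query: a search request with size 0, _source disabled, a composite "groupby" aggregation wrapping a painless terms script, and a bucket_selector pipeline aggregation whose "(params.a0) > (params.v0)" script implements the HAVING filter. For reference, here is a minimal sketch of requesting the same translation through the low-level REST client; the absolute endpoint path /_xpack/sql/translate and the localhost address are illustrative assumptions, not taken from the test, which builds its URL through the runSql helper.

```java
import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class TranslateSketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // Ask the SQL plugin to translate the query instead of executing it.
            // The endpoint path is an assumption (6.x naming); adjust to your version.
            Request request = new Request("POST", "/_xpack/sql/translate");
            request.setJsonEntity(
                "{\"query\":\"SELECT avg(salary) FROM test GROUP BY abs(age) HAVING avg(salary) > 50 LIMIT 10\"}");
            Response response = client.performRequest(request);
            // Prints the generated search request: size 0, a composite
            // "groupby" aggregation, and a bucket_selector for the HAVING.
            System.out.println(EntityUtils.toString(response.getEntity()));
        }
    }
}
```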
diff --git a/x-pack/qa/sql/src/main/resources/alias.csv-spec b/x-pack/qa/sql/src/main/resources/alias.csv-spec index f1fa900706a..57c7307065e 100644 --- a/x-pack/qa/sql/src/main/resources/alias.csv-spec +++ b/x-pack/qa/sql/src/main/resources/alias.csv-spec @@ -47,7 +47,7 @@ salary | INTEGER ; describePattern -DESCRIBE test_*; +DESCRIBE "test_*"; column:s | type:s @@ -99,7 +99,7 @@ F | 10099.28 ; testGroupByOnPattern -SELECT gender, PERCENTILE(emp_no, 97) p1 FROM test_* WHERE gender is NOT NULL GROUP BY gender; +SELECT gender, PERCENTILE(emp_no, 97) p1 FROM "test_*" WHERE gender is NOT NULL GROUP BY gender; gender:s | p1:d diff --git a/x-pack/qa/sql/src/main/resources/command.csv-spec b/x-pack/qa/sql/src/main/resources/command.csv-spec index 81aa18b2e84..26e94b445c7 100644 --- a/x-pack/qa/sql/src/main/resources/command.csv-spec +++ b/x-pack/qa/sql/src/main/resources/command.csv-spec @@ -19,6 +19,8 @@ PERCENTILE_RANK |AGGREGATE SUM_OF_SQUARES |AGGREGATE SKEWNESS |AGGREGATE KURTOSIS |AGGREGATE +DAY_NAME |SCALAR +DAYNAME |SCALAR DAY_OF_MONTH |SCALAR DAYOFMONTH |SCALAR DAY |SCALAR @@ -34,18 +36,16 @@ HOUR |SCALAR MINUTE_OF_DAY |SCALAR MINUTE_OF_HOUR |SCALAR MINUTE |SCALAR -SECOND_OF_MINUTE|SCALAR -SECOND |SCALAR -MONTH_OF_YEAR |SCALAR -MONTH |SCALAR -YEAR |SCALAR -WEEK_OF_YEAR |SCALAR -WEEK |SCALAR -DAY_NAME |SCALAR -DAYNAME |SCALAR MONTH_NAME |SCALAR MONTHNAME |SCALAR +MONTH_OF_YEAR |SCALAR +MONTH |SCALAR +SECOND_OF_MINUTE|SCALAR +SECOND |SCALAR QUARTER |SCALAR +YEAR |SCALAR +WEEK_OF_YEAR |SCALAR +WEEK |SCALAR ABS |SCALAR ACOS |SCALAR ASIN |SCALAR @@ -77,24 +77,25 @@ SIN |SCALAR SINH |SCALAR SQRT |SCALAR TAN |SCALAR +TRUNCATE |SCALAR ASCII |SCALAR -CHAR |SCALAR BIT_LENGTH |SCALAR +CHAR |SCALAR CHAR_LENGTH |SCALAR -CHARACTER_LENGTH|SCALAR +CHARACTER_LENGTH|SCALAR +CONCAT |SCALAR +INSERT |SCALAR LCASE |SCALAR -LENGTH |SCALAR -LTRIM |SCALAR -RTRIM |SCALAR -SPACE |SCALAR -CONCAT |SCALAR -INSERT |SCALAR LEFT |SCALAR +LENGTH |SCALAR LOCATE |SCALAR +LTRIM |SCALAR POSITION |SCALAR REPEAT |SCALAR REPLACE |SCALAR -RIGHT |SCALAR +RIGHT |SCALAR +RTRIM |SCALAR +SPACE |SCALAR SUBSTRING |SCALAR UCASE |SCALAR SCORE |SCORE @@ -133,6 +134,8 @@ showFunctionsWithLeadingPattern SHOW FUNCTIONS LIKE '%DAY%'; name:s | type:s +DAY_NAME |SCALAR +DAYNAME |SCALAR DAY_OF_MONTH |SCALAR DAYOFMONTH |SCALAR DAY |SCALAR @@ -142,8 +145,6 @@ DAY_OF_YEAR |SCALAR DAYOFYEAR |SCALAR HOUR_OF_DAY |SCALAR MINUTE_OF_DAY |SCALAR -DAY_NAME |SCALAR -DAYNAME |SCALAR ; showTables diff --git a/x-pack/qa/sql/src/main/resources/docs.csv-spec b/x-pack/qa/sql/src/main/resources/docs.csv-spec index 280e9a5edf0..570bbb052f0 100644 --- a/x-pack/qa/sql/src/main/resources/docs.csv-spec +++ b/x-pack/qa/sql/src/main/resources/docs.csv-spec @@ -195,6 +195,8 @@ PERCENTILE_RANK |AGGREGATE SUM_OF_SQUARES |AGGREGATE SKEWNESS |AGGREGATE KURTOSIS |AGGREGATE +DAY_NAME |SCALAR +DAYNAME |SCALAR DAY_OF_MONTH |SCALAR DAYOFMONTH |SCALAR DAY |SCALAR @@ -210,18 +212,16 @@ HOUR |SCALAR MINUTE_OF_DAY |SCALAR MINUTE_OF_HOUR |SCALAR MINUTE |SCALAR -SECOND_OF_MINUTE|SCALAR -SECOND |SCALAR -MONTH_OF_YEAR |SCALAR -MONTH |SCALAR -YEAR |SCALAR -WEEK_OF_YEAR |SCALAR -WEEK |SCALAR -DAY_NAME |SCALAR -DAYNAME |SCALAR MONTH_NAME |SCALAR MONTHNAME |SCALAR +MONTH_OF_YEAR |SCALAR +MONTH |SCALAR +SECOND_OF_MINUTE|SCALAR +SECOND |SCALAR QUARTER |SCALAR +YEAR |SCALAR +WEEK_OF_YEAR |SCALAR +WEEK |SCALAR ABS |SCALAR ACOS |SCALAR ASIN |SCALAR @@ -253,27 +253,28 @@ SIN |SCALAR SINH |SCALAR SQRT |SCALAR TAN |SCALAR +TRUNCATE |SCALAR ASCII |SCALAR -CHAR |SCALAR BIT_LENGTH |SCALAR -CHAR_LENGTH |SCALAR -CHARACTER_LENGTH|SCALAR +CHAR |SCALAR +CHAR_LENGTH |SCALAR +CHARACTER_LENGTH|SCALAR +CONCAT |SCALAR +INSERT |SCALAR LCASE |SCALAR -LENGTH
|SCALAR -LTRIM |SCALAR -RTRIM |SCALAR -SPACE |SCALAR -CONCAT |SCALAR -INSERT |SCALAR LEFT |SCALAR +LENGTH |SCALAR LOCATE |SCALAR +LTRIM |SCALAR POSITION |SCALAR REPEAT |SCALAR REPLACE |SCALAR -RIGHT |SCALAR +RIGHT |SCALAR +RTRIM |SCALAR +SPACE |SCALAR SUBSTRING |SCALAR UCASE |SCALAR -SCORE |SCORE +SCORE |SCORE // end::showFunctions ; @@ -321,6 +322,8 @@ SHOW FUNCTIONS LIKE '%DAY%'; name | type ---------------+--------------- +DAY_NAME |SCALAR +DAYNAME |SCALAR DAY_OF_MONTH |SCALAR DAYOFMONTH |SCALAR DAY |SCALAR @@ -330,8 +333,6 @@ DAY_OF_YEAR |SCALAR DAYOFYEAR |SCALAR HOUR_OF_DAY |SCALAR MINUTE_OF_DAY |SCALAR -DAY_NAME |SCALAR -DAYNAME |SCALAR // end::showFunctionsWithPattern ; @@ -357,8 +358,8 @@ selectInline // tag::selectInline SELECT 1 + 1; - (1 + 1) ---------------- + 1 + 1 +-------------- 2 // end::selectInline @@ -548,7 +549,7 @@ M |63 groupByAndAggExpression // tag::groupByAndAggExpression -SELECT gender AS g, ROUND(MIN(salary) / 100) AS salary FROM emp GROUP BY gender; +SELECT gender AS g, ROUND((MIN(salary) / 100)) AS salary FROM emp GROUP BY gender; g | salary ---------------+--------------- @@ -1124,3 +1125,310 @@ SELECT YEAR(CAST('2018-05-19T11:23:45Z' AS TIMESTAMP)) AS year; 2018 // end::conversionStringToDateCast ; + +/////////////////////////////// +// +// Math +// +/////////////////////////////// + +mathInlineAbs +// tag::abs +SELECT ABS(-123.5), ABS(55); + + ABS(-123.5) | ABS(55) +---------------+--------------- +123.5 |55 +// end::abs +; + +mathInlineAcos +// tag::mathInlineAcos +SELECT ACOS(COS(PI())), PI(); + + ACOS(COS(PI)) | PI +-----------------+----------------- +3.141592653589793|3.141592653589793 +// end::mathInlineAcos +; + +mathInlineAsin +// tag::mathInlineAsin +SELECT ROUND(DEGREES(ASIN(0.7071067811865475))) AS "ASIN(0.707)", ROUND(SIN(RADIANS(45)), 3) AS "SIN(45)"; + + ASIN(0.707) | SIN(45) +---------------+--------------- +45.0 |0.707 +// end::mathInlineAsin +; + +mathInlineAtan +// tag::mathInlineAtan +SELECT DEGREES(ATAN(TAN(RADIANS(90)))); + +DEGREES(ATAN(TAN(RADIANS(90)))) +------------------------------- +90.0 +// end::mathInlineAtan +; + +mathInlineAtan2 +// tag::mathInlineAtan2 +SELECT ATAN2(5 * SIN(RADIANS(45)), 5 * COS(RADIANS(45))) AS "ATAN2(5*SIN(45), 5*COS(45))", RADIANS(45); + +ATAN2(5*SIN(45), 5*COS(45))| RADIANS(45) +---------------------------+------------------ +0.7853981633974483 |0.7853981633974483 +// end::mathInlineAtan2 +; + +mathInlineCbrtWithNegativeValue +// tag::mathInlineCbrtWithNegativeValue +SELECT CBRT(-125.5); + + CBRT(-125.5) +------------------- +-5.0066577974783435 +// end::mathInlineCbrtWithNegativeValue +; + +mathInlineCeiling +// tag::mathInlineCeiling +SELECT CEIL(125.01), CEILING(-125.99); + + CEIL(125.01) | CEIL(-125.99) +---------------+--------------- +126 |-125 +// end::mathInlineCeiling +; + +mathInlineCosine +// tag::mathInlineCosine +SELECT COS(RADIANS(180)), POWER(SIN(RADIANS(54)), 2) + POWER(COS(RADIANS(54)), 2) AS pythagorean_identity; + +COS(RADIANS(180))|pythagorean_identity +-----------------+-------------------- +-1.0 |1.0 +// end::mathInlineCosine +; + +mathInlineCosh +// tag::mathInlineCosh +SELECT COSH(5), (POWER(E(), 5) + POWER(E(), -5)) / 2 AS "(e^5 + e^-5)/2"; + + COSH(5) | (e^5 + e^-5)/2 +-----------------+----------------- +74.20994852478785|74.20994852478783 +// end::mathInlineCosh +; + +mathInlineCotangent +// tag::mathInlineCotangent +SELECT COT(RADIANS(30)) AS "COT(30)", COS(RADIANS(30)) / SIN(RADIANS(30)) AS "COS(30)/SIN(30)"; + + COT(30) | COS(30)/SIN(30) 
+------------------+------------------ +1.7320508075688774|1.7320508075688776 +// end::mathInlineCotangent +; + +mathInlineDegrees +// tag::mathInlineDegrees +SELECT DEGREES(PI() * 2), DEGREES(PI()); + +DEGREES((PI) * 2)| DEGREES(PI) +-----------------+--------------- +360.0 |180.0 +// end::mathInlineDegrees +; + +mathEulersNumber +// tag::mathEulersNumber +SELECT E(), CEIL(E()); + + E | CEIL(E) +-----------------+--------------- +2.718281828459045|3 +// end::mathEulersNumber +; + +mathExpInline +// tag::mathExpInline +SELECT EXP(1), E(), EXP(2), E() * E(); + + EXP(1) | E | EXP(2) | (E) * (E) +-----------------+-----------------+----------------+------------------ +2.718281828459045|2.718281828459045|7.38905609893065|7.3890560989306495 +// end::mathExpInline +; + +mathExpm1Inline +// tag::mathExpm1Inline +SELECT E(), EXP(2), EXPM1(2); + + E | EXP(2) | EXPM1(2) +-----------------+----------------+---------------- +2.718281828459045|7.38905609893065|6.38905609893065 +// end::mathExpm1Inline +; + +mathInlineFloor +// tag::mathInlineFloor +SELECT FLOOR(125.01), FLOOR(-125.99); + + FLOOR(125.01) |FLOOR(-125.99) +---------------+--------------- +125 |-126 +// end::mathInlineFloor +; + +mathInlineLog +// tag::mathInlineLog +SELECT EXP(3), LOG(20.085536923187668); + + EXP(3) |LOG(20.085536923187668) +------------------+----------------------- +20.085536923187668|3.0 +// end::mathInlineLog +; + +mathInlineLog10 +// tag::mathInlineLog10 +SELECT LOG10(5), LOG(5)/LOG(10); + + LOG10(5) |(LOG(5)) / (LOG(10)) +------------------+-------------------- +0.6989700043360189|0.6989700043360187 +// end::mathInlineLog10 +; + +mathPINumber +// tag::mathPINumber +SELECT PI(); + + PI +----------------- +3.141592653589793 +// end::mathPINumber +; + +mathInlinePowerPositive +// tag::mathInlinePowerPositive +SELECT POWER(3, 2), POWER(3, 3); + + POWER(3,2) | POWER(3,3) +---------------+--------------- +9.0 |27.0 +// end::mathInlinePowerPositive +; + +mathInlinePowerNegative +// tag::mathInlinePowerNegative +SELECT POWER(5, -1), POWER(5, -2); + + POWER(5,-1) | POWER(5,-2) +---------------+--------------- +0.2 |0.04 +// end::mathInlinePowerNegative +; + +mathInlineRadians +// tag::mathInlineRadians +SELECT RADIANS(90), PI()/2; + + RADIANS(90) | (PI) / 2 +------------------+------------------ +1.5707963267948966|1.5707963267948966 +// end::mathInlineRadians +; + +mathRoundWithNegativeParameter +// tag::mathRoundWithNegativeParameter +SELECT ROUND(-345.153, -1) AS rounded; + + rounded +--------------- +-350.0 +// end::mathRoundWithNegativeParameter +; + +mathRoundWithPositiveParameter +// tag::mathRoundWithPositiveParameter +SELECT ROUND(-345.153, 1) AS rounded; + + rounded +--------------- +-345.2 +// end::mathRoundWithPositiveParameter +; + +mathInlineSign +// tag::mathInlineSign +SELECT SIGN(-123), SIGN(0), SIGN(415); + + SIGN(-123) | SIGN(0) | SIGN(415) +---------------+---------------+--------------- +-1 |0 |1 +// end::mathInlineSign +; + +mathInlineSine +// tag::mathInlineSine +SELECT SIN(RADIANS(90)), POWER(SIN(RADIANS(67)), 2) + POWER(COS(RADIANS(67)), 2) AS pythagorean_identity; + +SIN(RADIANS(90))|pythagorean_identity +----------------+-------------------- +1.0 |1.0 +// end::mathInlineSine +; + +mathInlineSinh +// tag::mathInlineSinh +SELECT SINH(5), (POWER(E(), 5) - POWER(E(), -5)) / 2 AS "(e^5 - e^-5)/2"; + + SINH(5) | (e^5 - e^-5)/2 +-----------------+----------------- +74.20321057778875|74.20321057778874 +// end::mathInlineSinh +; + +mathInlineSqrt +// tag::mathInlineSqrt +SELECT SQRT(EXP(2)), E(), SQRT(25); 
+ + SQRT(EXP(2)) | E | SQRT(25) +-----------------+-----------------+--------------- +2.718281828459045|2.718281828459045|5.0 +// end::mathInlineSqrt +; + +mathInlineTangent +// tag::mathInlineTangent +SELECT TAN(RADIANS(66)) AS "TAN(66)", SIN(RADIANS(66))/COS(RADIANS(66)) AS "SIN(66)/COS(66)=TAN(66)"; + + TAN(66) |SIN(66)/COS(66)=TAN(66) +------------------+----------------------- +2.2460367739042164|2.246036773904216 +// end::mathInlineTangent +; + +mathTruncateWithNegativeParameter +// tag::mathTruncateWithNegativeParameter +SELECT TRUNCATE(-345.153, -1) AS trimmed; + + trimmed +--------------- +-340.0 +// end::mathTruncateWithNegativeParameter +; + +mathTruncateWithPositiveParameter +// tag::mathTruncateWithPositiveParameter +SELECT TRUNCATE(-345.153, 1) AS trimmed; + + trimmed +--------------- +-345.1 +// end::mathTruncateWithPositiveParameter +; + diff --git a/x-pack/qa/sql/src/main/resources/functions.csv-spec b/x-pack/qa/sql/src/main/resources/functions.csv-spec index 3622cfe0433..b5e98c26440 100644 --- a/x-pack/qa/sql/src/main/resources/functions.csv-spec +++ b/x-pack/qa/sql/src/main/resources/functions.csv-spec @@ -412,14 +412,14 @@ AlejandRo |2 checkColumnNameWithNestedArithmeticFunctionCallsOnTableColumn SELECT CHAR(emp_no % 10000) FROM "test_emp" WHERE emp_no > 10064 ORDER BY emp_no LIMIT 1; -CHAR(((emp_no) % 10000)):s +CHAR((emp_no) % 10000):s A ; checkColumnNameWithComplexNestedArithmeticFunctionCallsOnTableColumn1 SELECT CHAR(emp_no % (7000 + 3000)) FROM "test_emp" WHERE emp_no > 10065 ORDER BY emp_no LIMIT 1; -CHAR(((emp_no) % ((7000 + 3000)))):s +CHAR((emp_no) % (7000 + 3000)):s B ; @@ -427,6 +427,6 @@ B checkColumnNameWithComplexNestedArithmeticFunctionCallsOnTableColumn2 SELECT CHAR((emp_no % (emp_no - 1 + 1)) + 67) FROM "test_emp" WHERE emp_no > 10066 ORDER BY emp_no LIMIT 1; -CHAR(((((emp_no) % (((((emp_no) - 1)) + 1)))) + 67)):s +CHAR(((emp_no) % (((emp_no) - 1) + 1)) + 67):s C ; diff --git a/x-pack/qa/sql/src/main/resources/math.csv-spec b/x-pack/qa/sql/src/main/resources/math.csv-spec new file mode 100644 index 00000000000..1c292200ccd --- /dev/null +++ b/x-pack/qa/sql/src/main/resources/math.csv-spec @@ -0,0 +1,185 @@ +// this one doesn't work in H2 at all +truncateWithAsciiHavingAndOrderBy +SELECT TRUNCATE(ASCII(LEFT(first_name, 1)), 1), COUNT(*) count FROM test_emp GROUP BY ASCII(LEFT(first_name, 1)) HAVING COUNT(*) > 5 ORDER BY TRUNCATE(ASCII(LEFT(first_name, 1)), 1) DESC; + +TRUNCATE(ASCII(LEFT(first_name,1)),1):i| count:l +---------------------------------------+--------------- +65 |9 +66 |8 +72 |7 +75 |8 +77 |9 +80 |6 +83 |11 +; + +truncateWithNoSecondParameterWithAsciiHavingAndOrderBy +SELECT TRUNCATE(ASCII(LEFT(first_name, 1))), COUNT(*) count FROM test_emp GROUP BY ASCII(LEFT(first_name, 1)) HAVING COUNT(*) > 5 ORDER BY TRUNCATE(ASCII(LEFT(first_name, 1))) DESC; + +TRUNCATE(ASCII(LEFT(first_name,1)),0):i| count:l +---------------------------------------+--------------- +65 |9 +66 |8 +72 |7 +75 |8 +77 |9 +80 |6 +83 |11 +; + +roundWithGroupByAndOrderBy +SELECT ROUND(salary, 2) ROUNDED, salary FROM test_emp GROUP BY ROUNDED, salary ORDER BY ROUNDED LIMIT 10; + + ROUNDED | salary +-------------+--------------- +25324 |25324 +25945 |25945 +25976 |25976 +26436 |26436 +27215 |27215 +28035 |28035 +28336 |28336 +28941 |28941 +29175 |29175 +30404 |30404 +; + +truncateWithGroupByAndOrderBy +SELECT TRUNCATE(salary, 2) TRUNCATED, salary FROM test_emp GROUP BY TRUNCATED, salary ORDER BY TRUNCATED LIMIT 10; + + TRUNCATED | salary +-------------+--------------- +25324
|25324 +25945 |25945 +25976 |25976 +26436 |26436 +27215 |27215 +28035 |28035 +28336 |28336 +28941 |28941 +29175 |29175 +30404 |30404 +; + +truncateWithAsciiAndOrderBy +SELECT TRUNCATE(ASCII(LEFT(first_name,1)), -1) AS initial, first_name, ASCII(LEFT(first_name, 1)) FROM test_emp ORDER BY ASCII(LEFT(first_name, 1)) DESC LIMIT 15; + + initial | first_name |ASCII(LEFT(first_name,1)) +---------------+---------------+------------------------- +90 |Zvonko |90 +90 |Zhongwei |90 +80 |Yongqiao |89 +80 |Yishay |89 +80 |Yinghua |89 +80 |Xinglin |88 +80 |Weiyi |87 +80 |Vishv |86 +80 |Valdiodio |86 +80 |Valter |86 +80 |Uri |85 +80 |Udi |85 +80 |Tzvetan |84 +80 |Tse |84 +80 |Tuval |84 +; + +truncateWithHavingAndGroupBy +SELECT MIN(salary) mi, MAX(salary) ma, COUNT(*) c, TRUNCATE(AVG(salary)) tr FROM test_emp GROUP BY languages HAVING TRUNCATE(AVG(salary)) > 40000 ORDER BY languages; + + mi:i | ma:I | c:l | tr:i +---------------+---------------+-----------------+----------------- +25976 |73717 |16 |49875 +29175 |73578 |20 |48164 +26436 |74999 |22 |52154 +27215 |74572 |18 |47733 +25324 |73851 |24 |44040 +; + +// https://github.com/elastic/elasticsearch/issues/33773 +minMaxTruncateAndRoundOfAverageWithHavingRoundAndTruncate-Ignore +SELECT MIN(salary) mi, MAX(salary) ma, YEAR(hire_date) year, ROUND(AVG(languages), 1), TRUNCATE(AVG(languages), 1), COUNT(*) FROM test_emp GROUP BY YEAR(hire_date) HAVING ROUND(AVG(languages), 1) > 2.5 AND TRUNCATE(AVG(languages), 1) <= 3.0 ORDER BY YEAR(hire_date); + + mi | ma | year |ROUND(AVG(languages),1)|TRUNCATE(AVG(languages),1)| COUNT(1) +-------------+-------------+---------------+-----------------------+--------------------------+-------------- +26436 |74999 |1985 |3.1 |3.0 |11 +25976 |74970 |1989 |3.1 |3.0 |13 +31120 |71165 |1990 |3.1 |3.0 |12 +32568 |65030 |1991 |2.8 |2.8 |6 +30404 |58715 |1993 |3.0 |3.0 |3 +35742 |67492 |1994 |2.8 |2.7 |4 +28035 |65367 |1995 |2.6 |2.6 |5 +45656 |45656 |1996 |3.0 |3.0 |1 +64675 |64675 |1997 |3.0 |3.0 |1 +; + +// https://github.com/elastic/elasticsearch/issues/33773 +minMaxRoundWithHavingRound-Ignore +SELECT MIN(salary) mi, MAX(salary) ma, YEAR(hire_date) year, ROUND(AVG(languages), 1), COUNT(*) FROM test_emp GROUP BY YEAR(hire_date) HAVING ROUND(AVG(languages), 1) > 2.5 ORDER BY YEAR(hire_date); + + mi | ma | year |ROUND(AVG(languages),1)| COUNT(1) +-------------+-------------+---------------+-----------------------+-------------- +26436 |74999 |1985 |3.1 |11 +31897 |61805 |1986 |3.5 |11 +25324 |70011 |1987 |3.1 |15 +25945 |73578 |1988 |3.1 |9 +25976 |74970 |1989 |3.1 |13 +31120 |71165 |1990 |3.1 |12 +32568 |65030 |1991 |2.8 |6 +27215 |60781 |1992 |4.1 |8 +30404 |58715 |1993 |3.0 |3 +35742 |67492 |1994 |2.8 |4 +28035 |65367 |1995 |2.6 |5 +45656 |45656 |1996 |3.0 |1 +64675 |64675 |1997 |3.0 |1 +; + +groupByAndOrderByTruncateWithPositiveParameter +SELECT TRUNCATE(AVG(salary), 2), AVG(salary), COUNT(*) FROM test_emp GROUP BY TRUNCATE(salary, 2) ORDER BY TRUNCATE(salary, 2) DESC LIMIT 10; + +TRUNCATE(AVG(salary),2):i|AVG(salary):i | COUNT(1):l +-------------------------+---------------+--------------- +74999 |74999 |1 +74970 |74970 |1 +74572 |74572 |1 +73851 |73851 |1 +73717 |73717 |1 +73578 |73578 |1 +71165 |71165 |1 +70011 |70011 |1 +69904 |69904 |1 +68547 |68547 |1 +; + +groupByAndOrderByRoundWithPositiveParameter +SELECT ROUND(AVG(salary), 2), AVG(salary), COUNT(*) FROM test_emp GROUP BY ROUND(salary, 2) ORDER BY ROUND(salary, 2) DESC LIMIT 10; + +ROUND(AVG(salary),2):i| AVG(salary):i | COUNT(1):l 
+----------------------+-----------------+--------------- +74999 |74999 |1 +74970 |74970 |1 +74572 |74572 |1 +73851 |73851 |1 +73717 |73717 |1 +73578 |73578 |1 +71165 |71165 |1 +70011 |70011 |1 +69904 |69904 |1 +68547 |68547 |1 +; + +groupByAndOrderByRoundWithNoSecondParameter +SELECT ROUND(AVG(salary)), ROUND(salary) rounded, AVG(salary), COUNT(*) FROM test_emp GROUP BY rounded ORDER BY rounded DESC LIMIT 10; + +ROUND(AVG(salary),0):i| rounded:i | AVG(salary):i | COUNT(1):l +----------------------+-----------------+-----------------+--------------- +74999 |74999 |74999 |1 +74970 |74970 |74970 |1 +74572 |74572 |74572 |1 +73851 |73851 |73851 |1 +73717 |73717 |73717 |1 +73578 |73578 |73578 |1 +71165 |71165 |71165 |1 +70011 |70011 |70011 |1 +69904 |69904 |69904 |1 +68547 |68547 |68547 |1 +; diff --git a/x-pack/qa/sql/src/main/resources/math.sql-spec b/x-pack/qa/sql/src/main/resources/math.sql-spec index 6452d2a3ac0..a8ab87b087b 100644 --- a/x-pack/qa/sql/src/main/resources/math.sql-spec +++ b/x-pack/qa/sql/src/main/resources/math.sql-spec @@ -3,86 +3,50 @@ // mathAbs -// tag::abs SELECT ABS(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::abs mathACos -// tag::acos SELECT ACOS(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::acos mathASin -// tag::asin SELECT ASIN(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::asin mathATan -// tag::atan SELECT ATAN(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::atan //mathCbrt //SELECT CBRT(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; mathCeil // H2 returns CEIL as a double despite the value being an integer; we return a long as the other DBs SELECT CAST(CEIL(emp_no) AS INT) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; mathCos -// tag::cos SELECT COS(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::cos mathCosh -// tag::cosh SELECT COSH(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::cosh mathCot -// tag::cot SELECT COT(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::cot mathDegrees -// tag::degrees SELECT DEGREES(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::degrees mathExp -// tag::exp SELECT EXP(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::exp mathExpm1 -// tag::expm1 SELECT EXP(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::expm1 mathFloor SELECT CAST(FLOOR(emp_no) AS INT) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; mathLog -// tag::log SELECT LOG(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::log mathLog10 -// tag::log10 SELECT LOG10(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::log10 mathRadians -// tag::radians SELECT RADIANS(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::radians mathRound -SELECT CAST(ROUND(emp_no) AS INT) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; +SELECT CAST(ROUND(emp_no, 0) AS INT) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; mathSign -// tag::sign SELECT SIGN(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::sign mathSin -// tag::sin SELECT SIN(emp_no) m, first_name FROM "test_emp" WHERE 
emp_no < 10010 ORDER BY emp_no; -// end::sin mathSinH -// tag::sinh SELECT SINH(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::sinh mathSqrt -// tag::sqrt SELECT SQRT(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::sqrt mathTan -// tag::tan SELECT TAN(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::tan // // Combined methods @@ -125,12 +89,53 @@ SELECT 5 + 2 * 3 / 2 % 2 AS c, PI() as e, first_name FROM "test_emp" WHERE emp_n // binary functions // mathATan2 -// tag::atan2 SELECT ATAN2(emp_no, emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::atan2 -// tag::power mathPowerPositive SELECT POWER(emp_no, 2) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; mathPowerNegative SELECT POWER(salary, -1) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::power + +roundInline1 +SELECT ROUND(-345.123, -2) AS rounded; + +roundInline2 +SELECT ROUND(-345.123, 2) AS rounded; + +roundInline3 +SELECT ROUND(-345.123, 0) AS rounded; + +roundInline4 +SELECT ROUND(-345.123,-51231231) AS rounded; + +roundInline5 +SELECT ROUND(134.51, 1) AS rounded; + +roundInline6 +SELECT ROUND(134.56, 1) AS rounded; + +roundInline7 +SELECT ROUND(-345.123) AS rounded; + +truncateInline1 +SELECT TRUNCATE(-345.123, -2) AS trimmed; + +truncateInline2 +SELECT TRUNCATE(-345.123, 2) AS trimmed; + +truncateInline3 +SELECT TRUNCATE(-345.123, 0) AS trimmed; + +truncateInline4 +SELECT TRUNCATE(-345.123,-51231231) AS trimmed; + +truncateInline5 +SELECT TRUNCATE(134.51, 1) AS trimmed; + +truncateInline6 +SELECT TRUNCATE(134.56, 1) AS trimmed; + +truncateInline7 +SELECT TRUNCATE(-345.123) AS trimmed; + +truncateAndRoundInline +SELECT ROUND(134.56,1) AS rounded, TRUNCATE(134.56,1) AS trimmed; diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java index d6b2e321d6c..135ae14b9c8 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.security.authc.ldap; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.DocWriteResponse; @@ -13,7 +14,6 @@ import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; @@ -110,8 +110,9 @@ public abstract class AbstractAdLdapRealmTestCase extends SecurityIntegTestCase realmConfig = randomFrom(RealmConfig.values()); roleMappings = realmConfig.selectRoleMappings(ESTestCase::randomBoolean); useGlobalSSL = randomBoolean(); - ESLoggerFactory.getLogger("test").info("running test with realm configuration [{}], with direct group to role mapping [{}]. 
" + - "Settings [{}]", realmConfig, realmConfig.mapGroupsAsRoles, realmConfig.settings); + LogManager.getLogger(AbstractAdLdapRealmTestCase.class).info( + "running test with realm configuration [{}], with direct group to role mapping [{}]. Settings [{}]", + realmConfig, realmConfig.mapGroupsAsRoles, realmConfig.settings); } @AfterClass diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java index 3d1cdb202d1..2231d23296a 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java @@ -5,7 +5,7 @@ */ package org.elasticsearch.xpack.security.authc.ldap; -import org.elasticsearch.common.logging.ESLoggerFactory; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.settings.Settings; import org.junit.BeforeClass; @@ -31,9 +31,9 @@ public class MultipleAdRealmIT extends AbstractAdLdapRealmTestCase { .filter(config -> config.name().startsWith("AD")) .collect(Collectors.toList()); secondaryRealmConfig = randomFrom(configs); - ESLoggerFactory.getLogger("test") - .info("running test with secondary realm configuration [{}], with direct group to role mapping [{}]. Settings [{}]", - secondaryRealmConfig, secondaryRealmConfig.mapGroupsAsRoles, secondaryRealmConfig.settings); + LogManager.getLogger(MultipleAdRealmIT.class).info( + "running test with secondary realm configuration [{}], with direct group to role mapping [{}]. Settings [{}]", + secondaryRealmConfig, secondaryRealmConfig.mapGroupsAsRoles, secondaryRealmConfig.settings); // It's easier to test 2 realms when using file based role mapping, and for the purposes of // this test, there's no need to test native mappings. 
diff --git a/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/ESXPackSmokeClientTestCase.java b/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/ESXPackSmokeClientTestCase.java index c77715431ec..28267614dd3 100644 --- a/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/ESXPackSmokeClientTestCase.java +++ b/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/ESXPackSmokeClientTestCase.java @@ -6,11 +6,11 @@ package org.elasticsearch.xpack.ml.client; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.Environment; @@ -54,8 +54,7 @@ public abstract class ESXPackSmokeClientTestCase extends LuceneTestCase { */ public static final String TESTS_CLUSTER = "tests.cluster"; - protected static final Logger logger = ESLoggerFactory - .getLogger(ESXPackSmokeClientTestCase.class.getName()); + protected static final Logger logger = LogManager.getLogger(ESXPackSmokeClientTestCase.class); private static final AtomicInteger counter = new AtomicInteger(); private static Client client;
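One closing note on the new math specs earlier in this chunk: they fix the observable difference between ROUND and TRUNCATE, e.g. ROUND(-345.153, -1) is -350.0 while TRUNCATE(-345.153, -1) is -340.0. A self-contained sketch of those semantics; BigDecimal scaling is an illustrative stand-in, not the SQL engine's actual implementation:

```java
import java.math.BigDecimal;
import java.math.RoundingMode;

public class RoundVsTruncateSketch {
    // ROUND(x, d): round to d decimal places, ties away from zero.
    static double round(double x, int d) {
        return BigDecimal.valueOf(x).setScale(d, RoundingMode.HALF_UP).doubleValue();
    }

    // TRUNCATE(x, d): drop digits beyond d decimal places, toward zero.
    static double truncate(double x, int d) {
        return BigDecimal.valueOf(x).setScale(d, RoundingMode.DOWN).doubleValue();
    }

    public static void main(String[] args) {
        System.out.println(round(-345.153, -1));    // -350.0, as in mathRoundWithNegativeParameter
        System.out.println(truncate(-345.153, -1)); // -340.0, as in mathTruncateWithNegativeParameter
        System.out.println(round(-345.153, 1));     // -345.2
        System.out.println(truncate(-345.153, 1));  // -345.1
    }
}
```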