diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java
index 74d3a8dccaf..105deabfd40 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java
@@ -70,6 +70,59 @@ public class TestingConventionsTasks extends DefaultTask {
         // Run only after everything is compiled
         Boilerplate.getJavaSourceSets(getProject()).all(sourceSet -> dependsOn(sourceSet.getClassesTaskName()));
     }
+
+    @Input
+    public Map<String, Set<File>> classFilesPerTask(FileTree testClassFiles) {
+        Map<String, Set<File>> collector = new HashMap<>();
+        // RandomizedTestingTask
+        collector.putAll(
+            Stream.concat(
+                getProject().getTasks().withType(getRandomizedTestingTask()).stream(),
+                // Look at sub-projects too. As sometimes tests are implemented in parent but ran in sub-projects against
+                // different configurations
+                getProject().getSubprojects().stream().flatMap(subproject ->
+                    subproject.getTasks().withType(getRandomizedTestingTask()).stream()
+                )
+            )
+                .filter(Task::getEnabled)
+                .collect(Collectors.toMap(
+                    Task::getPath,
+                    task -> testClassFiles.matching(getRandomizedTestingPatternSet(task)).getFiles()
+                ))
+        );
+        // Gradle Test
+        collector.putAll(
+            Stream.concat(
+                getProject().getTasks().withType(Test.class).stream(),
+                getProject().getSubprojects().stream().flatMap(subproject ->
+                    subproject.getTasks().withType(Test.class).stream()
+                )
+            )
+                .filter(Task::getEnabled)
+                .collect(Collectors.toMap(
+                    Task::getPath,
+                    task -> task.getCandidateClassFiles().getFiles()
+                ))
+        );
+        return Collections.unmodifiableMap(collector);
+    }
+
+    @Input
+    public Map<String, File> getTestClassNames() {
+        if (testClassNames == null) {
+            testClassNames = Boilerplate.getJavaSourceSets(getProject()).getByName("test").getOutput().getClassesDirs()
+                .getFiles().stream()
+                .filter(File::exists)
+                .flatMap(testRoot -> walkPathAndLoadClasses(testRoot).entrySet().stream())
+                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+        }
+        return testClassNames;
+    }
+
+    @OutputFile
+    public File getSuccessMarker() {
+        return new File(getProject().getBuildDir(), "markers/" + getName());
+    }
 
     @TaskAction
     public void doCheck() throws IOException {
@@ -112,7 +165,7 @@ public class TestingConventionsTasks extends DefaultTask {
                 .collect(Collectors.toSet())
             )
         );
-        
+
         problems = collectProblems(
             checkNoneExists(
                 "Test classes implemented by inner classes will not run",
@@ -130,13 +183,13 @@ public class TestingConventionsTasks extends DefaultTask {
             ),
             collectProblems(
                 testClassesPerTask.entrySet().stream()
-                .map( entry ->
-                    checkAtLeastOneExists(
-                        "test class in " + entry.getKey(),
-                        entry.getValue().stream()
+                    .map( entry ->
+                        checkAtLeastOneExists(
+                            "test class in " + entry.getKey(),
+                            entry.getValue().stream()
+                        )
                     )
-                )
-                .collect(Collectors.joining())
+                    .collect(Collectors.joining())
             ),
             checkNoneExists(
                 "Test classes are not included in any enabled task (" +
@@ -161,7 +214,6 @@ public class TestingConventionsTasks extends DefaultTask {
         }
     }
 
-
     private String collectProblems(String... problems) {
         return Stream.of(problems)
             .map(String::trim)
@@ -170,42 +222,6 @@ public class TestingConventionsTasks extends DefaultTask {
             .collect(Collectors.joining());
     }
 
-    @Input
-    public Map<String, Set<File>> classFilesPerTask(FileTree testClassFiles) {
-        Map<String, Set<File>> collector = new HashMap<>();
-        // RandomizedTestingTask
-        collector.putAll(
-            Stream.concat(
-                getProject().getTasks().withType(getRandomizedTestingTask()).stream(),
-                // Look at sub-projects too. As sometimes tests are implemented in parent but ran in sub-projects against
-                // different configurations
-                getProject().getSubprojects().stream().flatMap(subproject ->
-                    subproject.getTasks().withType(getRandomizedTestingTask()).stream()
-                )
-            )
-                .filter(Task::getEnabled)
-                .collect(Collectors.toMap(
-                    Task::getPath,
-                    task -> testClassFiles.matching(getRandomizedTestingPatternSet(task)).getFiles()
-                ))
-        );
-        // Gradle Test
-        collector.putAll(
-            Stream.concat(
-                getProject().getTasks().withType(Test.class).stream(),
-                getProject().getSubprojects().stream().flatMap(subproject ->
-                    subproject.getTasks().withType(Test.class).stream()
-                )
-            )
-                .filter(Task::getEnabled)
-                .collect(Collectors.toMap(
-                    Task::getPath,
-                    task -> task.getCandidateClassFiles().getFiles()
-                ))
-        );
-        return Collections.unmodifiableMap(collector);
-    }
-
     @SuppressWarnings("unchecked")
     private PatternFilterable getRandomizedTestingPatternSet(Task task) {
         try {
@@ -232,23 +248,6 @@ public class TestingConventionsTasks extends DefaultTask {
         }
     }
 
-    @Input
-    public Map<String, File> getTestClassNames() {
-        if (testClassNames == null) {
-            testClassNames = Boilerplate.getJavaSourceSets(getProject()).getByName("test").getOutput().getClassesDirs()
-                .getFiles().stream()
-                .filter(File::exists)
-                .flatMap(testRoot -> walkPathAndLoadClasses(testRoot).entrySet().stream())
-                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
-        }
-        return testClassNames;
-    }
-
-    @OutputFile
-    public File getSuccessMarker() {
-        return new File(getProject().getBuildDir(), "markers/" + getName());
-    }
-
     private String checkNoneExists(String message, Stream<? extends Class<?>> stream) {
         String problem = stream
             .map(each -> " * " + each.getName())
diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml
index 06a389e64c6..6e628eab0cb 100644
--- a/buildSrc/src/main/resources/checkstyle_suppressions.xml
+++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -62,7 +62,6 @@
-
@@ -70,7 +69,6 @@
-
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 420a20808d7..05dfe546f78 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,5 +1,5 @@
 elasticsearch = 7.0.0
-lucene = 8.0.0-snapshot-7e4555a2fd
+lucene = 8.0.0-snapshot-774e9aefbc
 
 # optional dependencies
 spatial4j = 0.7
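The methods moved to the top of the task above are Gradle's incremental-build hooks: values returned by `@Input` getters feed the up-to-date check, and the `@OutputFile` marker gives the task an output to track, so the convention check is skipped when nothing changed. A minimal sketch of that pattern with the same Gradle APIs (hypothetical task, not part of this change):

    import org.gradle.api.DefaultTask;
    import org.gradle.api.tasks.Input;
    import org.gradle.api.tasks.OutputFile;
    import org.gradle.api.tasks.TaskAction;

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;

    public class MarkerCheckTask extends DefaultTask {

        private String checkedValue; // hypothetical input

        // Any change to this value invalidates Gradle's up-to-date check.
        @Input
        public String getCheckedValue() {
            return checkedValue;
        }

        public void setCheckedValue(String checkedValue) {
            this.checkedValue = checkedValue;
        }

        // An empty marker file is the task's only output; if it exists and the
        // inputs are unchanged, Gradle reports the task UP-TO-DATE and skips it.
        @OutputFile
        public File getSuccessMarker() {
            return new File(getProject().getBuildDir(), "markers/" + getName());
        }

        @TaskAction
        public void check() throws IOException {
            // ... perform the verification, throwing on failure ...
            getSuccessMarker().getParentFile().mkdirs();
            Files.write(getSuccessMarker().toPath(), new byte[0]);
        }
    }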
diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/transport/TransportClientBenchmark.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/transport/TransportClientBenchmark.java
index c9ab38fe355..66b3d7bf060 100644
--- a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/transport/TransportClientBenchmark.java
+++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/transport/TransportClientBenchmark.java
@@ -19,8 +19,10 @@
 package org.elasticsearch.client.benchmark.transport;
 
 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.bulk.BulkRequest;
 import org.elasticsearch.action.bulk.BulkResponse;
 import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.client.benchmark.AbstractBenchmark;
 import org.elasticsearch.client.benchmark.ops.bulk.BulkRequestExecutor;
@@ -32,10 +34,9 @@ import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.plugin.noop.NoopPlugin;
 import org.elasticsearch.plugin.noop.action.bulk.NoopBulkAction;
-import org.elasticsearch.plugin.noop.action.bulk.NoopBulkRequestBuilder;
 import org.elasticsearch.plugin.noop.action.search.NoopSearchAction;
-import org.elasticsearch.plugin.noop.action.search.NoopSearchRequestBuilder;
 import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.transport.client.PreBuiltTransportClient;
 
 import java.net.InetAddress;
@@ -79,13 +80,13 @@ public final class TransportClientBenchmark extends AbstractBenchmark<TransportClient> {
 
         @Override
         public boolean bulkIndex(List<String> bulkData) {
-            NoopBulkRequestBuilder builder = new NoopBulkRequestBuilder(client,NoopBulkAction.INSTANCE);
+            BulkRequest bulkRequest = new BulkRequest();
             for (String bulkItem : bulkData) {
-                builder.add(new IndexRequest(indexName, typeName).source(bulkItem.getBytes(StandardCharsets.UTF_8), XContentType.JSON));
+                bulkRequest.add(new IndexRequest(indexName, typeName).source(bulkItem.getBytes(StandardCharsets.UTF_8), XContentType.JSON));
             }
             BulkResponse bulkResponse;
             try {
-                bulkResponse = builder.execute().get();
+                bulkResponse = client.execute(NoopBulkAction.INSTANCE, bulkRequest).get();
             } catch (InterruptedException e) {
                 Thread.currentThread().interrupt();
                 return false;
@@ -108,11 +109,12 @@ public final class TransportClientBenchmark extends AbstractBenchmark<TransportClient> {
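The body of this last hunk did not survive extraction. Judging by the import changes (`SearchRequest` and `SearchSourceBuilder` added, `NoopSearchRequestBuilder` removed), the search executor is converted the same way as the bulk executor above. A hedged sketch of the resulting call pattern, with assumed local names, not the verbatim diff:

    // Build the SearchRequest directly instead of going through the deleted builder ...
    SearchRequest searchRequest = new SearchRequest(indexName);
    searchRequest.source(new SearchSourceBuilder().query(QueryBuilders.wrapperQuery(source)));
    // ... then execute it against the noop action, mirroring the bulk change above.
    SearchResponse searchResponse = client.execute(NoopSearchAction.INSTANCE, searchRequest).get();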
diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkRequestBuilder.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkRequestBuilder.java
deleted file mode 100644
--- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkRequestBuilder.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.plugin.noop.action.bulk;
-
-import org.elasticsearch.action.ActionRequestBuilder;
-import org.elasticsearch.action.bulk.BulkRequest;
-import org.elasticsearch.action.bulk.BulkResponse;
-import org.elasticsearch.action.delete.DeleteRequest;
-import org.elasticsearch.action.delete.DeleteRequestBuilder;
-import org.elasticsearch.action.index.IndexRequest;
-import org.elasticsearch.action.index.IndexRequestBuilder;
-import org.elasticsearch.action.support.ActiveShardCount;
-import org.elasticsearch.action.support.WriteRequestBuilder;
-import org.elasticsearch.action.support.replication.ReplicationRequest;
-import org.elasticsearch.action.update.UpdateRequest;
-import org.elasticsearch.action.update.UpdateRequestBuilder;
-import org.elasticsearch.client.ElasticsearchClient;
-import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.XContentType;
-
-public class NoopBulkRequestBuilder extends ActionRequestBuilder<BulkRequest, BulkResponse>
-    implements WriteRequestBuilder<NoopBulkRequestBuilder> {
-
-    public NoopBulkRequestBuilder(ElasticsearchClient client, NoopBulkAction action) {
-        super(client, action, new BulkRequest());
-    }
-
-    /**
-     * Adds an {@link IndexRequest} to the list of actions to execute. Follows the same behavior of {@link IndexRequest}
-     * (for example, if no id is provided, one will be generated, or usage of the create flag).
-     */
-    public NoopBulkRequestBuilder add(IndexRequest request) {
-        super.request.add(request);
-        return this;
-    }
-
-    /**
-     * Adds an {@link IndexRequest} to the list of actions to execute. Follows the same behavior of {@link IndexRequest}
-     * (for example, if no id is provided, one will be generated, or usage of the create flag).
-     */
-    public NoopBulkRequestBuilder add(IndexRequestBuilder request) {
-        super.request.add(request.request());
-        return this;
-    }
-
-    /**
-     * Adds an {@link DeleteRequest} to the list of actions to execute.
-     */
-    public NoopBulkRequestBuilder add(DeleteRequest request) {
-        super.request.add(request);
-        return this;
-    }
-
-    /**
-     * Adds an {@link DeleteRequest} to the list of actions to execute.
-     */
-    public NoopBulkRequestBuilder add(DeleteRequestBuilder request) {
-        super.request.add(request.request());
-        return this;
-    }
-
-
-    /**
-     * Adds an {@link UpdateRequest} to the list of actions to execute.
-     */
-    public NoopBulkRequestBuilder add(UpdateRequest request) {
-        super.request.add(request);
-        return this;
-    }
-
-    /**
-     * Adds an {@link UpdateRequest} to the list of actions to execute.
-     */
-    public NoopBulkRequestBuilder add(UpdateRequestBuilder request) {
-        super.request.add(request.request());
-        return this;
-    }
-
-    /**
-     * Adds a framed data in binary format
-     */
-    public NoopBulkRequestBuilder add(byte[] data, int from, int length, XContentType xContentType) throws Exception {
-        request.add(data, from, length, null, null, xContentType);
-        return this;
-    }
-
-    /**
-     * Adds a framed data in binary format
-     */
-    public NoopBulkRequestBuilder add(byte[] data, int from, int length, @Nullable String defaultIndex, @Nullable String defaultType,
-                                      XContentType xContentType) throws Exception {
-        request.add(data, from, length, defaultIndex, defaultType, xContentType);
-        return this;
-    }
-
-    /**
-     * Sets the number of shard copies that must be active before proceeding with the write.
-     * See {@link ReplicationRequest#waitForActiveShards(ActiveShardCount)} for details.
-     */
-    public NoopBulkRequestBuilder setWaitForActiveShards(ActiveShardCount waitForActiveShards) {
-        request.waitForActiveShards(waitForActiveShards);
-        return this;
-    }
-
-    /**
-     * A shortcut for {@link #setWaitForActiveShards(ActiveShardCount)} where the numerical
-     * shard count is passed in, instead of having to first call {@link ActiveShardCount#from(int)}
-     * to get the ActiveShardCount.
-     */
-    public NoopBulkRequestBuilder setWaitForActiveShards(final int waitForActiveShards) {
-        return setWaitForActiveShards(ActiveShardCount.from(waitForActiveShards));
-    }
-
-    /**
-     * A timeout to wait if the index operation can't be performed immediately.
-     * Defaults to {@code 1m}.
-     */
-    public final NoopBulkRequestBuilder setTimeout(TimeValue timeout) {
-        request.timeout(timeout);
-        return this;
-    }
-
-    /**
-     * A timeout to wait if the index operation can't be performed immediately.
-     * Defaults to {@code 1m}.
-     */
-    public final NoopBulkRequestBuilder setTimeout(String timeout) {
-        request.timeout(timeout);
-        return this;
-    }
-
-    /**
-     * The number of actions currently in the bulk.
-     */
-    public int numberOfActions() {
-        return request.numberOfActions();
-    }
-}
diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java
deleted file mode 100644
index e66ef6208a6..00000000000
--- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java
+++ /dev/null
@@ -1,496 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.plugin.noop.action.search;
-
-import org.elasticsearch.action.ActionRequestBuilder;
-import org.elasticsearch.action.search.SearchRequest;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.search.SearchType;
-import org.elasticsearch.action.support.IndicesOptions;
-import org.elasticsearch.client.ElasticsearchClient;
-import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.script.Script;
-import org.elasticsearch.search.Scroll;
-import org.elasticsearch.search.aggregations.AggregationBuilder;
-import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
-import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
-import org.elasticsearch.search.rescore.RescorerBuilder;
-import org.elasticsearch.search.slice.SliceBuilder;
-import org.elasticsearch.search.sort.SortBuilder;
-import org.elasticsearch.search.sort.SortOrder;
-import org.elasticsearch.search.suggest.SuggestBuilder;
-
-import java.util.Arrays;
-import java.util.List;
-
-public class NoopSearchRequestBuilder extends ActionRequestBuilder<SearchRequest, SearchResponse> {
-
-    public NoopSearchRequestBuilder(ElasticsearchClient client, NoopSearchAction action) {
-        super(client, action, new SearchRequest());
-    }
-
-    /**
-     * Sets the indices the search will be executed on.
-     */
-    public NoopSearchRequestBuilder setIndices(String... indices) {
-        request.indices(indices);
-        return this;
-    }
-
-    /**
-     * The document types to execute the search against. Defaults to be executed against
-     * all types.
-     */
-    public NoopSearchRequestBuilder setTypes(String... types) {
-        request.types(types);
-        return this;
-    }
-
-    /**
-     * The search type to execute, defaults to {@link org.elasticsearch.action.search.SearchType#DEFAULT}.
-     */
-    public NoopSearchRequestBuilder setSearchType(SearchType searchType) {
-        request.searchType(searchType);
-        return this;
-    }
-
-    /**
-     * The a string representation search type to execute, defaults to {@link org.elasticsearch.action.search.SearchType#DEFAULT}. Can be
-     * one of "dfs_query_then_fetch"/"dfsQueryThenFetch", "dfs_query_and_fetch"/"dfsQueryAndFetch",
-     * "query_then_fetch"/"queryThenFetch", and "query_and_fetch"/"queryAndFetch".
-     */
-    public NoopSearchRequestBuilder setSearchType(String searchType) {
-        request.searchType(searchType);
-        return this;
-    }
-
-    /**
-     * If set, will enable scrolling of the search request.
-     */
-    public NoopSearchRequestBuilder setScroll(Scroll scroll) {
-        request.scroll(scroll);
-        return this;
-    }
-
-    /**
-     * If set, will enable scrolling of the search request for the specified timeout.
-     */
-    public NoopSearchRequestBuilder setScroll(TimeValue keepAlive) {
-        request.scroll(keepAlive);
-        return this;
-    }
-
-    /**
-     * If set, will enable scrolling of the search request for the specified timeout.
-     */
-    public NoopSearchRequestBuilder setScroll(String keepAlive) {
-        request.scroll(keepAlive);
-        return this;
-    }
-
-    /**
-     * An optional timeout to control how long search is allowed to take.
-     */
-    public NoopSearchRequestBuilder setTimeout(TimeValue timeout) {
-        sourceBuilder().timeout(timeout);
-        return this;
-    }
-
-    /**
-     * An optional document count, upon collecting which the search
-     * query will early terminate
-     */
-    public NoopSearchRequestBuilder setTerminateAfter(int terminateAfter) {
-        sourceBuilder().terminateAfter(terminateAfter);
-        return this;
-    }
-
-    /**
-     * A comma separated list of routing values to control the shards the search will be executed on.
-     */
-    public NoopSearchRequestBuilder setRouting(String routing) {
-        request.routing(routing);
-        return this;
-    }
-
-    /**
-     * The routing values to control the shards that the search will be executed on.
-     */
-    public NoopSearchRequestBuilder setRouting(String... routing) {
-        request.routing(routing);
-        return this;
-    }
-
-    /**
-     * Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
-     * {@code _local} to prefer local shards or a custom value, which guarantees that the same order
-     * will be used across different requests.
-     */
-    public NoopSearchRequestBuilder setPreference(String preference) {
-        request.preference(preference);
-        return this;
-    }
-
-    /**
-     * Specifies what type of requested indices to ignore and wildcard indices expressions.
-     * <p>
-     * For example indices that don't exist.
-     */
-    public NoopSearchRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) {
-        request().indicesOptions(indicesOptions);
-        return this;
-    }
-
-    /**
-     * Constructs a new search source builder with a search query.
-     *
-     * @see org.elasticsearch.index.query.QueryBuilders
-     */
-    public NoopSearchRequestBuilder setQuery(QueryBuilder queryBuilder) {
-        sourceBuilder().query(queryBuilder);
-        return this;
-    }
-
-    /**
-     * Sets a filter that will be executed after the query has been executed and only has affect on the search hits
-     * (not aggregations). This filter is always executed as last filtering mechanism.
-     */
-    public NoopSearchRequestBuilder setPostFilter(QueryBuilder postFilter) {
-        sourceBuilder().postFilter(postFilter);
-        return this;
-    }
-
-    /**
-     * Sets the minimum score below which docs will be filtered out.
-     */
-    public NoopSearchRequestBuilder setMinScore(float minScore) {
-        sourceBuilder().minScore(minScore);
-        return this;
-    }
-
-    /**
-     * From index to start the search from. Defaults to {@code 0}.
-     */
-    public NoopSearchRequestBuilder setFrom(int from) {
-        sourceBuilder().from(from);
-        return this;
-    }
-
-    /**
-     * The number of search hits to return. Defaults to {@code 10}.
-     */
-    public NoopSearchRequestBuilder setSize(int size) {
-        sourceBuilder().size(size);
-        return this;
-    }
-
-    /**
-     * Should each {@link org.elasticsearch.search.SearchHit} be returned with an
-     * explanation of the hit (ranking).
-     */
-    public NoopSearchRequestBuilder setExplain(boolean explain) {
-        sourceBuilder().explain(explain);
-        return this;
-    }
-
-    /**
-     * Should each {@link org.elasticsearch.search.SearchHit} be returned with its
-     * version.
-     */
-    public NoopSearchRequestBuilder setVersion(boolean version) {
-        sourceBuilder().version(version);
-        return this;
-    }
-
-    /**
-     * Sets the boost a specific index will receive when the query is executed against it.
-     *
-     * @param index      The index to apply the boost against
-     * @param indexBoost The boost to apply to the index
-     */
-    public NoopSearchRequestBuilder addIndexBoost(String index, float indexBoost) {
-        sourceBuilder().indexBoost(index, indexBoost);
-        return this;
-    }
-
-    /**
-     * The stats groups this request will be aggregated under.
-     */
-    public NoopSearchRequestBuilder setStats(String... statsGroups) {
-        sourceBuilder().stats(Arrays.asList(statsGroups));
-        return this;
-    }
-
-    /**
-     * The stats groups this request will be aggregated under.
-     */
-    public NoopSearchRequestBuilder setStats(List<String> statsGroups) {
-        sourceBuilder().stats(statsGroups);
-        return this;
-    }
-
-    /**
-     * Indicates whether the response should contain the stored _source for every hit
-     */
-    public NoopSearchRequestBuilder setFetchSource(boolean fetch) {
-        sourceBuilder().fetchSource(fetch);
-        return this;
-    }
-
-    /**
-     * Indicate that _source should be returned with every hit, with an "include" and/or "exclude" set which can include simple wildcard
-     * elements.
-     *
-     * @param include An optional include (optionally wildcarded) pattern to filter the returned _source
-     * @param exclude An optional exclude (optionally wildcarded) pattern to filter the returned _source
-     */
-    public NoopSearchRequestBuilder setFetchSource(@Nullable String include, @Nullable String exclude) {
-        sourceBuilder().fetchSource(include, exclude);
-        return this;
-    }
-
-    /**
-     * Indicate that _source should be returned with every hit, with an "include" and/or "exclude" set which can include simple wildcard
-     * elements.
-     *
-     * @param includes An optional list of include (optionally wildcarded) pattern to filter the returned _source
-     * @param excludes An optional list of exclude (optionally wildcarded) pattern to filter the returned _source
-     */
-    public NoopSearchRequestBuilder setFetchSource(@Nullable String[] includes, @Nullable String[] excludes) {
-        sourceBuilder().fetchSource(includes, excludes);
-        return this;
-    }
-
-    /**
-     * Adds a docvalue based field to load and return. The field does not have to be stored,
-     * but its recommended to use non analyzed or numeric fields.
-     *
-     * @param name The field to get from the docvalue
-     */
-    public NoopSearchRequestBuilder addDocValueField(String name) {
-        sourceBuilder().docValueField(name);
-        return this;
-    }
-
-    /**
-     * Adds a stored field to load and return (note, it must be stored) as part of the search request.
-     * If none are specified, the source of the document will be return.
-     */
-    public NoopSearchRequestBuilder addStoredField(String field) {
-        sourceBuilder().storedField(field);
-        return this;
-    }
-
-
-    /**
-     * Adds a script based field to load and return. The field does not have to be stored,
-     * but its recommended to use non analyzed or numeric fields.
-     *
-     * @param name   The name that will represent this value in the return hit
-     * @param script The script to use
-     */
-    public NoopSearchRequestBuilder addScriptField(String name, Script script) {
-        sourceBuilder().scriptField(name, script);
-        return this;
-    }
-
-    /**
-     * Adds a sort against the given field name and the sort ordering.
-     *
-     * @param field The name of the field
-     * @param order The sort ordering
-     */
-    public NoopSearchRequestBuilder addSort(String field, SortOrder order) {
-        sourceBuilder().sort(field, order);
-        return this;
-    }
-
-    /**
-     * Adds a generic sort builder.
-     *
-     * @see org.elasticsearch.search.sort.SortBuilders
-     */
-    public NoopSearchRequestBuilder addSort(SortBuilder<?> sort) {
-        sourceBuilder().sort(sort);
-        return this;
-    }
-
-    /**
-     * Set the sort values that indicates which docs this request should "search after".
-     */
-    public NoopSearchRequestBuilder searchAfter(Object[] values) {
-        sourceBuilder().searchAfter(values);
-        return this;
-    }
-
-    public NoopSearchRequestBuilder slice(SliceBuilder builder) {
-        sourceBuilder().slice(builder);
-        return this;
-    }
-
-    /**
-     * Applies when sorting, and controls if scores will be tracked as well. Defaults to
-     * {@code false}.
-     */
-    public NoopSearchRequestBuilder setTrackScores(boolean trackScores) {
-        sourceBuilder().trackScores(trackScores);
-        return this;
-    }
-
-
-    /**
-     * Sets the fields to load and return as part of the search request. If none
-     * are specified, the source of the document will be returned.
-     */
-    public NoopSearchRequestBuilder storedFields(String... fields) {
-        sourceBuilder().storedFields(Arrays.asList(fields));
-        return this;
-    }
-
-    /**
-     * Adds an aggregation to the search operation.
-     */
-    public NoopSearchRequestBuilder addAggregation(AggregationBuilder aggregation) {
-        sourceBuilder().aggregation(aggregation);
-        return this;
-    }
-
-    /**
-     * Adds an aggregation to the search operation.
-     */
-    public NoopSearchRequestBuilder addAggregation(PipelineAggregationBuilder aggregation) {
-        sourceBuilder().aggregation(aggregation);
-        return this;
-    }
-
-    public NoopSearchRequestBuilder highlighter(HighlightBuilder highlightBuilder) {
-        sourceBuilder().highlighter(highlightBuilder);
-        return this;
-    }
-
-    /**
-     * Delegates to {@link org.elasticsearch.search.builder.SearchSourceBuilder#suggest(SuggestBuilder)}
-     */
-    public NoopSearchRequestBuilder suggest(SuggestBuilder suggestBuilder) {
-        sourceBuilder().suggest(suggestBuilder);
-        return this;
-    }
-
-    /**
-     * Clears all rescorers on the builder and sets the first one. To use multiple rescore windows use
-     * {@link #addRescorer(org.elasticsearch.search.rescore.RescorerBuilder, int)}.
-     *
-     * @param rescorer rescorer configuration
-     * @return this for chaining
-     */
-    public NoopSearchRequestBuilder setRescorer(RescorerBuilder<?> rescorer) {
-        sourceBuilder().clearRescorers();
-        return addRescorer(rescorer);
-    }
-
-    /**
-     * Clears all rescorers on the builder and sets the first one. To use multiple rescore windows use
-     * {@link #addRescorer(org.elasticsearch.search.rescore.RescorerBuilder, int)}.
-     *
-     * @param rescorer rescorer configuration
-     * @param window   rescore window
-     * @return this for chaining
-     */
-    public NoopSearchRequestBuilder setRescorer(RescorerBuilder<?> rescorer, int window) {
-        sourceBuilder().clearRescorers();
-        return addRescorer(rescorer.windowSize(window));
-    }
-
-    /**
-     * Adds a new rescorer.
-     *
-     * @param rescorer rescorer configuration
-     * @return this for chaining
-     */
-    public NoopSearchRequestBuilder addRescorer(RescorerBuilder<?> rescorer) {
-        sourceBuilder().addRescorer(rescorer);
-        return this;
-    }
-
-    /**
-     * Adds a new rescorer.
-     *
-     * @param rescorer rescorer configuration
-     * @param window   rescore window
-     * @return this for chaining
-     */
-    public NoopSearchRequestBuilder addRescorer(RescorerBuilder<?> rescorer, int window) {
-        sourceBuilder().addRescorer(rescorer.windowSize(window));
-        return this;
-    }
-
-    /**
-     * Clears all rescorers from the builder.
-     *
-     * @return this for chaining
-     */
-    public NoopSearchRequestBuilder clearRescorers() {
-        sourceBuilder().clearRescorers();
-        return this;
-    }
-
-    /**
-     * Sets the source of the request as a SearchSourceBuilder.
-     */
-    public NoopSearchRequestBuilder setSource(SearchSourceBuilder source) {
-        request.source(source);
-        return this;
-    }
-
-    /**
-     * Sets if this request should use the request cache or not, assuming that it can (for
-     * example, if "now" is used, it will never be cached). By default (not set, or null,
-     * will default to the index level setting if request cache is enabled or not).
-     */
-    public NoopSearchRequestBuilder setRequestCache(Boolean requestCache) {
-        request.requestCache(requestCache);
-        return this;
-    }
-
-    /**
-     * Should the query be profiled. Defaults to false
-     */
-    public NoopSearchRequestBuilder setProfile(boolean profile) {
-        sourceBuilder().profile(profile);
-        return this;
-    }
-
-    @Override
-    public String toString() {
-        if (request.source() != null) {
-            return request.source().toString();
-        }
-        return new SearchSourceBuilder().toString();
-    }
-
-    private SearchSourceBuilder sourceBuilder() {
-        if (request.source() == null) {
-            request.source(new SearchSourceBuilder());
-        }
-        return request.source();
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index f0cff9a3158..6791b5f8259 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -70,6 +70,7 @@ import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.VersionType;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.rankeval.RankEvalRequest;
 import org.elasticsearch.index.reindex.AbstractBulkByScrollRequest;
 import org.elasticsearch.index.reindex.DeleteByQueryRequest;
@@ -316,7 +317,9 @@ final class RequestConverters {
     }
 
     static Request update(UpdateRequest updateRequest) throws IOException {
-        String endpoint = endpoint(updateRequest.index(), updateRequest.type(), updateRequest.id(), "_update");
+        String endpoint = updateRequest.type().equals(MapperService.SINGLE_MAPPING_NAME)
+            ? endpoint(updateRequest.index(), "_update", updateRequest.id())
+            : endpoint(updateRequest.index(), updateRequest.type(), updateRequest.id(), "_update");
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
 
         Params parameters = new Params(request);
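The converter change above routes typeless update requests to the new endpoint while keeping the deprecated typed form; `MapperService.SINGLE_MAPPING_NAME` is `_doc`, the default type used by the typeless `UpdateRequest` constructors. Expected endpoints, matching the assertions in `RequestConvertersTests` later in this diff:

    UpdateRequest typeless = new UpdateRequest("posts", "1");
    // -> POST /posts/_update/1

    UpdateRequest typed = new UpdateRequest("posts", "blog", "1");
    // -> POST /posts/blog/1/_update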
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java
index d02c15b5970..48a1cdb7782 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java
@@ -50,6 +50,8 @@ import org.elasticsearch.client.security.GetSslCertificatesRequest;
 import org.elasticsearch.client.security.GetSslCertificatesResponse;
 import org.elasticsearch.client.security.GetUserPrivilegesRequest;
 import org.elasticsearch.client.security.GetUserPrivilegesResponse;
+import org.elasticsearch.client.security.GetUsersRequest;
+import org.elasticsearch.client.security.GetUsersResponse;
 import org.elasticsearch.client.security.HasPrivilegesRequest;
 import org.elasticsearch.client.security.HasPrivilegesResponse;
 import org.elasticsearch.client.security.InvalidateTokenRequest;
@@ -81,6 +83,33 @@ public final class SecurityClient {
         this.restHighLevelClient = restHighLevelClient;
     }
 
+    /**
+     * Get a user, or list of users, in the native realm synchronously.
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user.html">
+     * the docs</a> for more information.
+     * @param request the request with the user's name
+     * @param options the request options (e.g., headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response from the get users call
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public GetUsersResponse getUsers(GetUsersRequest request, RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(request, SecurityRequestConverters::getUsers, options,
+            GetUsersResponse::fromXContent, emptySet());
+    }
+
+    /**
+     * Get a user, or list of users, in the native realm asynchronously.
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user.html">
+     * the docs</a> for more information.
+     * @param request the request with the user's name
+     * @param options the request options (e.g., headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon request completion
+     */
+    public void getUsersAsync(GetUsersRequest request, RequestOptions options, ActionListener<GetUsersResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::getUsers, options,
+            GetUsersResponse::fromXContent, listener, emptySet());
+    }
+
     /**
      * Create/update a user in the native realm synchronously.
      * See
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java
index aa09b9596a8..9e9698ded1c 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java
@@ -36,6 +36,7 @@ import org.elasticsearch.client.security.EnableUserRequest;
 import org.elasticsearch.client.security.GetPrivilegesRequest;
 import org.elasticsearch.client.security.GetRoleMappingsRequest;
 import org.elasticsearch.client.security.GetRolesRequest;
+import org.elasticsearch.client.security.GetUsersRequest;
 import org.elasticsearch.client.security.HasPrivilegesRequest;
 import org.elasticsearch.client.security.InvalidateTokenRequest;
 import org.elasticsearch.client.security.PutPrivilegesRequest;
@@ -67,6 +68,15 @@ final class SecurityRequestConverters {
         return request;
     }
 
+    static Request getUsers(GetUsersRequest getUsersRequest) {
+        RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder()
+            .addPathPartAsIs("_security/user");
+        if (getUsersRequest.getUsernames().size() > 0) {
+            builder.addPathPart(Strings.collectionToCommaDelimitedString(getUsersRequest.getUsernames()));
+        }
+        return new Request(HttpGet.METHOD_NAME, builder.build());
+    }
+
     static Request putUser(PutUserRequest putUserRequest) throws IOException {
         String endpoint = new RequestConverters.EndpointBuilder()
             .addPathPartAsIs("_security/user")
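The two additions above complete the high-level Get Users round trip: the converter builds `GET /_security/user`, optionally suffixed with a comma-separated name list, and the client parses the body into a `GetUsersResponse`. A hedged usage sketch, assuming an already-built `RestHighLevelClient` named `client`:

    // An empty request returns all native-realm users; names narrow the lookup.
    GetUsersRequest request = new GetUsersRequest("jacknich");
    GetUsersResponse response = client.security().getUsers(request, RequestOptions.DEFAULT);
    for (User user : response.getUsers()) {
        boolean enabled = response.getEnabledUsers().contains(user);
        // ... inspect user.getUsername(), roles, metadata ...
    }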
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/GetUsersRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/GetUsersRequest.java
new file mode 100644
index 00000000000..0a6b5e9bb25
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/GetUsersRequest.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.security;
+
+import org.elasticsearch.client.Validatable;
+import org.elasticsearch.common.util.set.Sets;
+
+import java.util.Collections;
+import java.util.Objects;
+import java.util.Set;
+
+/**
+ * Request object to retrieve users from the native realm
+ */
+public class GetUsersRequest implements Validatable {
+    private final Set<String> usernames;
+
+    public GetUsersRequest(final String... usernames) {
+        if (usernames != null) {
+            this.usernames = Collections.unmodifiableSet(Sets.newHashSet(usernames));
+        } else {
+            this.usernames = Collections.emptySet();
+        }
+    }
+
+    public Set<String> getUsernames() {
+        return usernames;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (!(o instanceof GetUsersRequest)) return false;
+        GetUsersRequest that = (GetUsersRequest) o;
+        return Objects.equals(usernames, that.usernames);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(usernames);
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/GetUsersResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/GetUsersResponse.java
new file mode 100644
index 00000000000..107b93afe7c
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/GetUsersResponse.java
@@ -0,0 +1,134 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.security;
+
+import org.elasticsearch.client.security.user.User;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParser.Token;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+
+/**
+ * Response when requesting zero or more users.
+ * Returns a List of {@link User} objects
+ */
+public class GetUsersResponse {
+    private final Set<User> users;
+    private final Set<User> enabledUsers;
+
+    public GetUsersResponse(Set<User> users, Set<User> enabledUsers) {
+        this.users = Collections.unmodifiableSet(users);
+        this.enabledUsers = Collections.unmodifiableSet(enabledUsers);
+    }
+
+    public Set<User> getUsers() {
+        return users;
+    }
+
+    public Set<User> getEnabledUsers() {
+        return enabledUsers;
+    }
+
+    public static GetUsersResponse fromXContent(XContentParser parser) throws IOException {
+        XContentParserUtils.ensureExpectedToken(Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
+        final Set<User> users = new HashSet<>();
+        final Set<User> enabledUsers = new HashSet<>();
+        Token token;
+        while ((token = parser.nextToken()) != Token.END_OBJECT) {
+            XContentParserUtils.ensureExpectedToken(Token.FIELD_NAME, token, parser::getTokenLocation);
+            ParsedUser parsedUser = USER_PARSER.parse(parser, parser.currentName());
+            users.add(parsedUser.user);
+            if (parsedUser.enabled) {
+                enabledUsers.add(parsedUser.user);
+            }
+        }
+        return new GetUsersResponse(users, enabledUsers);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (!(o instanceof GetUsersResponse)) return false;
+        GetUsersResponse that = (GetUsersResponse) o;
+        return Objects.equals(users, that.users);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(users);
+    }
+
+    public static final ParseField USERNAME = new ParseField("username");
+    public static final ParseField ROLES = new ParseField("roles");
+    public static final ParseField FULL_NAME = new ParseField("full_name");
+    public static final ParseField EMAIL = new ParseField("email");
+    public static final ParseField METADATA = new ParseField("metadata");
+    public static final ParseField ENABLED = new ParseField("enabled");
+
+    @SuppressWarnings("unchecked")
+    public static final ConstructingObjectParser<ParsedUser, String> USER_PARSER = new ConstructingObjectParser<>("user_info",
+        (constructorObjects) -> {
+            int i = 0;
+            final String username = (String) constructorObjects[i++];
+            final Collection<String> roles = (Collection<String>) constructorObjects[i++];
+            final Map<String, Object> metadata = (Map<String, Object>) constructorObjects[i++];
+            final Boolean enabled = (Boolean) constructorObjects[i++];
+            final String fullName = (String) constructorObjects[i++];
+            final String email = (String) constructorObjects[i++];
+            return new ParsedUser(username, roles, metadata, enabled, fullName, email);
+        });
+
+    static {
+        USER_PARSER.declareString(constructorArg(), USERNAME);
+        USER_PARSER.declareStringArray(constructorArg(), ROLES);
+        USER_PARSER.declareObject(constructorArg(), (parser, c) -> parser.map(), METADATA);
+        USER_PARSER.declareBoolean(constructorArg(), ENABLED);
+        USER_PARSER.declareStringOrNull(optionalConstructorArg(), FULL_NAME);
+        USER_PARSER.declareStringOrNull(optionalConstructorArg(), EMAIL);
+    }
+
+    protected static final class ParsedUser {
+        protected User user;
+        protected boolean enabled;
+
+        public ParsedUser(String username, Collection<String> roles, Map<String, Object> metadata, Boolean enabled,
+                          @Nullable String fullName, @Nullable String email) {
+            String checkedUsername = username = Objects.requireNonNull(username, "`username` is required, cannot be null");
+            Collection<String> checkedRoles = Collections.unmodifiableSet(new HashSet<>(
+                Objects.requireNonNull(roles, "`roles` is required, cannot be null. Pass an empty Collection instead.")));
+            Map<String, Object> checkedMetadata = Collections
+                .unmodifiableMap(Objects.requireNonNull(metadata, "`metadata` is required, cannot be null. Pass an empty map instead."));
+            this.user = new User(checkedUsername, checkedRoles, checkedMetadata, fullName, email);
+            this.enabled = enabled;
+        }
+    }
+}
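For reference, `fromXContent` above walks a top-level object keyed by username and hands each value to `USER_PARSER`. The body it expects looks roughly like this (field values illustrative, shape per the Get Users API docs):

    {
      "jacknich" : {
        "username" : "jacknich",
        "roles" : [ "admin", "other_role1" ],
        "full_name" : "Jack Nicholson",
        "email" : "jacknich@example.com",
        "metadata" : { "intelligence" : 7 },
        "enabled" : true
      }
    }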
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/user/User.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/user/User.java
index ba6cd5f2f8e..4ac8f54c474 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/user/User.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/user/User.java
@@ -29,7 +29,6 @@ import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
 
-
 /**
  * A user to be utilized with security APIs.
  * Can be an existing authenticated user or it can be a new user to be enrolled to the native realm.
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java
index c82988a98ad..6fa65046672 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java
@@ -65,6 +65,10 @@ import org.elasticsearch.index.reindex.DeleteByQueryRequest;
 import org.elasticsearch.index.reindex.UpdateByQueryAction;
 import org.elasticsearch.index.reindex.UpdateByQueryRequest;
 import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.rest.action.document.RestDeleteAction;
+import org.elasticsearch.rest.action.document.RestGetAction;
+import org.elasticsearch.rest.action.document.RestMultiGetAction;
+import org.elasticsearch.rest.action.document.RestUpdateAction;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
@@ -173,6 +177,23 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    public void testDeleteWithTypes() throws IOException {
+        String docId = "id";
+        highLevelClient().index(new IndexRequest("index", "type", docId)
+            .source(Collections.singletonMap("foo", "bar")), RequestOptions.DEFAULT);
+
+        DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId);
+        DeleteResponse deleteResponse = execute(deleteRequest,
+            highLevelClient()::delete,
+            highLevelClient()::deleteAsync,
+            expectWarnings(RestDeleteAction.TYPES_DEPRECATION_MESSAGE));
+
+        assertEquals("index", deleteResponse.getIndex());
+        assertEquals("type", deleteResponse.getType());
+        assertEquals(docId, deleteResponse.getId());
+        assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
+    }
+
     public void testExists() throws IOException {
         {
             GetRequest getRequest = new GetRequest("index", "id");
@@ -331,6 +352,29 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    public void testGetWithTypes() throws IOException {
+        String document = "{\"field\":\"value\"}";
+        IndexRequest index = new IndexRequest("index", "type", "id");
+        index.source(document, XContentType.JSON);
+        index.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
+        highLevelClient().index(index, RequestOptions.DEFAULT);
+
+        GetRequest getRequest = new GetRequest("index", "type", "id");
+        GetResponse getResponse = execute(getRequest,
+            highLevelClient()::get,
+            highLevelClient()::getAsync,
+            expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
+
+        assertEquals("index", getResponse.getIndex());
+        assertEquals("type", getResponse.getType());
+        assertEquals("id", getResponse.getId());
+
+        assertTrue(getResponse.isExists());
+        assertFalse(getResponse.isSourceEmpty());
+        assertEquals(1L, getResponse.getVersion());
+        assertEquals(document, getResponse.getSourceAsString());
+    }
+
     public void testMultiGet() throws IOException {
         {
             MultiGetRequest multiGetRequest = new MultiGetRequest();
@@ -387,6 +431,36 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    public void testMultiGetWithTypes() throws IOException {
+        BulkRequest bulk = new BulkRequest();
+        bulk.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
+        bulk.add(new IndexRequest("index", "type", "id1")
+            .source("{\"field\":\"value1\"}", XContentType.JSON));
+        bulk.add(new IndexRequest("index", "type", "id2")
+            .source("{\"field\":\"value2\"}", XContentType.JSON));
+
+        highLevelClient().bulk(bulk, RequestOptions.DEFAULT);
+        MultiGetRequest multiGetRequest = new MultiGetRequest();
+        multiGetRequest.add("index", "id1");
+        multiGetRequest.add("index", "type", "id2");
+
+        MultiGetResponse response = execute(multiGetRequest,
+            highLevelClient()::mget,
+            highLevelClient()::mgetAsync,
+            expectWarnings(RestMultiGetAction.TYPES_DEPRECATION_MESSAGE));
+        assertEquals(2, response.getResponses().length);
+
+        GetResponse firstResponse = response.getResponses()[0].getResponse();
+        assertEquals("index", firstResponse.getIndex());
+        assertEquals("type", firstResponse.getType());
+        assertEquals("id1", firstResponse.getId());
+
+        GetResponse secondResponse = response.getResponses()[1].getResponse();
+        assertEquals("index", secondResponse.getIndex());
+        assertEquals("type", secondResponse.getType());
+        assertEquals("id2", secondResponse.getId());
+    }
+
     public void testIndex() throws IOException {
         final XContentType xContentType = randomFrom(XContentType.values());
         {
@@ -492,7 +566,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
 
     public void testUpdate() throws IOException {
         {
-            UpdateRequest updateRequest = new UpdateRequest("index", "_doc", "does_not_exist");
+            UpdateRequest updateRequest = new UpdateRequest("index", "does_not_exist");
             updateRequest.doc(singletonMap("field", "value"), randomFrom(XContentType.values()));
 
             ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () ->
@@ -507,14 +581,14 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
             assertEquals(RestStatus.CREATED, indexResponse.status());
 
-            UpdateRequest updateRequest = new UpdateRequest("index", "_doc", "id");
+            UpdateRequest updateRequest = new UpdateRequest("index", "id");
             updateRequest.doc(singletonMap("field", "updated"), randomFrom(XContentType.values()));
 
             UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
             assertEquals(RestStatus.OK, updateResponse.status());
             assertEquals(indexResponse.getVersion() + 1, updateResponse.getVersion());
 
-            UpdateRequest updateRequestConflict = new UpdateRequest("index", "_doc", "id");
+            UpdateRequest updateRequestConflict = new UpdateRequest("index", "id");
             updateRequestConflict.doc(singletonMap("field", "with_version_conflict"), randomFrom(XContentType.values()));
             updateRequestConflict.version(indexResponse.getVersion());
 
@@ -530,7 +604,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
             assertEquals(RestStatus.CREATED, indexResponse.status());
 
-            UpdateRequest updateRequest = new UpdateRequest("index", "_doc", "with_script");
+            UpdateRequest updateRequest = new UpdateRequest("index", "with_script");
             Script script = new Script(ScriptType.INLINE, "painless", "ctx._source.counter += params.count", singletonMap("count", 8));
             updateRequest.script(script);
             updateRequest.fetchSource(true);
@@ -551,7 +625,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             assertEquals(RestStatus.CREATED, indexResponse.status());
             assertEquals(12L, indexResponse.getVersion());
 
-            UpdateRequest updateRequest = new UpdateRequest("index", "_doc", "with_doc");
+            UpdateRequest updateRequest = new UpdateRequest("index", "with_doc");
             updateRequest.doc(singletonMap("field_2", "two"), randomFrom(XContentType.values()));
             updateRequest.fetchSource("field_*", "field_3");
 
@@ -573,7 +647,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             assertEquals(RestStatus.CREATED, indexResponse.status());
             assertEquals(1L, indexResponse.getVersion());
 
-            UpdateRequest updateRequest = new UpdateRequest("index", "_doc", "noop");
+            UpdateRequest updateRequest = new UpdateRequest("index", "noop");
             updateRequest.doc(singletonMap("field", "value"), randomFrom(XContentType.values()));
 
             UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
@@ -589,7 +663,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             assertEquals(2L, updateResponse.getVersion());
         }
         {
-            UpdateRequest updateRequest = new UpdateRequest("index", "_doc", "with_upsert");
+            UpdateRequest updateRequest = new UpdateRequest("index", "with_upsert");
             updateRequest.upsert(singletonMap("doc_status", "created"));
             updateRequest.doc(singletonMap("doc_status", "updated"));
             updateRequest.fetchSource(true);
@@ -604,7 +678,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             assertEquals("created", getResult.sourceAsMap().get("doc_status"));
         }
         {
-            UpdateRequest updateRequest = new UpdateRequest("index", "_doc", "with_doc_as_upsert");
+            UpdateRequest updateRequest = new UpdateRequest("index", "with_doc_as_upsert");
             updateRequest.doc(singletonMap("field", "initialized"));
             updateRequest.fetchSource(true);
             updateRequest.docAsUpsert(true);
@@ -619,7 +693,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             assertEquals("initialized", getResult.sourceAsMap().get("field"));
         }
         {
-            UpdateRequest updateRequest = new UpdateRequest("index", "_doc", "with_scripted_upsert");
+            UpdateRequest updateRequest = new UpdateRequest("index", "with_scripted_upsert");
             updateRequest.fetchSource(true);
             updateRequest.script(new Script(ScriptType.INLINE, "painless", "ctx._source.level = params.test", singletonMap("test", "C")));
             updateRequest.scriptedUpsert(true);
@@ -637,7 +711,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
         }
         {
             IllegalStateException exception = expectThrows(IllegalStateException.class, () -> {
-                UpdateRequest updateRequest = new UpdateRequest("index", "_doc", "id");
+                UpdateRequest updateRequest = new UpdateRequest("index", "id");
                 updateRequest.doc(new IndexRequest().source(Collections.singletonMap("field", "doc"), XContentType.JSON));
                 updateRequest.upsert(new IndexRequest().source(Collections.singletonMap("field", "upsert"), XContentType.YAML));
                 execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
@@ -647,6 +721,22 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    public void testUpdateWithTypes() throws IOException {
+        IndexRequest indexRequest = new IndexRequest("index", "type", "id");
+        indexRequest.source(singletonMap("field", "value"));
+        IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
+
+        UpdateRequest updateRequest = new UpdateRequest("index", "type", "id");
+        updateRequest.doc(singletonMap("field", "updated"), randomFrom(XContentType.values()));
+        UpdateResponse updateResponse = execute(updateRequest,
+            highLevelClient()::update,
+            highLevelClient()::updateAsync,
+            expectWarnings(RestUpdateAction.TYPES_DEPRECATION_MESSAGE));
+
+        assertEquals(RestStatus.OK, updateResponse.status());
+        assertEquals(indexResponse.getVersion() + 1, updateResponse.getVersion());
+    }
+
     public void testBulk() throws IOException {
         int nbItems = randomIntBetween(10, 100);
         boolean[] errors = new boolean[nbItems];
@@ -687,7 +777,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
                 bulkRequest.add(createRequest);
 
             } else if (opType == DocWriteRequest.OpType.UPDATE) {
-                UpdateRequest updateRequest = new UpdateRequest("index", "_doc", id)
+                UpdateRequest updateRequest = new UpdateRequest("index", id)
                     .doc(new IndexRequest().source(source, xContentType));
                 if (erroneous == false) {
                     assertEquals(RestStatus.CREATED,
@@ -996,7 +1086,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
                 processor.add(createRequest);
 
             } else if (opType == DocWriteRequest.OpType.UPDATE) {
-                UpdateRequest updateRequest = new UpdateRequest("index", "_doc", id)
+                UpdateRequest updateRequest = new UpdateRequest("index", id)
                     .doc(new IndexRequest().source(xContentType, "id", i));
                 if (erroneous == false) {
                     assertEquals(RestStatus.CREATED,
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
index f0b918b033a..b50d2c1265e 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
@@ -630,10 +630,9 @@ public class RequestConvertersTests extends ESTestCase {
         Map<String, String> expectedParams = new HashMap<>();
 
         String index = randomAlphaOfLengthBetween(3, 10);
-        String type = randomAlphaOfLengthBetween(3, 10);
         String id = randomAlphaOfLengthBetween(3, 10);
 
-        UpdateRequest updateRequest = new UpdateRequest(index, type, id);
+        UpdateRequest updateRequest = new UpdateRequest(index, id);
 
         updateRequest.detectNoop(randomBoolean());
 
         if (randomBoolean()) {
@@ -687,7 +686,7 @@ public class RequestConvertersTests extends ESTestCase {
         }
 
         Request request = RequestConverters.update(updateRequest);
-        assertEquals("/" + index + "/" + type + "/" + id + "/_update", request.getEndpoint());
+        assertEquals("/" + index + "/_update/" + id, request.getEndpoint());
         assertEquals(expectedParams, request.getParameters());
         assertEquals(HttpPost.METHOD_NAME, request.getMethod());
 
@@ -718,6 +717,23 @@ public class RequestConvertersTests extends ESTestCase {
         }
     }
 
+    public void testUpdateWithType() throws IOException {
+        String index = randomAlphaOfLengthBetween(3, 10);
+        String type = randomAlphaOfLengthBetween(3, 10);
+        String id = randomAlphaOfLengthBetween(3, 10);
+
+        UpdateRequest updateRequest = new UpdateRequest(index, type, id);
+
+        XContentType xContentType = XContentType.JSON;
+        BytesReference source = RandomObjects.randomSource(random(), xContentType);
+        updateRequest.doc(new IndexRequest().source(source, xContentType));
+
+        Request request = RequestConverters.update(updateRequest);
+        assertEquals("/" + index + "/" + type + "/" + id + "/_update", request.getEndpoint());
+        assertEquals(HttpPost.METHOD_NAME, request.getMethod());
+        assertToXContentBody(updateRequest, request.getEntity());
+    }
+
     public void testUpdateWithDifferentContentTypes() {
         IllegalStateException exception = expectThrows(IllegalStateException.class, () -> {
             UpdateRequest updateRequest = new UpdateRequest();
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityIT.java
index 05a854299a6..abf65d19df3 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityIT.java
@@ -28,6 +28,8 @@ import org.elasticsearch.client.security.DeleteUserRequest;
 import org.elasticsearch.client.security.DeleteUserResponse;
 import org.elasticsearch.client.security.GetRolesRequest;
 import org.elasticsearch.client.security.GetRolesResponse;
+import org.elasticsearch.client.security.GetUsersRequest;
+import org.elasticsearch.client.security.GetUsersResponse;
 import org.elasticsearch.client.security.PutRoleRequest;
 import org.elasticsearch.client.security.PutRoleResponse;
 import org.elasticsearch.client.security.PutUserRequest;
@@ -42,6 +44,7 @@ import org.elasticsearch.client.security.user.privileges.IndicesPrivilegesTests;
 import org.elasticsearch.client.security.user.privileges.Role;
 import org.elasticsearch.common.CharArrays;
 
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Base64;
 import java.util.HashMap;
@@ -74,6 +77,22 @@ public class SecurityIT extends ESRestHighLevelClientTestCase {
         highLevelClient().getLowLevelClient().performRequest(deleteUserRequest);
     }
 
+    public void testGetUser() throws Exception {
+        final SecurityClient securityClient = highLevelClient().security();
+        // create user
+        final PutUserRequest putUserRequest = randomPutUserRequest(randomBoolean());
+        final PutUserResponse putUserResponse = execute(putUserRequest, securityClient::putUser, securityClient::putUserAsync);
+        // assert user created
+        assertThat(putUserResponse.isCreated(), is(true));
+        // get user
+        final GetUsersRequest getUsersRequest = new GetUsersRequest(putUserRequest.getUser().getUsername());
+        final GetUsersResponse getUsersResponse = execute(getUsersRequest, securityClient::getUsers, securityClient::getUsersAsync);
+        // assert user was correctly retrieved
+        ArrayList<User> users = new ArrayList<>();
+        users.addAll(getUsersResponse.getUsers());
+        assertThat(users.get(0), is(putUserRequest.getUser()));
+    }
+
     public void testAuthenticate() throws Exception {
         final SecurityClient securityClient = highLevelClient().security();
         // test fixture: put enabled user
@@ -89,6 +108,15 @@ public class SecurityIT extends ESRestHighLevelClientTestCase {
         assertThat(authenticateResponse.getUser(), is(putUserRequest.getUser()));
         assertThat(authenticateResponse.enabled(), is(true));
 
+        // get user
+        final GetUsersRequest getUsersRequest =
+            new GetUsersRequest(putUserRequest.getUser().getUsername());
+        final GetUsersResponse getUsersResponse =
+            execute(getUsersRequest, securityClient::getUsers, securityClient::getUsersAsync);
+        ArrayList<User> users = new ArrayList<>();
+        users.addAll(getUsersResponse.getUsers());
+        assertThat(users.get(0), is(putUserRequest.getUser()));
+
         // delete user
         final DeleteUserRequest deleteUserRequest = new DeleteUserRequest(putUserRequest.getUser().getUsername(),
             putUserRequest.getRefreshPolicy());
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityRequestConvertersTests.java
index 87c692d9f2a..900f4210a99 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityRequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityRequestConvertersTests.java
@@ -34,6 +34,7 @@ import org.elasticsearch.client.security.EnableUserRequest;
 import org.elasticsearch.client.security.GetPrivilegesRequest;
 import org.elasticsearch.client.security.GetRoleMappingsRequest;
 import org.elasticsearch.client.security.GetRolesRequest;
+import org.elasticsearch.client.security.GetUsersRequest;
 import org.elasticsearch.client.security.PutPrivilegesRequest;
 import org.elasticsearch.client.security.PutRoleMappingRequest;
 import org.elasticsearch.client.security.PutRoleRequest;
@@ -101,6 +102,21 @@ public class SecurityRequestConvertersTests extends ESTestCase {
         assertNull(request.getEntity());
     }
 
+    public void testGetUsers() {
+        final String[] users = randomArray(0, 5, String[]::new, () -> randomAlphaOfLength(5));
+        GetUsersRequest getUsersRequest = new GetUsersRequest(users);
+        Request request = SecurityRequestConverters.getUsers(getUsersRequest);
+        assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+        if (users.length == 0) {
+            assertEquals("/_security/user", request.getEndpoint());
+        } else {
+            assertEquals("/_security/user/" + Strings.collectionToCommaDelimitedString(getUsersRequest.getUsernames()),
+                request.getEndpoint());
+        }
+        assertNull(request.getEntity());
+        assertEquals(Collections.emptyMap(), request.getParameters());
+    }
+
     public void testPutRoleMapping() throws IOException {
         final String username = randomAlphaOfLengthBetween(4, 7);
         final String rolename = randomAlphaOfLengthBetween(4, 7);
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
index 2495cec7ce8..a9430b67aef 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
@@ -296,8 +296,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         //tag::update-request
         UpdateRequest request = new UpdateRequest(
                 "posts", // <1>
-                "_doc", // <2>
-                "1"); // <3>
+                "1"); // <2>
         //end::update-request
         request.fetchSource(true);
         //tag::update-request-with-inline-script
@@ -311,7 +310,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());
         assertEquals(4, updateResponse.getGetResult().getSource().get("field"));
 
-        request = new UpdateRequest("posts", "_doc", "1").fetchSource(true);
+        request = new UpdateRequest("posts", "1").fetchSource(true);
         //tag::update-request-with-stored-script
         Script stored = new Script(
                 ScriptType.STORED, null, "increment-field", parameters); // <1>
@@ -326,7 +325,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
             Map<String, Object> jsonMap = new HashMap<>();
             jsonMap.put("updated", new Date());
             jsonMap.put("reason", "daily update");
-            UpdateRequest request = new UpdateRequest("posts", "_doc", "1")
+            UpdateRequest request = new UpdateRequest("posts", "1")
                     .doc(jsonMap); // <1>
             //end::update-request-with-doc-as-map
             UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT);
@@ -341,7 +340,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
                 builder.field("reason", "daily update");
             }
             builder.endObject();
-            UpdateRequest request = new UpdateRequest("posts", "_doc", "1")
+            UpdateRequest request = new UpdateRequest("posts", "1")
                     .doc(builder); // <1>
             //end::update-request-with-doc-as-xcontent
             UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT);
@@ -349,7 +348,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         }
         {
             //tag::update-request-shortcut
-            UpdateRequest request = new UpdateRequest("posts", "_doc", "1")
+            UpdateRequest request = new UpdateRequest("posts", "1")
                     .doc("updated", new Date(), "reason", "daily update"); // <1>
             //end::update-request-shortcut
 
@@ -358,7 +357,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         }
         {
             //tag::update-request-with-doc-as-string
-            UpdateRequest request = new UpdateRequest("posts", "_doc", "1");
+            UpdateRequest request = new UpdateRequest("posts", "1");
             String jsonString = "{" +
                     "\"updated\":\"2017-01-01\"," +
                     "\"reason\":\"daily update\"" +
@@ -374,7 +373,6 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
 
         // tag::update-response
         String index = updateResponse.getIndex();
-        String type = updateResponse.getType();
        String id = updateResponse.getId();
         long version = updateResponse.getVersion();
         if (updateResponse.getResult() == DocWriteResponse.Result.CREATED) {
@@ -415,7 +413,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         }
         {
             //tag::update-docnotfound
-            UpdateRequest request = new UpdateRequest("posts", "_doc", "does_not_exist")
+            UpdateRequest request = new UpdateRequest("posts", "does_not_exist")
                     .doc("field", "value");
             try {
                 UpdateResponse updateResponse = client.update(
@@ -429,7 +427,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         }
         {
             // tag::update-conflict
-            UpdateRequest request = new UpdateRequest("posts", "_doc", "1")
+            UpdateRequest request = new UpdateRequest("posts", "1")
                     .doc("field", "value")
                     .version(1);
             try {
@@ -443,7 +441,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
             // end::update-conflict
         }
         {
-            UpdateRequest request = new UpdateRequest("posts", "_doc", "1").doc("reason", "no source");
+            UpdateRequest request = new UpdateRequest("posts", "1").doc("reason", "no source");
             //tag::update-request-no-source
             request.fetchSource(true); // <1>
             //end::update-request-no-source
@@ -453,7 +451,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
             assertEquals(3, updateResponse.getGetResult().sourceAsMap().size());
         }
         {
- UpdateRequest request = new UpdateRequest("posts", "_doc", "1").doc("reason", "source includes"); + UpdateRequest request = new UpdateRequest("posts", "1").doc("reason", "source includes"); //tag::update-request-source-include String[] includes = new String[]{"updated", "r*"}; String[] excludes = Strings.EMPTY_ARRAY; @@ -468,7 +466,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { assertTrue(sourceAsMap.containsKey("updated")); } { - UpdateRequest request = new UpdateRequest("posts", "_doc", "1").doc("reason", "source excludes"); + UpdateRequest request = new UpdateRequest("posts", "1").doc("reason", "source excludes"); //tag::update-request-source-exclude String[] includes = Strings.EMPTY_ARRAY; String[] excludes = new String[]{"updated"}; @@ -483,7 +481,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { assertTrue(sourceAsMap.containsKey("field")); } { - UpdateRequest request = new UpdateRequest("posts", "_doc", "id"); + UpdateRequest request = new UpdateRequest("posts", "id"); // tag::update-request-routing request.routing("routing"); // <1> // end::update-request-routing @@ -520,7 +518,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { // end::update-request-active-shards } { - UpdateRequest request = new UpdateRequest("posts", "_doc", "async").doc("reason", "async update").docAsUpsert(true); + UpdateRequest request = new UpdateRequest("posts", "async").doc("reason", "async update").docAsUpsert(true); ActionListener listener; // tag::update-execute-listener @@ -695,7 +693,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { // tag::bulk-request-with-mixed-operations BulkRequest request = new BulkRequest(); request.add(new DeleteRequest("posts", "3")); // <1> - request.add(new UpdateRequest("posts", "_doc", "2") // <2> + request.add(new UpdateRequest("posts", "2") // <2> .doc(XContentType.JSON,"other", "test")); request.add(new IndexRequest("posts", "_doc", "4") // <3> .source(XContentType.JSON,"field", "baz")); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java index 7d0438238e5..8bd285cd31f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java @@ -54,6 +54,8 @@ import org.elasticsearch.client.security.GetRolesRequest; import org.elasticsearch.client.security.GetRolesResponse; import org.elasticsearch.client.security.GetSslCertificatesResponse; import org.elasticsearch.client.security.GetUserPrivilegesResponse; +import org.elasticsearch.client.security.GetUsersRequest; +import org.elasticsearch.client.security.GetUsersResponse; import org.elasticsearch.client.security.HasPrivilegesRequest; import org.elasticsearch.client.security.HasPrivilegesResponse; import org.elasticsearch.client.security.InvalidateTokenRequest; @@ -109,6 +111,98 @@ import static org.hamcrest.Matchers.nullValue; public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase { + public void testGetUsers() throws Exception { + final RestHighLevelClient client = highLevelClient(); + String[] usernames = new String[] {"user1", "user2", "user3"}; + addUser(client, usernames[0], randomAlphaOfLengthBetween(6, 10)); + addUser(client, usernames[1], 
randomAlphaOfLengthBetween(6, 10)); + addUser(client, usernames[2], randomAlphaOfLengthBetween(6, 10)); + { + //tag::get-users-request + GetUsersRequest request = new GetUsersRequest(usernames[0]); + //end::get-users-request + //tag::get-users-execute + GetUsersResponse response = client.security().getUsers(request, RequestOptions.DEFAULT); + //end::get-users-execute + //tag::get-users-response + List users = new ArrayList<>(1); + users.addAll(response.getUsers()); + //end::get-users-response + + assertNotNull(response); + assertThat(users.size(), equalTo(1)); + assertThat(users.get(0).getUsername(), is(usernames[0])); + } + + { + //tag::get-users-list-request + GetUsersRequest request = new GetUsersRequest(usernames); + GetUsersResponse response = client.security().getUsers(request, RequestOptions.DEFAULT); + //end::get-users-list-request + + List users = new ArrayList<>(3); + users.addAll(response.getUsers()); + assertNotNull(response); + assertThat(users.size(), equalTo(3)); + assertThat(users.get(0).getUsername(), equalTo(usernames[0])); + assertThat(users.get(1).getUsername(), equalTo(usernames[1])); + assertThat(users.get(2).getUsername(), equalTo(usernames[2])); + } + + { + //tag::get-users-all-request + GetUsersRequest request = new GetUsersRequest(); + GetUsersResponse response = client.security().getUsers(request, RequestOptions.DEFAULT); + //end::get-users-all-request + + List users = new ArrayList<>(3); + users.addAll(response.getUsers()); + assertNotNull(response); + // 9 users are expected to be returned + // test_users (3): user1, user2, user3 + // system_users (6): elastic, beats_system, apm_system, logstash_system, kibana, remote_monitoring_user + assertThat(users.size(), equalTo(9)); + } + + { + GetUsersRequest request = new GetUsersRequest(usernames[0]); + ActionListener listener; + + //tag::get-users-execute-listener + listener = new ActionListener() { + @Override + public void onResponse(GetUsersResponse getUsersResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + //end::get-users-execute-listener + + assertNotNull(listener); + + // Replace the empty listener by a blocking listener in test + final PlainActionFuture future = new PlainActionFuture<>(); + listener = future; + + //tag::get-users-execute-async + client.security().getUsersAsync(request, RequestOptions.DEFAULT, listener); // <1> + //end::get-users-execute-async + + final GetUsersResponse response = future.get(30, TimeUnit.SECONDS); + List users = new ArrayList<>(1); + users.addAll(response.getUsers()); + assertNotNull(response); + assertThat(users.size(), equalTo(1)); + assertThat(users.get(0).getUsername(), equalTo(usernames[0])); + } + } + + public void testPutUser() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/AuthenticateResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/AuthenticateResponseTests.java index f09340fa09f..f59038af55a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/AuthenticateResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/AuthenticateResponseTests.java @@ -38,12 +38,12 @@ public class AuthenticateResponseTests extends ESTestCase { public void testFromXContent() throws IOException { xContentTester( - this::createParser, - this::createTestInstance, - this::toXContent, -
AuthenticateResponse::fromXContent) - .supportsUnknownFields(false) - .test(); + this::createParser, + this::createTestInstance, + this::toXContent, + AuthenticateResponse::fromXContent) + .supportsUnknownFields(false) + .test(); } public void testEqualsAndHashCode() { @@ -108,7 +108,7 @@ public class AuthenticateResponseTests extends ESTestCase { private AuthenticateResponse copy(AuthenticateResponse response) { final User originalUser = response.getUser(); final User copyUser = new User(originalUser.getUsername(), originalUser.getRoles(), originalUser.getMetadata(), - originalUser.getFullName(), originalUser.getEmail()); + originalUser.getFullName(), originalUser.getEmail()); return new AuthenticateResponse(copyUser, response.enabled(), response.getAuthenticationRealm(), response.getLookupRealm()); } @@ -117,9 +117,9 @@ public class AuthenticateResponseTests extends ESTestCase { final User originalUser = response.getUser(); switch (randomIntBetween(1, 8)) { case 1: - return new AuthenticateResponse(new User(originalUser.getUsername() + "wrong", originalUser.getRoles(), + return new AuthenticateResponse(new User(originalUser.getUsername() + "wrong", originalUser.getRoles(), originalUser.getMetadata(), originalUser.getFullName(), originalUser.getEmail()), response.enabled(), - response.getAuthenticationRealm(), response.getLookupRealm()); + response.getAuthenticationRealm(), response.getLookupRealm()); case 2: final Collection wrongRoles = new ArrayList<>(originalUser.getRoles()); wrongRoles.add(randomAlphaOfLengthBetween(1, 4)); @@ -134,11 +134,11 @@ public class AuthenticateResponseTests extends ESTestCase { response.getLookupRealm()); case 4: return new AuthenticateResponse(new User(originalUser.getUsername(), originalUser.getRoles(), originalUser.getMetadata(), - originalUser.getFullName() + "wrong", originalUser.getEmail()), response.enabled(), + originalUser.getFullName() + "wrong", originalUser.getEmail()), response.enabled(), response.getAuthenticationRealm(), response.getLookupRealm()); case 5: return new AuthenticateResponse(new User(originalUser.getUsername(), originalUser.getRoles(), originalUser.getMetadata(), - originalUser.getFullName(), originalUser.getEmail() + "wrong"), response.enabled(), + originalUser.getFullName(), originalUser.getEmail() + "wrong"), response.enabled(), response.getAuthenticationRealm(), response.getLookupRealm()); case 6: return new AuthenticateResponse(new User(originalUser.getUsername(), originalUser.getRoles(), originalUser.getMetadata(), diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetUsersRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetUsersRequestTests.java new file mode 100644 index 00000000000..68b1751716e --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetUsersRequestTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.security; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.EqualsHashCodeTestUtils; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; + +public class GetUsersRequestTests extends ESTestCase { + + public void testGetUsersRequest() { + final String[] users = randomArray(0, 5, String[]::new, () -> randomAlphaOfLength(5)); + GetUsersRequest getUsersRequest = new GetUsersRequest(users); + assertThat(getUsersRequest.getUsernames().size(), equalTo(users.length)); + assertThat(getUsersRequest.getUsernames(), containsInAnyOrder(users)); + } + + public void testEqualsHashCode() { + final String[] users = randomArray(0, 5, String[]::new, () -> randomAlphaOfLength(5)); + final GetUsersRequest getUsersRequest = new GetUsersRequest(users); + assertNotNull(getUsersRequest); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(getUsersRequest, (original) -> { + return new GetUsersRequest(original.getUsernames().toArray(new String[0])); + }); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(getUsersRequest, (original) -> { + return new GetUsersRequest(original.getUsernames().toArray(new String[0])); + }, GetUsersRequestTests::mutateTestItem); + } + + private static GetUsersRequest mutateTestItem(GetUsersRequest original) { + final int minRoles = original.getUsernames().isEmpty() ? 1 : 0; + return new GetUsersRequest(randomArray(minRoles, 5, String[]::new, () -> randomAlphaOfLength(6))); + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetUsersResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetUsersResponseTests.java new file mode 100644 index 00000000000..3025241bb39 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetUsersResponseTests.java @@ -0,0 +1,126 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.security; + +import org.elasticsearch.client.security.user.User; +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.EqualsHashCodeTestUtils; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.equalTo; + +/** tests the Response for getting users from the security HLRC */ +public class GetUsersResponseTests extends ESTestCase { + public void testFromXContent() throws IOException { + String json = + "{\n" + + " \"jacknich\": {\n" + + " \"username\": \"jacknich\",\n" + + " \"roles\": [\n" + + " \"admin\", \"other_role1\"\n" + + " ],\n" + + " \"full_name\": \"Jack Nicholson\",\n" + + " \"email\": \"jacknich@example.com\",\n" + + " \"metadata\": { \"intelligence\" : 7 },\n" + + " \"enabled\": true\n" + + " }\n" + + "}"; + final GetUsersResponse response = GetUsersResponse.fromXContent((XContentType.JSON.xContent().createParser( + new NamedXContentRegistry(Collections.emptyList()), new DeprecationHandler() { + @Override + public void usedDeprecatedName(String usedName, String modernName) { + } + + @Override + public void usedDeprecatedField(String usedName, String replacedWith) { + } + }, json))); + assertThat(response.getUsers().size(), equalTo(1)); + final User user = response.getUsers().iterator().next(); + assertThat(user.getUsername(), equalTo("jacknich")); + assertThat(user.getRoles().size(), equalTo(2)); + assertThat(user.getFullName(), equalTo("Jack Nicholson")); + assertThat(user.getEmail(), equalTo("jacknich@example.com")); + final Map metadata = new HashMap<>(); + metadata.put("intelligence", 7); + assertThat(metadata, equalTo(user.getMetadata())); + } + + public void testEqualsHashCode() { + final Set users = new HashSet<>(); + final Set enabledUsers = new HashSet<>(); + Map metadata = new HashMap<>(); + metadata.put("intelligence", 1); + final User user1 = new User("testUser1", Arrays.asList(new String[] {"admin", "other_role1"}), + metadata, "Test User 1", null); + users.add(user1); + enabledUsers.add(user1); + Map metadata2 = new HashMap<>(); + metadata2.put("intelligence", 9); + metadata2.put("specialty", "geo"); + final User user2 = new User("testUser2", Arrays.asList(new String[] {"admin"}), + metadata2, "Test User 2", "testuser2@example.com"); + users.add(user2); + enabledUsers.add(user2); + final GetUsersResponse getUsersResponse = new GetUsersResponse(users, enabledUsers); + assertNotNull(getUsersResponse); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(getUsersResponse, (original) -> { + return new GetUsersResponse(original.getUsers(), original.getEnabledUsers()); + }); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(getUsersResponse, (original) -> { + return new GetUsersResponse(original.getUsers(), original.getEnabledUsers()); + }, GetUsersResponseTests::mutateTestItem); + } + + private static GetUsersResponse mutateTestItem(GetUsersResponse original) { + if (randomBoolean()) { + final Set users = new HashSet<>(); + final Set enabledUsers = new HashSet<>(); + Map metadata = new HashMap<>(); + metadata.put("intelligence", 1); + final User user1 = new User("testUser1", Arrays.asList(new String[] {"admin", "other_role1"}), +
metadata, "Test User 1", null); + users.add(user1); + enabledUsers.add(user1); + return new GetUsersResponse(users, enabledUsers); + } + Map metadata = new HashMap<>(); + metadata.put("intelligence", 5); // change intelligence + final User user1 = new User("testUser1", Arrays.asList(new String[] {"admin", "other_role1"}), + metadata, "Test User 1", null); + Set newUsers = original.getUsers().stream().collect(Collectors.toSet()); + Set enabledUsers = original.getEnabledUsers().stream().collect(Collectors.toSet()); + newUsers.clear(); + enabledUsers.clear(); + newUsers.add(user1); + enabledUsers.add(user1); + return new GetUsersResponse(newUsers, enabledUsers); + } +} diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java index 5b0457d01b0..a054066ef5a 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java @@ -46,7 +46,7 @@ public class CreatedLocationHeaderIT extends ESRestTestCase { } public void testUpsert() throws IOException { - Request request = new Request("POST", "test/_doc/1/_update"); + Request request = new Request("POST", "test/_update/1"); request.setJsonEntity("{" + "\"doc\": {\"test\": \"test\"}," + "\"doc_as_upsert\": true}"); diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java index d6f5f723b46..2292def6d4a 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java @@ -69,7 +69,7 @@ public class WaitForRefreshAndCloseIT extends ESRestTestCase { Request createDoc = new Request("PUT", docPath()); createDoc.setJsonEntity("{\"test\":\"test\"}"); client().performRequest(createDoc); - Request updateDoc = new Request("POST", docPath() + "/_update"); + Request updateDoc = new Request("POST", "test/_update/1"); updateDoc.setJsonEntity("{\"doc\":{\"name\":\"test\"}}"); closeWhileListenerEngaged(start(updateDoc)); } diff --git a/distribution/src/config/elasticsearch.yml b/distribution/src/config/elasticsearch.yml index 445c6f5c07f..ceb1fc07864 100644 --- a/distribution/src/config/elasticsearch.yml +++ b/distribution/src/config/elasticsearch.yml @@ -67,11 +67,11 @@ ${path.logs} # #discovery.zen.ping.unicast.hosts: ["host1", "host2"] # -# Prevent the "split brain" by configuring the majority of nodes (total number of master-eligible nodes / 2 + 1): +# Bootstrap the cluster using an initial set of master-eligible nodes: # -#discovery.zen.minimum_master_nodes: +#cluster.initial_master_nodes: ["node-1", "node-2"] # -# For more information, consult the zen discovery module documentation. +# For more information, consult the discovery and cluster formation module documentation. 
# # ---------------------------------- Gateway ----------------------------------- # diff --git a/docs/java-rest/high-level/document/update.asciidoc b/docs/java-rest/high-level/document/update.asciidoc index 743eb3da0a8..3112d855122 100644 --- a/docs/java-rest/high-level/document/update.asciidoc +++ b/docs/java-rest/high-level/document/update.asciidoc @@ -17,8 +17,7 @@ An +{request}+ requires the following arguments: include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Index -<2> Type -<3> Document id +<2> Document id The Update API allows updating an existing document by using a script or by passing a partial document. diff --git a/docs/java-rest/high-level/security/get-users.asciidoc b/docs/java-rest/high-level/security/get-users.asciidoc new file mode 100644 index 00000000000..e9e4a0d9491 --- /dev/null +++ b/docs/java-rest/high-level/security/get-users.asciidoc @@ -0,0 +1,48 @@ + +-- +:api: get-users +:request: GetUsersRequest +:response: GetUsersResponse +-- + +[id="{upid}-{api}"] +=== Get Users API + +[id="{upid}-{api}-request"] +==== Get Users Request + +Retrieving a user can be performed using the `security().getUsers()` +method and setting the username on +{request}+: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-request] +-------------------------------------------------- + +Retrieving multiple users can be performed using the `security().getUsers()` +method and setting multiple usernames on +{request}+: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-list-request] +-------------------------------------------------- + +Retrieving all users can be performed using the `security().getUsers()` +method without specifying any usernames on +{request}+: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-all-request] +-------------------------------------------------- + +include::../execution.asciidoc[] + +[id="{upid}-{api}-response"] +==== Get Users Response + +The returned +{response}+ allows getting information about the retrieved users as follows.
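In addition to the tagged snippet below, a minimal, self-contained sketch of the full request/response flow may help when reading this change outside the docs build. The pre-built `RestHighLevelClient` named `client` and the username `jacknich` are illustrative assumptions; the request, response, and `User` accessors are the ones exercised by the tests in this change:

["source","java"]
--------------------------------------------------
import java.io.IOException;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.security.GetUsersRequest;
import org.elasticsearch.client.security.GetUsersResponse;
import org.elasticsearch.client.security.user.User;

public class GetUsersSketch {

    // Fetches one user by name and prints its roles; passing no
    // usernames to GetUsersRequest would retrieve all users instead.
    static void printUser(RestHighLevelClient client) throws IOException {
        GetUsersRequest request = new GetUsersRequest("jacknich"); // username is illustrative
        GetUsersResponse response = client.security().getUsers(request, RequestOptions.DEFAULT);
        for (User user : response.getUsers()) {
            System.out.println(user.getUsername() + " -> " + user.getRoles());
        }
    }
}
--------------------------------------------------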
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-response] +-------------------------------------------------- \ No newline at end of file diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 70b66074aad..0b4a2570c89 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -387,6 +387,7 @@ include::rollup/get_rollup_index_caps.asciidoc[] The Java High Level REST Client supports the following Security APIs: * <> +* <<{upid}-get-users>> * <<{upid}-delete-user>> * <> * <> @@ -410,6 +411,7 @@ The Java High Level REST Client supports the following Security APIs: * <<{upid}-delete-privileges>> include::security/put-user.asciidoc[] +include::security/get-users.asciidoc[] include::security/delete-user.asciidoc[] include::security/enable-user.asciidoc[] include::security/disable-user.asciidoc[] diff --git a/docs/plugins/discovery-ec2.asciidoc b/docs/plugins/discovery-ec2.asciidoc index f79e73fcf55..2710cf46bff 100644 --- a/docs/plugins/discovery-ec2.asciidoc +++ b/docs/plugins/discovery-ec2.asciidoc @@ -11,11 +11,12 @@ include::install_remove.asciidoc[] [[discovery-ec2-usage]] ==== Getting started with AWS -The plugin provides a hosts provider for zen discovery named `ec2`. This hosts provider -finds other Elasticsearch instances in EC2 through AWS metadata. Authentication is done using -http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html[IAM Role] -credentials by default. The only necessary configuration change to enable the plugin -is setting the unicast host provider for zen discovery: +The plugin provides a hosts provider for zen discovery named `ec2`. This hosts +provider finds other Elasticsearch instances in EC2 through AWS metadata. +Authentication is done using +http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html[IAM +Role] credentials by default. To enable the plugin, set the unicast host +provider for Zen discovery to `ec2`: [source,yaml] ---- @@ -51,9 +52,9 @@ Those that must be stored in the keystore are marked as `Secure`. `endpoint`:: - The ec2 service endpoint to connect to. This will be automatically - figured out by the ec2 client based on the instance location, but - can be specified explicitly. See http://docs.aws.amazon.com/general/latest/gr/rande.html#ec2_region. + The ec2 service endpoint to connect to. See + http://docs.aws.amazon.com/general/latest/gr/rande.html#ec2_region. This + defaults to `ec2.us-east-1.amazonaws.com`. 
`protocol`:: diff --git a/docs/reference/aggregations/pipeline/percentiles-bucket-aggregation.asciidoc b/docs/reference/aggregations/pipeline/percentiles-bucket-aggregation.asciidoc index 6c4329f6f20..032b6ef4e41 100644 --- a/docs/reference/aggregations/pipeline/percentiles-bucket-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/percentiles-bucket-aggregation.asciidoc @@ -27,6 +27,7 @@ A `percentiles_bucket` aggregation looks like this in isolation: details)|Optional | `skip` |`format` |format to apply to the output value of this aggregation |Optional | `null` |`percents` |The list of percentiles to calculate |Optional | `[ 1, 5, 25, 50, 75, 95, 99 ]` +|`keyed` |Flag which returns the range as a hash instead of an array of key-value pairs |Optional | `true` |=== The following snippet calculates the percentiles for the total monthly `sales` buckets: diff --git a/docs/reference/ccr/requirements.asciidoc b/docs/reference/ccr/requirements.asciidoc index 944e317dd02..4736d641296 100644 --- a/docs/reference/ccr/requirements.asciidoc +++ b/docs/reference/ccr/requirements.asciidoc @@ -49,8 +49,8 @@ For more information about index settings, see {ref}/index-modules.html[Index mo If you want to replicate indices created by APM Server or Beats, and are allowing APM Server or Beats to manage index templates, you need to configure soft deletes on the underlying index templates. To configure soft deletes on the -underlying index templates, add the following changes to the relevant APM Server -or Beats configuration file. +underlying index templates, incorporate the following changes to the relevant +APM Server or Beats configuration file. ["source","yaml"] ---------------------------------------------------------------------- @@ -62,3 +62,37 @@ setup.template.settings: For additional information on controlling the index templates managed by APM Server or Beats, see the relevant documentation on loading the Elasticsearch index template. + +[float] +[[ccr-overview-logstash]] +==== Setting soft deletes on indices created by Logstash + +If you want to replicate indices created by Logstash, and are using Logstash to +manage index templates, you need to configure soft deletes on a custom Logstash +index template. To configure soft deletes on the underlying index template, +incorporate the following change to a custom Logstash template. + +["source","js"] +---------------------------------------------------------------------- +{ + "settings" : { + "index.soft_deletes.retention.operations" : 1024 + } +} +---------------------------------------------------------------------- +// NOTCONSOLE + +Additionally, you will need to configure the Elasticsearch output plugin to use +this custom template. + +["source","ruby"] +---------------------------------------------------------------------- +output { + elasticsearch { + template => "/path/to/custom/logstash/template.json" + } +} +---------------------------------------------------------------------- + +For additional information on controlling the index templates managed by +Logstash, see the relevant documentation on the Elasticsearch output plugin. diff --git a/docs/reference/ilm/set-up-lifecycle-policy.asciidoc b/docs/reference/ilm/set-up-lifecycle-policy.asciidoc index 5997e618bf6..99fa155f9c3 100644 --- a/docs/reference/ilm/set-up-lifecycle-policy.asciidoc +++ b/docs/reference/ilm/set-up-lifecycle-policy.asciidoc @@ -42,6 +42,7 @@ PUT _ilm/policy/my_policy settings for a particular index {ilm} will not manage that index.
To set the policy for an index, there are two options: + 1. Apply the policy to an index template and bootstrap creating the first index 2. Apply the policy to a new index in a create index request diff --git a/docs/reference/indices/recovery.asciidoc b/docs/reference/indices/recovery.asciidoc index 49f58e645bc..0929b36e774 100644 --- a/docs/reference/indices/recovery.asciidoc +++ b/docs/reference/indices/recovery.asciidoc @@ -90,7 +90,8 @@ Response: "repository" : "my_repository", "snapshot" : "my_snapshot", "index" : "index1", - "version" : "{version}" + "version" : "{version}", + "restoreUUID": "PDh1ZAOaRbiGIVtCvZOMww" }, "target" : { "id" : "ryqJ5lO5S4-lSFbGntkEkg", diff --git a/docs/reference/indices/templates.asciidoc b/docs/reference/indices/templates.asciidoc index 88f4ca83d88..02c0b7a4cbe 100644 --- a/docs/reference/indices/templates.asciidoc +++ b/docs/reference/indices/templates.asciidoc @@ -33,7 +33,7 @@ PUT _template/template_1 }, "created_at": { "type": "date", - "format": "EEE MMM dd HH:mm:ss Z YYYY" + "format": "EEE MMM dd HH:mm:ss Z yyyy" } } } diff --git a/docs/reference/migration/migrate_7_0/scripting.asciidoc b/docs/reference/migration/migrate_7_0/scripting.asciidoc index 01d8805c896..99afca91e01 100644 --- a/docs/reference/migration/migrate_7_0/scripting.asciidoc +++ b/docs/reference/migration/migrate_7_0/scripting.asciidoc @@ -29,3 +29,9 @@ To check if a document is missing a value, you can use Malformed scripts, either in search templates, ingest pipelines or search requests, return `400 - Bad request` while they would previously return `500 - Internal Server Error`. This also applies for stored scripts. + +[float] +==== getValues() removed + +The `ScriptDocValues#getValues()` method was deprecated in 6.6 and has +been removed in 7.0. Use `doc["foo"]` in place of `doc["foo"].values`. diff --git a/docs/reference/query-dsl/full-text-queries.asciidoc b/docs/reference/query-dsl/full-text-queries.asciidoc index aaa0a911372..f9714c1be3c 100644 --- a/docs/reference/query-dsl/full-text-queries.asciidoc +++ b/docs/reference/query-dsl/full-text-queries.asciidoc @@ -40,6 +40,11 @@ The queries in this group are: A simpler, more robust version of the `query_string` syntax suitable for exposing directly to users. +<>:: + + A full text query that allows fine-grained control of the ordering and + proximity of matching terms. + include::match-query.asciidoc[] include::match-phrase-query.asciidoc[] @@ -53,3 +58,5 @@ include::common-terms-query.asciidoc[] include::query-string-query.asciidoc[] include::simple-query-string-query.asciidoc[] + +include::intervals-query.asciidoc[] diff --git a/docs/reference/query-dsl/intervals-query.asciidoc b/docs/reference/query-dsl/intervals-query.asciidoc new file mode 100644 index 00000000000..790fdf08bfd --- /dev/null +++ b/docs/reference/query-dsl/intervals-query.asciidoc @@ -0,0 +1,260 @@ +[[query-dsl-intervals-query]] +=== Intervals query + +An `intervals` query allows fine-grained control over the order and proximity of +matching terms. Matching rules are constructed from a small set of definitions, +and the rules are then applied to terms from a particular `field`. + +The definitions produce sequences of minimal intervals that span terms in a +body of text. These intervals can be further combined and filtered by +parent sources.
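For readers of this diff, the rule definitions that follow can also be exercised directly from Java through the low-level REST client used elsewhere in this change. A minimal sketch, assuming an index named `my_index`, an already-built `RestClient`, and a query body mirroring the `match` rule described below:

["source","java"]
--------------------------------------------------
import java.io.IOException;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class IntervalsQuerySketch {

    // Sends an intervals query: "hot" and "porridge" in order, at most
    // ten positions apart, in the my_text field. The index name and the
    // RestClient instance are illustrative assumptions.
    static Response search(RestClient restClient) throws IOException {
        Request request = new Request("POST", "/my_index/_search");
        request.setJsonEntity(
            "{\"query\": {\"intervals\": {\"my_text\": {" +
            "\"match\": {\"query\": \"hot porridge\", \"max_gaps\": 10, \"ordered\": true}" +
            "}}}}");
        return restClient.performRequest(request);
    }
}
--------------------------------------------------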
+ +The example below will search for the phrase `my favourite food` appearing +before the terms `hot` and `water` or `cold` and `porridge` in any order, in +the field `my_text`. + +[source,js] +-------------------------------------------------- +POST _search +{ + "query": { + "intervals" : { + "my_text" : { + "all_of" : { + "ordered" : true, + "intervals" : [ + { + "match" : { + "query" : "my favourite food", + "max_gaps" : 0, + "ordered" : true + } + }, + { + "any_of" : { + "intervals" : [ + { "match" : { "query" : "hot water" } }, + { "match" : { "query" : "cold porridge" } } + ] + } + } + ] + }, + "boost" : 2.0, + "_name" : "favourite_food" + } + } + } +} +-------------------------------------------------- +// CONSOLE + +In the above example, the text `my favourite food is cold porridge` would +match because the two intervals matching `my favourite food` and `cold +porridge` appear in the correct order, but the text `when it's cold my +favourite food is porridge` would not match, because the interval matching +`cold porridge` starts before the interval matching `my favourite food`. + +[[intervals-match]] +==== `match` + +The `match` rule matches analyzed text, and takes the following parameters: + +[horizontal] +`query`:: +The text to match. +`max_gaps`:: +Specify a maximum number of gaps between the terms in the text. Terms that +appear further apart than this will not match. If unspecified, or set to -1, +then there is no width restriction on the match. If set to 0 then the terms +must appear next to each other. +`ordered`:: +Whether or not the terms must appear in their specified order. Defaults to +`false` +`analyzer`:: +Which analyzer should be used to analyze terms in the `query`. By +default, the search analyzer of the top-level field will be used. +`filter`:: +An optional <> + +[[intervals-all_of]] +==== `all_of` + +`all_of` returns matches that span a combination of other rules. + +[horizontal] +`intervals`:: +An array of rules to combine. All rules must produce a match in a +document for the overall source to match. +`max_gaps`:: +Specify a maximum number of gaps between the rules. Combinations that match +across a distance greater than this will not match. If set to -1 or +unspecified, there is no restriction on this distance. If set to 0, then the +matches produced by the rules must all appear immediately next to each other. +`ordered`:: +Whether the intervals produced by the rules should appear in the order in +which they are specified. Defaults to `false` +`filter`:: +An optional <> + +[[intervals-any_of]] +==== `any_of` + +The `any_of` rule emits intervals produced by any of its sub-rules. + +[horizontal] +`intervals`:: +An array of rules to match +`filter`:: +An optional <> + +[[interval_filter]] +==== filters + +You can filter intervals produced by any rules by their relation to the +intervals produced by another rule.
The following example will return +documents that have the words `hot` and `porridge` within 10 positions +of each other, without the word `salty` in between: + +[source,js] +-------------------------------------------------- +POST _search +{ + "query": { + "intervals" : { + "my_text" : { + "match" : { + "query" : "hot porridge", + "max_gaps" : 10, + "filter" : { + "not_containing" : { + "match" : { + "query" : "salty" + } + } + } + } + } + } + } +} +-------------------------------------------------- +// CONSOLE + +The following filters are available: +[horizontal] +`containing`:: +Produces intervals that contain an interval from the filter rule +`contained_by`:: +Produces intervals that are contained by an interval from the filter rule +`not_containing`:: +Produces intervals that do not contain an interval from the filter rule +`not_contained_by`:: +Produces intervals that are not contained by an interval from the filter rule +`not_overlapping`:: +Produces intervals that do not overlap with an interval from the filter rule + +[[interval-minimization]] +==== Minimization + +The intervals query always minimizes intervals, to ensure that queries can +run in linear time. This can sometimes cause surprising results, particularly +when using `max_gaps` restrictions or filters. For example, take the +following query, searching for `salty` contained within the phrase `hot +porridge`: + +[source,js] +-------------------------------------------------- +POST _search +{ + "query": { + "intervals" : { + "my_text" : { + "match" : { + "query" : "salty", + "filter" : { + "contained_by" : { + "match" : { + "query" : "hot porridge" + } + } + } + } + } + } + } +} +-------------------------------------------------- +// CONSOLE + +This query will *not* match a document containing the phrase `hot porridge is +salty porridge`, because the intervals returned by the match query for `hot +porridge` only cover the initial two terms in this document, and these do not +overlap the intervals covering `salty`. + +Another restriction to be aware of is the case of `any_of` rules that contain +sub-rules which overlap. In particular, if one of the rules is a strict +prefix of the other, then the longer rule will never be matched, which can +cause surprises when used in combination with `max_gaps`. Consider the +following query, searching for `the` immediately followed by `big` or `big bad`, +immediately followed by `wolf`: + +[source,js] +-------------------------------------------------- +POST _search +{ + "query": { + "intervals" : { + "my_text" : { + "all_of" : { + "intervals" : [ + { "match" : { "query" : "the" } }, + { "any_of" : { + "intervals" : [ + { "match" : { "query" : "big" } }, + { "match" : { "query" : "big bad" } } + ] } }, + { "match" : { "query" : "wolf" } } + ], + "max_gaps" : 0, + "ordered" : true + } + } + } + } +} +-------------------------------------------------- +// CONSOLE + +Counter-intuitively, this query *will not* match the document `the big bad +wolf`, because the `any_of` rule in the middle will only produce intervals +for `big`: intervals for `big bad` are longer than those for `big` while +starting at the same position, and so are minimized away.
In these cases, +it's better to rewrite the query so that all of the options are explicitly +laid out at the top level: + +[source,js] +-------------------------------------------------- +POST _search +{ + "query": { + "intervals" : { + "my_text" : { + "any_of" : { + "intervals" : [ + { "match" : { + "query" : "the big bad wolf", + "ordered" : true, + "max_gaps" : 0 } }, + { "match" : { + "query" : "the big wolf", + "ordered" : true, + "max_gaps" : 0 } } + ] + } + } + } + } +} +-------------------------------------------------- +// CONSOLE \ No newline at end of file diff --git a/docs/reference/sql/concepts.asciidoc b/docs/reference/sql/concepts.asciidoc index 617091446ea..a3ea1f02e72 100644 --- a/docs/reference/sql/concepts.asciidoc +++ b/docs/reference/sql/concepts.asciidoc @@ -64,4 +64,4 @@ Multiple clusters, each with its own namespace, connected to each other in a fed |=== -As one can see while the mapping between the concepts are not exactly one to one and the semantics somewhat different, there are more things in common than differences. In fact, thanks to SQL declarative nature, many concepts can move across {es} transparently and the terminology of the two likely to be used interchangeably through-out the rest of the material. +As one can see, while the mapping between the concepts is not exactly one to one and the semantics somewhat different, there are more things in common than differences. In fact, thanks to SQL's declarative nature, many concepts can move across {es} transparently, and the terminology of the two is likely to be used interchangeably throughout the rest of the material. \ No newline at end of file diff --git a/docs/reference/sql/functions/date-time.asciidoc b/docs/reference/sql/functions/date-time.asciidoc index 558944e9dd7..2ec6529f8f8 100644 --- a/docs/reference/sql/functions/date-time.asciidoc +++ b/docs/reference/sql/functions/date-time.asciidoc @@ -1,10 +1,151 @@ [role="xpack"] [testenv="basic"] [[sql-functions-datetime]] -=== Date and Time Functions +=== Date/Time and Interval Functions and Operators beta[] +{es-sql} offers a wide range of facilities for performing date/time manipulations. + +[[sql-functions-datetime-interval]] +==== Intervals + +A common requirement when dealing with date/time in general revolves around +the notion of ``interval``s, a topic that is worth exploring in the context of {es} and {es-sql}. + +{es} has comprehensive support for <> both inside <> and <>. +Inside {es-sql} the former is supported as is by passing the expression in the table name, while the latter is supported through the standard SQL `INTERVAL`. + +The table below shows the mapping between {es} and {es-sql}: + +[cols="^m,^m",options="header"] + +|=== +| {es} | {es-sql} + +2+h| Index/Table date math + +2+| + +2+h| Query date math + +| 1y | INTERVAL 1 YEAR +| 2M | INTERVAL 2 MONTH +| 3w | INTERVAL 21 DAY +| 4d | INTERVAL 4 DAY +| 5h | INTERVAL 5 HOUR +| 6m | INTERVAL 6 MINUTE +| 7s | INTERVAL 7 SECOND + +|=== + +`INTERVAL` allows either `YEAR` and `MONTH` to be mixed together _or_ `DAY`, `HOUR`, `MINUTE` and `SECOND`. + +TIP: {es-sql} also accepts the plural for each time unit (e.g. both `YEAR` and `YEARS` are valid).
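To experiment with interval literals end to end, one option is the SQL REST endpoint, reusing the low-level client seen elsewhere in this change. The sketch below is a hedged illustration: the `_sql` endpoint path and the `format=txt` parameter are assumptions about this version, not taken from this diff, while the interval arithmetic mirrors the `dtIntervalPlusInterval` example referenced above:

["source","java"]
--------------------------------------------------
import java.io.IOException;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class IntervalArithmeticSketch {

    // Evaluates simple interval arithmetic server-side, e.g.
    // INTERVAL 1 YEAR + INTERVAL 2 MONTH. The endpoint path and
    // format parameter are assumptions for illustration only.
    static Response intervalSum(RestClient restClient) throws IOException {
        Request request = new Request("POST", "/_sql?format=txt");
        request.setJsonEntity("{\"query\": \"SELECT INTERVAL 1 YEAR + INTERVAL 2 MONTH AS result\"}");
        return restClient.performRequest(request);
    }
}
--------------------------------------------------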
+ +Example of the possible combinations below: + +[cols="^,^",options="header"] + +|=== +| Interval | Description + +| `INTERVAL '1-2' YEAR TO MONTH` | 1 year and 2 months +| `INTERVAL '3 4' DAYS TO HOURS` | 3 days and 4 hours +| `INTERVAL '5 6:12' DAYS TO MINUTES` | 5 days, 6 hours and 12 minutes +| `INTERVAL '3 4:56:01' DAY TO SECOND` | 3 days, 4 hours, 56 minutes and 1 second +| `INTERVAL '2 3:45:01.23456789' DAY TO SECOND` | 2 days, 3 hours, 45 minutes, 1 second and 234567890 nanoseconds +| `INTERVAL '123:45' HOUR TO MINUTES` | 123 hours and 45 minutes +| `INTERVAL '65:43:21.0123' HOUR TO SECONDS` | 65 hours, 43 minutes, 21 seconds and 12300000 nanoseconds +| `INTERVAL '45:01.23' MINUTES TO SECONDS` | 45 minutes, 1 second and 230000000 nanoseconds + +|=== + +==== Operators + +Basic arithmetic operators (`+`, `-`, etc) support date-time parameters as indicated below: + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[dtIntervalPlusInterval] +-------------------------------------------------- + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[dtDatePlusInterval] +-------------------------------------------------- + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[dtMinusInterval] +-------------------------------------------------- + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[dtIntervalMinusInterval] +-------------------------------------------------- + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[dtDateMinusInterval] +-------------------------------------------------- + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[dtIntervalMul] +-------------------------------------------------- + +==== Functions + +beta[] + +[[sql-functions-current-timestamp]] +==== `CURRENT_TIMESTAMP`/`NOW` + +.Synopsis: +[source, sql] +-------------------------------------------------- +CURRENT_TIMESTAMP +CURRENT_TIMESTAMP(precision <1>) +NOW() +-------------------------------------------------- + +*Input*: + +<1> fractional digits - optional + +*Output*: date/time + +.Description: + +Returns the date/time when the current query reached the server. +As a function, `CURRENT_TIMESTAMP()` accepts _precision_ as an optional +parameter for rounding the second fractional digits (nanoseconds). + +This method always returns the same value within a query. 
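Since the value is fixed for the duration of a query, `CURRENT_TIMESTAMP`/`NOW()` combines naturally with the interval arithmetic above for relative filtering, as the tagged examples below also show. A sketch under the same assumptions as before (the `_sql` endpoint path and the illustrative `logs` table and `ts` column are not taken from this diff):

["source","java"]
--------------------------------------------------
import java.io.IOException;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class RelativeTimeFilterSketch {

    // Counts documents whose timestamp falls within the last hour.
    // Index/column names and the endpoint path are illustrative assumptions.
    static Response countLastHour(RestClient restClient) throws IOException {
        Request request = new Request("POST", "/_sql?format=txt");
        request.setJsonEntity(
            "{\"query\": \"SELECT COUNT(*) FROM logs WHERE ts > NOW() - INTERVAL 1 HOUR\"}");
        return restClient.performRequest(request);
    }
}
--------------------------------------------------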
+ +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[curTs] +-------------------------------------------------- + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[curTsFunction] +-------------------------------------------------- + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[curTsFunctionPrecision] +-------------------------------------------------- + +Typically this function is used for relative date/time filtering: + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[filterNow] +-------------------------------------------------- + [[sql-functions-datetime-day]] ==== `DAY_OF_MONTH`/`DOM`/`DAY` diff --git a/docs/reference/sql/functions/grouping.asciidoc b/docs/reference/sql/functions/grouping.asciidoc new file mode 100644 index 00000000000..9a8c5c5ef53 --- /dev/null +++ b/docs/reference/sql/functions/grouping.asciidoc @@ -0,0 +1,54 @@ +[role="xpack"] +[testenv="basic"] +[[sql-functions-grouping]] +=== Grouping Functions + +beta[] + +Functions for creating special __grouping__s (also known as _bucketing_); as such these need to be used +as part of the <>. + +[[sql-functions-grouping-histogram]] +==== `HISTOGRAM` + +.Synopsis +[source, sql] +---- +HISTOGRAM ( numeric_exp<1>, numeric_interval<2>) +HISTOGRAM ( date_exp<3>, date_time_interval<4>) +---- + +*Input*: + +<1> numeric expression (typically a field) +<2> numeric interval +<3> date/time expression (typically a field) +<4> date/time <> + +*Output*: non-empty buckets or groups of the given expression divided according to the given interval + +.Description + +The histogram function takes all matching values and divides them into buckets with fixed size matching the given interval, using (roughly) the following formula: + +[source, sql] +---- +bucket_key = Math.floor(value / interval) * interval +---- + +`Histogram` can be applied on either numeric fields: + + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[histogramNumeric] +---- + +or date/time fields: + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[histogramDate] +---- + + diff --git a/docs/reference/sql/functions/index.asciidoc b/docs/reference/sql/functions/index.asciidoc index 552a70955cf..85c2e25f865 100644 --- a/docs/reference/sql/functions/index.asciidoc +++ b/docs/reference/sql/functions/index.asciidoc @@ -9,6 +9,7 @@ beta[] * <> * <> +* <> * <> * <> * <> @@ -19,6 +20,7 @@ beta[] include::operators.asciidoc[] include::aggs.asciidoc[] +include::grouping.asciidoc[] include::date-time.asciidoc[] include::search.asciidoc[] include::math.asciidoc[] diff --git a/docs/reference/sql/language/data-types.asciidoc b/docs/reference/sql/language/data-types.asciidoc index 0b2384de1ff..572fd094dbc 100644 --- a/docs/reference/sql/language/data-types.asciidoc +++ b/docs/reference/sql/language/data-types.asciidoc @@ -7,42 +7,71 @@ beta[] Most of {es} <> are available in {es-sql}, as indicated below: -[cols="^,^,^",options="header"] +[cols="^,^m,^",options="header"] |=== -| {es} type | SQL type | SQL precision +| {es} type | SQL type | SQL precision 3+h| Core types -| <> | `null` | 0 -| <> | `boolean` | 1 
-| <> | `tinyint` | 3 -| <> | `smallint` | 5 -| <> | `integer` | 10 -| <> | `bigint` | 19 -| <> | `double` | 15 -| <> | `real` | 7 -| <> | `float` | 16 -| <> | `float` | 19 -| <> | `varchar` | based on <> -| <> | `varchar` | 2,147,483,647 -| <> | `varbinary` | 2,147,483,647 -| <> | `timestamp` | 24 - -3+h| Complex types - -| <> | `struct` | 0 -| <> | `struct` | 0 +| <> | null | 0 +| <> | boolean | 1 +| <> | tinyint | 3 +| <> | smallint | 5 +| <> | integer | 10 +| <> | bigint | 19 +| <> | double | 15 +| <> | real | 7 +| <> | float | 16 +| <> | float | 19 +| <> | varchar | based on <> +| <> | varchar | 2,147,483,647 +| <> | varbinary | 2,147,483,647 +| <> | timestamp | 24 +| <> | varchar | 39 + +3+h| Complex types + +| <> | struct | 0 +| <> | struct | 0 3+h| Unsupported types -| _types not mentioned above_ | `unsupported`| 0 +| _types not mentioned above_ | unsupported | 0 |=== + Obviously, not all types in {es} have an equivalent in SQL and vice versa; hence, {es-sql} uses the data type _particularities_ of the former over the latter, as ultimately {es} is the backing store. +In addition to the types above, {es-sql} also supports at _runtime_ SQL-specific types that do not have an equivalent in {es}. +Such types cannot be loaded from {es} (as it does not know about them); however, they can be used inside {es-sql} in queries or their results. + +The table below indicates these types: + +[cols="^m,^",options="header"] + +|=== +| SQL type | SQL precision + + +| interval_year | 7 +| interval_month | 7 +| interval_day | 23 +| interval_hour | 23 +| interval_minute | 23 +| interval_second | 23 +| interval_year_to_month | 7 +| interval_day_to_hour | 23 +| interval_day_to_minute | 23 +| interval_day_to_second | 23 +| interval_hour_to_minute | 23 +| interval_hour_to_second | 23 +| interval_minute_to_second | 23 + +|=== + [[sql-multi-field]] [float] diff --git a/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-774e9aefbc.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..2e1ae79a4f7 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +69279f27885c43662ca7216a6939dacbdf9b4795 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 70fd9bf558c..00000000000 --- a/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -70b328502ac1cc2d27c899a642ffb2f4f1d2b9f3 \ No newline at end of file diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt index becf34c49ea..d5ced84ebcb 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt @@ -67,13 +67,11 @@ class org.elasticsearch.common.geo.GeoPoint { class org.elasticsearch.index.fielddata.ScriptDocValues$Strings { String get(int) String getValue() - List getValues() } class org.elasticsearch.index.fielddata.ScriptDocValues$Longs { Long get(int) long getValue() - List getValues() } class org.elasticsearch.script.JodaCompatibleZonedDateTime { @@ -131,9 +129,9
@@ class org.elasticsearch.script.JodaCompatibleZonedDateTime { #### Joda methods that exist in java time boolean equals(Object) int hashCode() - boolean isAfter(ZonedDateTime) - boolean isBefore(ZonedDateTime) - boolean isEqual(ZonedDateTime) + boolean isAfter(JodaCompatibleZonedDateTime) + boolean isBefore(JodaCompatibleZonedDateTime) + boolean isEqual(JodaCompatibleZonedDateTime) String toString() #### Joda time methods @@ -163,19 +161,16 @@ class org.elasticsearch.script.JodaCompatibleZonedDateTime { class org.elasticsearch.index.fielddata.ScriptDocValues$Dates { JodaCompatibleZonedDateTime get(int) JodaCompatibleZonedDateTime getValue() - List getValues() } class org.elasticsearch.index.fielddata.ScriptDocValues$Doubles { Double get(int) double getValue() - List getValues() } class org.elasticsearch.index.fielddata.ScriptDocValues$GeoPoints { org.elasticsearch.common.geo.GeoPoint get(int) org.elasticsearch.common.geo.GeoPoint getValue() - List getValues() double getLat() double getLon() double[] getLats() @@ -193,13 +188,11 @@ class org.elasticsearch.index.fielddata.ScriptDocValues$GeoPoints { class org.elasticsearch.index.fielddata.ScriptDocValues$Booleans { Boolean get(int) boolean getValue() - List getValues() } class org.elasticsearch.index.fielddata.ScriptDocValues$BytesRefs { BytesRef get(int) BytesRef getValue() - List getValues() } class org.apache.lucene.util.BytesRef { @@ -213,7 +206,6 @@ class org.apache.lucene.util.BytesRef { class org.elasticsearch.index.mapper.IpFieldMapper$IpFieldType$IpScriptDocValues { String get(int) String getValue() - List getValues() } class org.elasticsearch.search.lookup.FieldLookup { @@ -268,4 +260,4 @@ static_import { int staticAddIntsTest(int, int) from_class org.elasticsearch.painless.StaticTest float staticAddFloatsTest(float, float) from_class org.elasticsearch.painless.FeatureTest int testAddWithState(int, int, int, double) bound_to org.elasticsearch.painless.BindingTest -} \ No newline at end of file +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollParallelizationHelper.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollParallelizationHelper.java index 799d5874e15..2eca1433935 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollParallelizationHelper.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollParallelizationHelper.java @@ -154,19 +154,9 @@ class BulkByScrollParallelizationHelper { } slicedSource = request.source().copyWithNewSlice(sliceBuilder); } - slices[slice] = new SearchRequest() - .source(slicedSource) - .searchType(request.searchType()) - .indices(request.indices()) - .types(request.types()) - .routing(request.routing()) - .preference(request.preference()) - .requestCache(request.requestCache()) - .scroll(request.scroll()) - .indicesOptions(request.indicesOptions()); - if (request.allowPartialSearchResults() != null) { - slices[slice].allowPartialSearchResults(request.allowPartialSearchResults()); - } + SearchRequest searchRequest = new SearchRequest(request); + searchRequest.source(slicedSource); + slices[slice] = searchRequest; } return slices; } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java index 8e8a8abcc37..4823bbbc3d7 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java 
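// A minimal sketch of the copy-constructor pattern that BulkByScrollParallelizationHelper
// adopts above: instead of re-copying every SearchRequest field by hand when building
// sliced requests, clone the whole request and override only the source.
// SimpleRequest/SimpleSource are hypothetical stand-ins for the Elasticsearch types.
import java.util.ArrayList;
import java.util.List;

class SimpleSource {
    final int sliceId;
    final int maxSlices;

    SimpleSource(int sliceId, int maxSlices) {
        this.sliceId = sliceId;
        this.maxSlices = maxSlices;
    }
}

class SimpleRequest {
    String[] indices;
    String routing;
    SimpleSource source;

    SimpleRequest(String[] indices, String routing) {
        this.indices = indices;
        this.routing = routing;
    }

    // Copy constructor: a field added to SimpleRequest is copied here once,
    // so slicing code cannot silently forget to propagate it.
    SimpleRequest(SimpleRequest other) {
        this.indices = other.indices.clone();
        this.routing = other.routing;
        this.source = other.source;
    }
}

class SliceDemo {
    static List<SimpleRequest> slice(SimpleRequest request, int slices) {
        List<SimpleRequest> result = new ArrayList<>(slices);
        for (int slice = 0; slice < slices; slice++) {
            SimpleRequest copy = new SimpleRequest(request); // inherit everything
            copy.source = new SimpleSource(slice, slices);   // override only the slice
            result.add(copy);
        }
        return result;
    }

    public static void main(String[] args) {
        List<SimpleRequest> parts = slice(new SimpleRequest(new String[] {"test"}, "r1"), 3);
        System.out.println(parts.size()); // prints 3
    }
}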
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java @@ -56,6 +56,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.TransportSettings; import java.io.IOException; import java.net.InetSocketAddress; @@ -149,22 +150,22 @@ public class Netty4Transport extends TcpTransport { bootstrap.group(eventLoopGroup); bootstrap.channel(NioSocketChannel.class); - bootstrap.option(ChannelOption.TCP_NODELAY, TCP_NO_DELAY.get(settings)); - bootstrap.option(ChannelOption.SO_KEEPALIVE, TCP_KEEP_ALIVE.get(settings)); + bootstrap.option(ChannelOption.TCP_NODELAY, TransportSettings.TCP_NO_DELAY.get(settings)); + bootstrap.option(ChannelOption.SO_KEEPALIVE, TransportSettings.TCP_KEEP_ALIVE.get(settings)); - final ByteSizeValue tcpSendBufferSize = TCP_SEND_BUFFER_SIZE.get(settings); + final ByteSizeValue tcpSendBufferSize = TransportSettings.TCP_SEND_BUFFER_SIZE.get(settings); if (tcpSendBufferSize.getBytes() > 0) { bootstrap.option(ChannelOption.SO_SNDBUF, Math.toIntExact(tcpSendBufferSize.getBytes())); } - final ByteSizeValue tcpReceiveBufferSize = TCP_RECEIVE_BUFFER_SIZE.get(settings); + final ByteSizeValue tcpReceiveBufferSize = TransportSettings.TCP_RECEIVE_BUFFER_SIZE.get(settings); if (tcpReceiveBufferSize.getBytes() > 0) { bootstrap.option(ChannelOption.SO_RCVBUF, Math.toIntExact(tcpReceiveBufferSize.getBytes())); } bootstrap.option(ChannelOption.RCVBUF_ALLOCATOR, recvByteBufAllocator); - final boolean reuseAddress = TCP_REUSE_ADDRESS.get(settings); + final boolean reuseAddress = TransportSettings.TCP_REUSE_ADDRESS.get(settings); bootstrap.option(ChannelOption.SO_REUSEADDR, reuseAddress); return bootstrap; diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java index f467a8ad8f3..0e90559bd51 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java @@ -30,7 +30,7 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.mocksocket.MockSocket; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.TransportSettings; import org.junit.After; import org.junit.Before; @@ -51,8 +51,8 @@ public class Netty4SizeHeaderFrameDecoderTests extends ESTestCase { private final Settings settings = Settings.builder() .put("node.name", "NettySizeHeaderFrameDecoderTests") - .put(TcpTransport.BIND_HOST.getKey(), "127.0.0.1") - .put(TcpTransport.PORT.getKey(), "0") + .put(TransportSettings.BIND_HOST.getKey(), "127.0.0.1") + .put(TransportSettings.PORT.getKey(), "0") .build(); private ThreadPool threadPool; diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4TransportIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4TransportIT.java index ae0109a83b0..28d32f50bfc 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4TransportIT.java 
+++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4TransportIT.java @@ -37,8 +37,8 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TcpChannel; -import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportSettings; import java.io.IOException; import java.net.InetSocketAddress; @@ -80,7 +80,7 @@ public class Netty4TransportIT extends ESNetty4IntegTestCase { fail("Expected exception, but didn't happen"); } catch (ElasticsearchException e) { assertThat(e.getMessage(), containsString("MY MESSAGE")); - assertThat(channelProfileName, is(TcpTransport.DEFAULT_PROFILE)); + assertThat(channelProfileName, is(TransportSettings.DEFAULT_PROFILE)); } } @@ -116,7 +116,7 @@ public class Netty4TransportIT extends ESNetty4IntegTestCase { InetSocketAddress remoteAddress, byte status) throws IOException { String action = super.handleRequest(channel, profileName, stream, requestId, messageLengthBytes, version, remoteAddress, status); - channelProfileName = TcpTransport.DEFAULT_PROFILE; + channelProfileName = TransportSettings.DEFAULT_PROFILE; return action; } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyTransportMultiPortTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyTransportMultiPortTests.java index ecb720173f7..5d3e897202c 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyTransportMultiPortTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyTransportMultiPortTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.TransportSettings; import org.junit.Before; import java.util.Collections; @@ -53,7 +54,7 @@ public class NettyTransportMultiPortTests extends ESTestCase { public void testThatNettyCanBindToMultiplePorts() throws Exception { Settings settings = Settings.builder() .put("network.host", host) - .put(TcpTransport.PORT.getKey(), 22) // will not actually bind to this + .put(TransportSettings.PORT.getKey(), 22) // will not actually bind to this .put("transport.profiles.default.port", 0) .put("transport.profiles.client1.port", 0) .build(); @@ -70,7 +71,7 @@ public class NettyTransportMultiPortTests extends ESTestCase { public void testThatDefaultProfileInheritsFromStandardSettings() throws Exception { Settings settings = Settings.builder() .put("network.host", host) - .put(TcpTransport.PORT.getKey(), 0) + .put(TransportSettings.PORT.getKey(), 0) .put("transport.profiles.client1.port", 0) .build(); @@ -87,7 +88,7 @@ public class NettyTransportMultiPortTests extends ESTestCase { Settings settings = Settings.builder() .put("network.host", host) - .put(TcpTransport.PORT.getKey(), 0) + .put(TransportSettings.PORT.getKey(), 0) .put("transport.profiles.client1.whatever", "foo") .build(); @@ -103,7 +104,7 @@ public class NettyTransportMultiPortTests extends ESTestCase { public void testThatDefaultProfilePortOverridesGeneralConfiguration() throws Exception { Settings settings = Settings.builder() .put("network.host", host) - .put(TcpTransport.PORT.getKey(), 22) // will not actually bind 
to this + .put(TransportSettings.PORT.getKey(), 22) // will not actually bind to this .put("transport.profiles.default.port", 0) .build(); diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java index 5c3279eaf15..10c91b4e8d7 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java @@ -37,9 +37,8 @@ import org.elasticsearch.transport.BindTransportException; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.TcpChannel; -import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.TransportSettings; import java.net.InetAddress; import java.net.UnknownHostException; @@ -75,7 +74,7 @@ public class SimpleNetty4TransportTests extends AbstractSimpleTransportTestCase @Override protected MockTransportService build(Settings settings, Version version, ClusterSettings clusterSettings, boolean doHandshake) { - settings = Settings.builder().put(settings).put(TcpTransport.PORT.getKey(), "0").build(); + settings = Settings.builder().put(settings).put(TransportSettings.PORT.getKey(), "0").build(); MockTransportService transportService = nettyFromThreadPool(settings, threadPool, version, clusterSettings, doHandshake); transportService.start(); return transportService; @@ -97,9 +96,9 @@ public class SimpleNetty4TransportTests extends AbstractSimpleTransportTestCase int port = serviceA.boundAddress().publishAddress().getPort(); Settings settings = Settings.builder() .put(Node.NODE_NAME_SETTING.getKey(), "foobar") - .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") - .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") - .put("transport.tcp.port", port) + .put(TransportSettings.TRACE_LOG_INCLUDE_SETTING.getKey(), "") + .put(TransportSettings.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") + .put(TransportSettings.PORT.getKey(), port) .build(); ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); BindTransportException bindTransportException = expectThrows(BindTransportException.class, () -> { diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-774e9aefbc.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..bb3b62e257c --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +143e925924dcc9cb8ad1b584727c2c3b6c9e5633 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index f59d9fd9554..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -643eede8327f69cf0332cecd13100536daa5f04a \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-774e9aefbc.jar.sha1 
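// A small sketch of the settings migration shown in the transport diffs above, assuming
// the 7.x elasticsearch core jar on the classpath: the transport keys now live on
// org.elasticsearch.transport.TransportSettings rather than on TcpTransport or
// TransportService, but they still resolve to plain string keys, so Settings-building
// code only changes where the constants come from.
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.transport.TransportSettings;

class TransportSettingsDemo {
    public static void main(String[] args) {
        Settings settings = Settings.builder()
            // was TcpTransport.BIND_HOST
            .put(TransportSettings.BIND_HOST.getKey(), "127.0.0.1")
            // was TcpTransport.PORT / the literal "transport.tcp.port"
            .put(TransportSettings.PORT.getKey(), 0)
            .build();
        System.out.println(settings.get(TransportSettings.PORT.getKey())); // prints 0
    }
}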
b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..6619c51a126 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +53281a354724cf52babb0460e51d5b6ec99ecad4 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 7e66613b2a5..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -191f07773cd9e8fa76914d7ba1a79292fd465230 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-774e9aefbc.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..1c3043e79f4 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +d12356cdbcf4ed17586fef5e6fd1a6ea068821b5 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 46cc031cc5a..00000000000 --- a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f981e4522f24af287a47e582080e6f4eae3bfbd9 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-774e9aefbc.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..d1aaaa89ee9 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +b0f3b0409db20717a5229bc639f703eca97ebd4c \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index ef46ed664e6..00000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4e79b61404330b938aee53c19a7c1628b23c06e8 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-774e9aefbc.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..3c9b6983643 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +79f18e781a83062919eb60e06a96184ffda4a0c3 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 8e7ae277ca0..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dbd77958185f15d27510ae554b4f91366f477e41 \ No newline at end of file diff --git 
a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-774e9aefbc.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..f73173c28e3 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +3c78ca17dd641a3efe1bea980e5290159867b85d \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 040c35e123b..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0b7bcb14961ad1ff22157dff78497b409fd76050 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-774e9aefbc.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..d54fe8faafb --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +019b424ea61617788f460218fbdd9c2107a7ff5a \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 6c4b4eab5d1..00000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4e2d601547ffe3e8ccc814a25ce35e3ba7e369b3 \ No newline at end of file diff --git a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java index 2d46ec09862..576bdc7b242 100644 --- a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java @@ -166,7 +166,7 @@ public class AzureUnicastHostsProvider implements UnicastHostsProvider { InetAddress ipAddress = null; try { ipAddress = networkService.resolvePublishHostAddresses( - NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY)); + NetworkService.GLOBAL_NETWORK_PUBLISH_HOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY)); logger.trace("ip of current node: [{}]", ipAddress); } catch (IOException e) { // We can't find the publish host address... Hmmm. 
Too bad :-( diff --git a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java index 35c2e7336a7..a833d196ed5 100644 --- a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java +++ b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java @@ -37,7 +37,7 @@ import org.elasticsearch.node.Node; import org.elasticsearch.plugin.discovery.azure.classic.AzureDiscoveryPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.TransportSettings; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -118,7 +118,7 @@ public class AzureDiscoveryClusterFormationTests extends ESIntegTestCase { return Settings.builder().put(super.nodeSettings(nodeOrdinal)) .put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), AzureDiscoveryPlugin.AZURE) .put(Environment.PATH_LOGS_SETTING.getKey(), resolve) - .put(TcpTransport.PORT.getKey(), 0) + .put(TransportSettings.PORT.getKey(), 0) .put(Node.WRITE_PORTS_FILE_SETTING.getKey(), "true") .put(AzureComputeService.Management.ENDPOINT_SETTING.getKey(), "https://" + InetAddress.getLoopbackAddress().getHostAddress() + ":" + httpsServer.getAddress().getPort()) diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java index 98f2febd795..1cdcca1f222 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java @@ -20,19 +20,20 @@ package org.elasticsearch.discovery.ec2; import com.amazonaws.services.ec2.model.Tag; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.MockTcpTransport; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.nio.MockNioTransport; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -72,8 +73,9 @@ public class Ec2DiscoveryTests extends ESTestCase { @Before public void createTransportService() { NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.emptyList()); - final Transport transport = new MockTcpTransport(Settings.EMPTY, threadPool, BigArrays.NON_RECYCLING_INSTANCE, - new NoneCircuitBreakerService(), namedWriteableRegistry, new NetworkService(Collections.emptyList())) { + final Transport transport 
= new MockNioTransport(Settings.EMPTY, Version.CURRENT, threadPool, + new NetworkService(Collections.emptyList()), PageCacheRecycler.NON_RECYCLING_INSTANCE, namedWriteableRegistry, + new NoneCircuitBreakerService()) { @Override public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws UnknownHostException { // we just need to ensure we don't resolve DNS here diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java index dedf56b836e..9e7f2429b08 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java @@ -164,7 +164,7 @@ public class Ec2NetworkTests extends ESTestCase { NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver())); InetAddress[] addresses = networkService.resolveBindHostAddresses( - NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING.get(nodeSettings).toArray(Strings.EMPTY_ARRAY)); + NetworkService.GLOBAL_NETWORK_BIND_HOST_SETTING.get(nodeSettings).toArray(Strings.EMPTY_ARRAY)); if (expected == null) { fail("We should get an IOException, resolved addresses:" + Arrays.toString(addresses)); } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java index a593faabcf6..4b733dd6823 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java @@ -120,7 +120,7 @@ public class GceUnicastHostsProvider implements UnicastHostsProvider { String ipAddress = null; try { InetAddress inetAddress = networkService.resolvePublishHostAddresses( - NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY)); + NetworkService.GLOBAL_NETWORK_PUBLISH_HOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY)); if (inetAddress != null) { ipAddress = NetworkAddress.format(inetAddress); } diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceNetworkTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceNetworkTests.java index 94f2959917d..c72173dd20a 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceNetworkTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceNetworkTests.java @@ -110,7 +110,7 @@ public class GceNetworkTests extends ESTestCase { NetworkService networkService = new NetworkService(Collections.singletonList(new GceNameResolver(mock))); try { InetAddress[] addresses = networkService.resolveBindHostAddresses( - NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING.get(nodeSettings).toArray(Strings.EMPTY_ARRAY)); + NetworkService.GLOBAL_NETWORK_BIND_HOST_SETTING.get(nodeSettings).toArray(Strings.EMPTY_ARRAY)); if (expected == null) { fail("We should get an IllegalArgumentException when setting network.host: _gce:doesnotexist_"); } diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java index
06f4b728c8c..4156fd20672 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java @@ -51,6 +51,7 @@ import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.TextFieldMapper; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.indices.IndicesService; @@ -130,7 +131,7 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase { IndexShard shard = indexService.getShard(0); shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, - sourceToParse, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + sourceToParse, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); shard.refresh("test"); try (Engine.Searcher searcher = shard.acquireSearcher("test")) { LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader(); @@ -185,7 +186,7 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase { IndexShard shard = indexService.getShard(0); shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, - sourceToParse, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + sourceToParse, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); shard.refresh("test"); try (Engine.Searcher searcher = shard.acquireSearcher("test")) { LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader(); @@ -384,7 +385,7 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase { IndexShard shard = indexService.getShard(0); shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, - sourceToParse, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + sourceToParse, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); shard.refresh("test"); try (Engine.Searcher searcher = shard.acquireSearcher("test")) { LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader(); @@ -426,7 +427,7 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase { IndexShard shard = indexService.getShard(0); shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, - sourceToParse, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + sourceToParse, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); shard.refresh("test"); try (Engine.Searcher searcher = shard.acquireSearcher("test")) { LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader(); diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index d35a248f5a5..b86e5fb81f7 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -23,8 +23,8 @@ esplugin { } dependencies { - compile 'com.google.cloud:google-cloud-storage:1.40.0' - compile 'com.google.cloud:google-cloud-core:1.40.0' + compile 'com.google.cloud:google-cloud-storage:1.55.0' + compile 'com.google.cloud:google-cloud-core:1.55.0' compile 'com.google.guava:guava:20.0' compile "joda-time:joda-time:${versions.joda}" compile 
'com.google.http-client:google-http-client:1.24.1' @@ -40,7 +40,7 @@ dependencies { compile 'com.google.code.gson:gson:2.7' compile 'com.google.api.grpc:proto-google-common-protos:1.12.0' compile 'com.google.api.grpc:proto-google-iam-v1:0.12.0' - compile 'com.google.cloud:google-cloud-core-http:1.40.0' + compile 'com.google.cloud:google-cloud-core-http:1.55.0' compile 'com.google.auth:google-auth-library-credentials:0.10.0' compile 'com.google.auth:google-auth-library-oauth2-http:0.10.0' compile 'com.google.oauth-client:google-oauth-client:1.24.1' diff --git a/plugins/repository-gcs/licenses/google-cloud-core-1.40.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-1.40.0.jar.sha1 deleted file mode 100644 index 7562ead12e9..00000000000 --- a/plugins/repository-gcs/licenses/google-cloud-core-1.40.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4985701f989030e262cf8f4e38cc954115f5b082 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-1.55.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-1.55.0.jar.sha1 new file mode 100644 index 00000000000..7c00bf52c41 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-core-1.55.0.jar.sha1 @@ -0,0 +1 @@ +9e50a2a559128b7938cfd6598753d4c7383472dc \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-http-1.40.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-http-1.40.0.jar.sha1 deleted file mode 100644 index 2761bfdc745..00000000000 --- a/plugins/repository-gcs/licenses/google-cloud-core-http-1.40.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -67f5806beda32894f1e6c9527925b64199fd2e4f \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-http-1.55.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-http-1.55.0.jar.sha1 new file mode 100644 index 00000000000..f4179201bcc --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-core-http-1.55.0.jar.sha1 @@ -0,0 +1 @@ +f26862445efffd8cb3a7f4b1f2a91b7c5143ee1f \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-storage-1.40.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-storage-1.40.0.jar.sha1 deleted file mode 100644 index 33e83b73712..00000000000 --- a/plugins/repository-gcs/licenses/google-cloud-storage-1.40.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fabefef46f07d1e334123f0de17702708b4dfbd1 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-storage-1.55.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-storage-1.55.0.jar.sha1 new file mode 100644 index 00000000000..abe0065ab5c --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-storage-1.55.0.jar.sha1 @@ -0,0 +1 @@ +ca19f55eeb96609243bf3a15fdafd497432f6673 \ No newline at end of file diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java index cf7395ea1f1..97c7e2ab76b 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java @@ -80,6 +80,11 @@ class MockStorage implements Storage { } } + @Override + public Bucket lockRetentionPolicy(final BucketInfo bucket, final BucketTargetOption... 
options) { + return null; + } + @Override public Blob get(BlobId blob) { if (bucketName.equals(blob.getBucket())) { diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/NioTransportIT.java b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/NioTransportIT.java index c6452e0be91..61f07aa0162 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/NioTransportIT.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/NioTransportIT.java @@ -37,8 +37,8 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TcpChannel; -import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportSettings; import java.io.IOException; import java.net.InetSocketAddress; @@ -80,7 +80,7 @@ public class NioTransportIT extends NioIntegTestCase { fail("Expected exception, but didn't happen"); } catch (ElasticsearchException e) { assertThat(e.getMessage(), containsString("MY MESSAGE")); - assertThat(channelProfileName, is(TcpTransport.DEFAULT_PROFILE)); + assertThat(channelProfileName, is(TransportSettings.DEFAULT_PROFILE)); } } @@ -112,7 +112,7 @@ public class NioTransportIT extends NioIntegTestCase { InetSocketAddress remoteAddress, byte status) throws IOException { String action = super.handleRequest(channel, profileName, stream, requestId, messageLengthBytes, version, remoteAddress, status); - channelProfileName = TcpTransport.DEFAULT_PROFILE; + channelProfileName = TransportSettings.DEFAULT_PROFILE; return action; } diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java index 9dd3bc3b957..82a99103627 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java @@ -37,9 +37,8 @@ import org.elasticsearch.transport.BindTransportException; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.TcpChannel; -import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.TransportSettings; import java.io.IOException; import java.net.InetAddress; @@ -79,7 +78,7 @@ public class SimpleNioTransportTests extends AbstractSimpleTransportTestCase { @Override protected MockTransportService build(Settings settings, Version version, ClusterSettings clusterSettings, boolean doHandshake) { settings = Settings.builder().put(settings) - .put(TcpTransport.PORT.getKey(), "0") + .put(TransportSettings.PORT.getKey(), "0") .build(); MockTransportService transportService = nioFromThreadPool(settings, threadPool, version, clusterSettings, doHandshake); transportService.start(); @@ -104,9 +103,9 @@ public class SimpleNioTransportTests extends AbstractSimpleTransportTestCase { int port = serviceA.boundAddress().publishAddress().getPort(); Settings settings = Settings.builder() .put(Node.NODE_NAME_SETTING.getKey(), "foobar") - .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") - 
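// A hedged sketch of why MockStorage above had to gain a lockRetentionPolicy override:
// the google-cloud-storage 1.40.0 -> 1.55.0 upgrade adds methods to the Storage
// interface, and every test double implementing that interface must now provide them.
// StorageLike/BucketLike are hypothetical stand-ins, not the real google-cloud API.
interface BucketLike {
    String name();
}

interface StorageLike {
    BucketLike get(String bucket);

    // Added in a newer library version; all implementers must override it.
    BucketLike lockRetentionPolicy(String bucket);
}

class MockStorageSketch implements StorageLike {
    @Override
    public BucketLike get(String bucket) {
        return () -> bucket;
    }

    @Override
    public BucketLike lockRetentionPolicy(String bucket) {
        // Not exercised by these tests, so the mock returns null, mirroring the diff.
        return null;
    }

    public static void main(String[] args) {
        StorageLike storage = new MockStorageSketch();
        System.out.println(storage.get("bucket").name());          // prints "bucket"
        System.out.println(storage.lockRetentionPolicy("bucket")); // prints null
    }
}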
.put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") - .put("transport.tcp.port", port) + .put(TransportSettings.TRACE_LOG_INCLUDE_SETTING.getKey(), "") + .put(TransportSettings.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") + .put(TransportSettings.PORT.getKey(), port) .build(); ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); BindTransportException bindTransportException = expectThrows(BindTransportException.class, () -> { diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 955cf35afac..3ce23c8e6a2 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.rest.action.document.RestGetAction; +import org.elasticsearch.rest.action.document.RestUpdateAction; import org.elasticsearch.rest.action.search.RestExplainAction; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Booleans; @@ -626,6 +627,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase { String docId = (String) hit.get("_id"); Request updateRequest = new Request("POST", "/" + index + "/doc/" + docId + "/_update"); + updateRequest.setOptions(expectWarnings(RestUpdateAction.TYPES_DEPRECATION_MESSAGE)); updateRequest.setJsonEntity("{ \"doc\" : { \"foo\": \"bar\"}}"); client().performRequest(updateRequest); diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json index 2d13b49e3cb..37710cf07a1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json @@ -3,8 +3,8 @@ "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/docs-update.html", "methods": ["POST"], "url": { - "path": "/{index}/{type}/{id}/_update", - "paths": ["/{index}/{type}/{id}/_update", "/{index}/_doc/{id}/_update"], + "path": "/{index}/_update/{id}", + "paths": ["/{index}/_update/{id}", "/{index}/{type}/{id}/_update"], "parts": { "id": { "type": "string", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml new file mode 100644 index 00000000000..2a25055be32 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml @@ -0,0 +1,327 @@ +setup: + - skip: + version: " - 6.99.99" + reason: "Implemented in 7.0" + + - do: + indices.create: + index: test + body: + mappings: + test: + properties: + text: + type: text + analyzer: standard + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "test", "_type": "test", "_id": "1"}}' + - '{"text" : "Some like it hot, some like it cold"}' + - '{"index": {"_index": "test", "_type": "test", "_id": "2"}}' + - '{"text" : "Its cold outside, theres no kind of atmosphere"}' + - '{"index": {"_index": "test", "_type": "test", "_id": "3"}}' + - '{"text" : "Baby its cold there outside"}' + - '{"index": {"_index": "test", "_type": "test", "_id": "4"}}' + - 
'{"text" : "Outside it is cold and wet"}' + +--- +"Test ordered matching": + - do: + search: + index: test + body: + query: + intervals: + text: + match: + query: "cold outside" + ordered: true + - match: { hits.total.value: 2 } + +--- +"Test default unordered matching": + - do: + search: + index: test + body: + query: + intervals: + text: + match: + query: "cold outside" + - match: { hits.total.value: 3 } + +--- +"Test explicit unordered matching": + - do: + search: + index: test + body: + query: + intervals: + text: + match: + query: "cold outside" + ordered: false + - match: { hits.total.value: 3 } + +--- +"Test phrase matching": + - do: + search: + index: test + body: + query: + intervals: + text: + match: + query: "cold outside" + ordered: true + max_gaps: 0 + - match: { hits.total.value: 1 } + +--- +"Test unordered max_gaps matching": + - do: + search: + index: test + body: + query: + intervals: + text: + match: + query: "cold outside" + max_gaps: 1 + - match: { hits.total.value: 2 } + +--- +"Test ordered max_gaps matching": + - do: + search: + index: test + body: + query: + intervals: + text: + match: + query: "cold outside" + max_gaps: 0 + ordered: true + - match: { hits.total.value: 1 } + +--- +"Test ordered combination with disjunction": + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - any_of: + intervals: + - match: + query: "cold" + - match: + query: "outside" + - match: + query: "atmosphere" + ordered: true + - match: { hits.total.value: 1 } + +--- +"Test ordered combination with max_gaps": + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: "cold" + - match: + query: "outside" + max_gaps: 0 + ordered: true + - match: { hits.total.value: 1 } + +--- +"Test ordered combination": + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: "cold" + - match: + query: "outside" + ordered: true + - match: { hits.total.value: 2 } + +--- +"Test unordered combination": + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: "cold" + - match: + query: "outside" + max_gaps: 1 + ordered: false + - match: { hits.total.value: 2 } + +--- +"Test block combination": + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: "cold" + - match: + query: "outside" + ordered: true + max_gaps: 0 + - match: { hits.total.value: 1 } + + +--- +"Test containing": + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: "cold" + - match: + query: "outside" + ordered: false + filter: + containing: + match: + query: "is" + - match: { hits.total.value: 1 } + + +--- +"Test not containing": + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: "cold" + - match: + query: "outside" + ordered: false + filter: + not_containing: + match: + query: "is" + - match: { hits.total.value: 2 } + +--- +"Test contained_by": + - do: + search: + index: test + body: + query: + intervals: + text: + match: + query: "is" + filter: + contained_by: + all_of: + intervals: + - match: + query: "cold" + - match: + query: "outside" + ordered: false + - match: { hits.total.value: 1 } + +--- +"Test not_contained_by": + - do: + search: + index: test + body: + query: + intervals: + text: + match: + query: "it" + filter: + not_contained_by: + all_of: + intervals: 
+ - match: + query: "cold" + - match: + query: "outside" + - match: { hits.total.value: 1 } + +--- +"Test not_overlapping": + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: "cold" + - match: + query: "outside" + ordered: true + filter: + not_overlapping: + all_of: + intervals: + - match: + query: "baby" + - match: + query: "there" + ordered: false + - match: { hits.total.value: 1 } + + diff --git a/server/licenses/lucene-analyzers-common-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-analyzers-common-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..b471f9fe139 --- /dev/null +++ b/server/licenses/lucene-analyzers-common-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +729c6a031e3849874028020301e1f45a05d5a0bb \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/server/licenses/lucene-analyzers-common-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 110e4c210f6..00000000000 --- a/server/licenses/lucene-analyzers-common-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -37be26a0881a2ae009a7057d6f384b75136d98f7 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-backward-codecs-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..5c6c5cc229d --- /dev/null +++ b/server/licenses/lucene-backward-codecs-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +5f831dea7c0bafd6306653144388a8ecd1186158 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/server/licenses/lucene-backward-codecs-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index b320767fd0b..00000000000 --- a/server/licenses/lucene-backward-codecs-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a5b18174ee3936b29218a0320b2a8b94e7150871 \ No newline at end of file diff --git a/server/licenses/lucene-core-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-core-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..2708b818d44 --- /dev/null +++ b/server/licenses/lucene-core-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +49b3ac44b6749a7ebf0c2e41a81e7910133d2fcc \ No newline at end of file diff --git a/server/licenses/lucene-core-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/server/licenses/lucene-core-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index ef4b48a8bd6..00000000000 --- a/server/licenses/lucene-core-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -47253358ac340c35845c2a1007849db4234740da \ No newline at end of file diff --git a/server/licenses/lucene-grouping-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-grouping-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..2c7a4912dea --- /dev/null +++ b/server/licenses/lucene-grouping-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +0396dff0af03463e784b86fd1a24008e2f07daa2 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/server/licenses/lucene-grouping-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 2b60ce09d94..00000000000 --- a/server/licenses/lucene-grouping-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -98799c869205e22d903a797dcb495c31954699e0 \ No newline at end of file diff --git 
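// A sketch of the search that the new 230_interval_query.yml tests above exercise,
// expressed with the low-level REST client Request/setJsonEntity API that appears
// elsewhere in this diff; the localhost:9200 client setup is an assumption.
import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

class IntervalsQueryDemo {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request search = new Request("GET", "/test/_search");
            // "cold" followed by "outside" with no gaps in between, i.e. the
            // "Test phrase matching" case from the YAML above.
            search.setJsonEntity(
                "{ \"query\": { \"intervals\": { \"text\": {"
                    + " \"match\": { \"query\": \"cold outside\", \"ordered\": true, \"max_gaps\": 0 }"
                    + "} } } }");
            Response response = client.performRequest(search);
            System.out.println(response.getStatusLine());
        }
    }
}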
a/server/licenses/lucene-highlighter-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-highlighter-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..165e6dadbf5 --- /dev/null +++ b/server/licenses/lucene-highlighter-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +d552b941fef2a64ab4c9b2509906950257f92262 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/server/licenses/lucene-highlighter-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 1c304157faf..00000000000 --- a/server/licenses/lucene-highlighter-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7bb476d98f9e9caf7ba62ac1b0feb791979c36c9 \ No newline at end of file diff --git a/server/licenses/lucene-join-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-join-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..d80ff31f145 --- /dev/null +++ b/server/licenses/lucene-join-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +192e9374124c14c7cd594a6f87aed61806e6e402 \ No newline at end of file diff --git a/server/licenses/lucene-join-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/server/licenses/lucene-join-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index b787c7054b9..00000000000 --- a/server/licenses/lucene-join-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a7a3562acc7f0c20ad9d24bc21f140d920de973b \ No newline at end of file diff --git a/server/licenses/lucene-memory-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-memory-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..179dd72165f --- /dev/null +++ b/server/licenses/lucene-memory-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +b9345c0321a3f4c7aa69ecfaf15cdee74180e409 \ No newline at end of file diff --git a/server/licenses/lucene-memory-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/server/licenses/lucene-memory-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 4ff43ebbcba..00000000000 --- a/server/licenses/lucene-memory-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0b76d08438b959417d2372512ce6f43347085f51 \ No newline at end of file diff --git a/server/licenses/lucene-misc-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-misc-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..abfd221dcdf --- /dev/null +++ b/server/licenses/lucene-misc-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +ace540746369ded8b2f354d35002f5ccf6a58aab \ No newline at end of file diff --git a/server/licenses/lucene-misc-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/server/licenses/lucene-misc-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index f9b94caf6b6..00000000000 --- a/server/licenses/lucene-misc-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0ed089a34dbd66f8b153d292d6dd2a04f99ce8af \ No newline at end of file diff --git a/server/licenses/lucene-queries-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-queries-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..7acef8ed7f8 --- /dev/null +++ b/server/licenses/lucene-queries-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +baaf3082d703c0e89ea5f131de878b586d302e34 \ No newline at end of file diff --git a/server/licenses/lucene-queries-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/server/licenses/lucene-queries-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 4306c03c75e..00000000000 --- 
a/server/licenses/lucene-queries-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -59b63afbf361b9856d544d03e3117e4cd6ef1b18 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-queryparser-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..9e7b927a563 --- /dev/null +++ b/server/licenses/lucene-queryparser-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +799b3ffee1401fba8874b0a8ce1ab203c98d9708 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/server/licenses/lucene-queryparser-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 58ea47821d1..00000000000 --- a/server/licenses/lucene-queryparser-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -63266335c7e8839d944bbc152aa110f921878cda \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-sandbox-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..e8bb1cbe8ca --- /dev/null +++ b/server/licenses/lucene-sandbox-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +a6293e2d890e4b1be455524466474a3eaac8be9a \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/server/licenses/lucene-sandbox-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 114cc004e0f..00000000000 --- a/server/licenses/lucene-sandbox-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4d37ca0d28dce66953bbcc4001ba08dc1af4d3b2 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-spatial-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..f646f423a8d --- /dev/null +++ b/server/licenses/lucene-spatial-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +77490eb78316afdec9d0889868997777caf820c0 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/server/licenses/lucene-spatial-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 6fda0117bb1..00000000000 --- a/server/licenses/lucene-spatial-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -47dfd50d6911b0a127d261068b0bfe496232e0c1 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-spatial-extras-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..da0905ca36b --- /dev/null +++ b/server/licenses/lucene-spatial-extras-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +4bb18049052ef738702ff5c0b294a5986971ed59 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/server/licenses/lucene-spatial-extras-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 2733d34103e..00000000000 --- a/server/licenses/lucene-spatial-extras-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -43f6876126ebb024e76a23d2247897d4a98c9c0f \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-spatial3d-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..60f7aaf2b53 --- /dev/null +++ b/server/licenses/lucene-spatial3d-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +936aa91d3b48bd559ee9d25303934f06aa95c1f7 \ No 
newline at end of file diff --git a/server/licenses/lucene-spatial3d-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/server/licenses/lucene-spatial3d-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 6ee59b7015d..00000000000 --- a/server/licenses/lucene-spatial3d-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b6fa3f8d933fc7c2b91acd17edf3e849a34f89d7 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-suggest-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..1ea6540ff82 --- /dev/null +++ b/server/licenses/lucene-suggest-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +268747a9cbdebb492268aca4558968f9d46d37a9 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/server/licenses/lucene-suggest-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index 14f415b93eb..00000000000 --- a/server/licenses/lucene-suggest-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0733cf78f94066053d3148448a565e71064515ed \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java index 6ce7245503b..56fcba85167 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java @@ -92,12 +92,13 @@ public class TransportRestoreSnapshotAction extends TransportMasterNodeAction primary.applyIndexOperationOnPrimary(request.version(), request.versionType(), sourceToParse, - request.getAutoGeneratedTimestamp(), request.isRetry()), + request.ifSeqNoMatch(), request.ifPrimaryTermMatch(), request.getAutoGeneratedTimestamp(), request.isRetry()), e -> primary.getFailedIndexResult(e, request.version()), context::markOperationAsExecuted, mapping -> mappingUpdater.updateMappings(mapping, primary.shardId(), request.type())); @@ -471,7 +471,8 @@ public class TransportShardBulkAction extends TransportWriteAction primary.applyDeleteOperationOnPrimary(request.version(), request.type(), request.id(), request.versionType()), + () -> primary.applyDeleteOperationOnPrimary(request.version(), request.type(), request.id(), request.versionType(), + request.ifSeqNoMatch(), request.ifPrimaryTermMatch()), e -> primary.getFailedDeleteResult(e, request.version()), context::markOperationAsExecuted, mapping -> mappingUpdater.updateMappings(mapping, primary.shardId(), request.type())); diff --git a/server/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java b/server/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java index ae1271537f7..c9f2df56333 100644 --- a/server/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java +++ b/server/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import java.io.IOException; @@ -57,6 +58,8 @@ public class DeleteRequest extends ReplicatedWriteRequest private 
String routing; private long version = Versions.MATCH_ANY; private VersionType versionType = VersionType.INTERNAL; + private long ifSeqNoMatch = SequenceNumbers.UNASSIGNED_SEQ_NO; + private long ifPrimaryTermMatch = 0; public DeleteRequest() { } @@ -112,6 +115,12 @@ public class DeleteRequest extends ReplicatedWriteRequest if (versionType == VersionType.FORCE) { validationException = addValidationError("version type [force] may no longer be used", validationException); } + + if (ifSeqNoMatch != SequenceNumbers.UNASSIGNED_SEQ_NO && ( + versionType != VersionType.INTERNAL || version != Versions.MATCH_ANY + )) { + validationException = addValidationError("compare and write operations can not use versioning", validationException); + } return validationException; } @@ -194,6 +203,32 @@ public class DeleteRequest extends ReplicatedWriteRequest return this; } + public long ifSeqNoMatch() { + return ifSeqNoMatch; + } + + public long ifPrimaryTermMatch() { + return ifPrimaryTermMatch; + } + + public DeleteRequest setIfMatch(long seqNo, long term) { + if (term == 0 && seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) { + throw new IllegalArgumentException("seqNo is set, but primary term is [0]"); + } + if (term != 0 && seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO) { + throw new IllegalArgumentException("seqNo is unassigned, but primary term is [" + term + "]"); + } + if (seqNo < 0 && seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) { + throw new IllegalArgumentException("sequence numbers must be non negative. got [" + seqNo + "]."); + } + if (term < 0) { + throw new IllegalArgumentException("primary term must be non negative. got [" + term + "]"); + } + ifSeqNoMatch = seqNo; + ifPrimaryTermMatch = term; + return this; + } + @Override public VersionType versionType() { return this.versionType; @@ -215,6 +250,13 @@ public class DeleteRequest extends ReplicatedWriteRequest } version = in.readLong(); versionType = VersionType.fromValue(in.readByte()); + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + ifSeqNoMatch = in.readZLong(); + ifPrimaryTermMatch = in.readVLong(); + } else { + ifSeqNoMatch = SequenceNumbers.UNASSIGNED_SEQ_NO; + ifPrimaryTermMatch = 0; + } } @Override @@ -228,6 +270,15 @@ public class DeleteRequest extends ReplicatedWriteRequest } out.writeLong(version); out.writeByte(versionType.getValue()); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeZLong(ifSeqNoMatch); + out.writeVLong(ifPrimaryTermMatch); + } else if (ifSeqNoMatch != SequenceNumbers.UNASSIGNED_SEQ_NO || ifPrimaryTermMatch != 0) { + assert false : "setIfMatch [" + ifSeqNoMatch + "], currentDocTerm [" + ifPrimaryTermMatch + "]"; + throw new IllegalStateException( + "sequence number based compare and write is not supported until all nodes are on version 7.0 or higher. 
" + + "Stream version [" + out.getVersion() + "]"); + } } @Override diff --git a/server/src/main/java/org/elasticsearch/action/delete/DeleteRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/delete/DeleteRequestBuilder.java index 9060af8e17c..f0df2d3558b 100644 --- a/server/src/main/java/org/elasticsearch/action/delete/DeleteRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/delete/DeleteRequestBuilder.java @@ -80,4 +80,14 @@ public class DeleteRequestBuilder extends ReplicationRequestBuilder implement private long autoGeneratedTimestamp = UNSET_AUTO_GENERATED_TIMESTAMP; private boolean isRetry = false; + private long ifSeqNoMatch = SequenceNumbers.UNASSIGNED_SEQ_NO; + private long ifPrimaryTermMatch = 0; public IndexRequest() { @@ -164,6 +167,12 @@ public class IndexRequest extends ReplicatedWriteRequest implement validationException); return validationException; } + + if (ifSeqNoMatch != SequenceNumbers.UNASSIGNED_SEQ_NO || ifPrimaryTermMatch != 0) { + validationException = addValidationError("create operations do not support compare and set. use index instead", + validationException); + return validationException; + } } if (opType() != OpType.INDEX && id == null) { @@ -192,6 +201,12 @@ public class IndexRequest extends ReplicatedWriteRequest implement validationException = addValidationError("pipeline cannot be an empty string", validationException); } + if (ifSeqNoMatch != SequenceNumbers.UNASSIGNED_SEQ_NO && ( + versionType != VersionType.INTERNAL || version != Versions.MATCH_ANY + )) { + validationException = addValidationError("compare and write operations can not use versioning", validationException); + } + return validationException; } @@ -471,6 +486,33 @@ public class IndexRequest extends ReplicatedWriteRequest implement return this; } + public IndexRequest ifMatch(long seqNo, long term) { + if (term == 0 && seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) { + throw new IllegalArgumentException("seqNo is set, but primary term is [0]"); + } + + if (term != 0 && seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO) { + throw new IllegalArgumentException("seqNo is unassigned, but primary term is [" + term + "]"); + } + if (seqNo < 0 && seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) { + throw new IllegalArgumentException("sequence numbers must be non negative. got [" + seqNo + "]."); + } + if (term < 0) { + throw new IllegalArgumentException("primary term must be non negative. 
got [" + term + "]"); + } + ifSeqNoMatch = seqNo; + ifPrimaryTermMatch = term; + return this; + } + + public long ifSeqNoMatch() { + return ifSeqNoMatch; + } + + public long ifPrimaryTermMatch() { + return ifPrimaryTermMatch; + } + @Override public VersionType versionType() { return this.versionType; @@ -492,6 +534,8 @@ public class IndexRequest extends ReplicatedWriteRequest implement // generate id if not already provided if (id == null) { assert autoGeneratedTimestamp == -1 : "timestamp has already been generated!"; + assert ifSeqNoMatch == SequenceNumbers.UNASSIGNED_SEQ_NO; + assert ifPrimaryTermMatch == 0; autoGeneratedTimestamp = Math.max(0, System.currentTimeMillis()); // extra paranoia String uid; if (indexCreatedVersion.onOrAfter(Version.V_6_0_0_beta1)) { @@ -533,6 +577,13 @@ public class IndexRequest extends ReplicatedWriteRequest implement } else { contentType = null; } + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + ifSeqNoMatch = in.readZLong(); + ifPrimaryTermMatch = in.readVLong(); + } else { + ifSeqNoMatch = SequenceNumbers.UNASSIGNED_SEQ_NO; + ifPrimaryTermMatch = 0; + } } @Override @@ -564,6 +615,15 @@ public class IndexRequest extends ReplicatedWriteRequest implement } else { out.writeBoolean(false); } + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeZLong(ifSeqNoMatch); + out.writeVLong(ifPrimaryTermMatch); + } else if (ifSeqNoMatch != SequenceNumbers.UNASSIGNED_SEQ_NO || ifPrimaryTermMatch != 0) { + assert false : "setIfMatch [" + ifSeqNoMatch + "], currentDocTem [" + ifPrimaryTermMatch + "]"; + throw new IllegalStateException( + "sequence number based compare and write is not supported until all nodes are on version 7.0 or higher. " + + "Stream version [" + out.getVersion() + "]"); + } } @Override diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java index b81d09abda3..8ca32d40e8c 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java @@ -199,6 +199,15 @@ public class IndexRequestBuilder extends ReplicationRequestBuilder fetchResults, IntFunction resultsLookup) { - final boolean sorted = reducedQueryPhase.isSortedByField; - ScoreDoc[] sortedDocs = reducedQueryPhase.scoreDocs; + SortedTopDocs sortedTopDocs = reducedQueryPhase.sortedTopDocs; int sortScoreIndex = -1; - if (sorted) { - for (int i = 0; i < reducedQueryPhase.sortField.length; i++) { - if (reducedQueryPhase.sortField[i].getType() == SortField.Type.SCORE) { + if (sortedTopDocs.isSortedByField) { + SortField[] sortFields = sortedTopDocs.sortFields; + for (int i = 0; i < sortFields.length; i++) { + if (sortFields[i].getType() == SortField.Type.SCORE) { sortScoreIndex = i; } } @@ -362,12 +367,12 @@ public final class SearchPhaseController { int from = ignoreFrom ? 
0 : reducedQueryPhase.from; int numSearchHits = (int) Math.min(reducedQueryPhase.fetchHits - from, reducedQueryPhase.size); // with collapsing we can have more fetch hits than sorted docs - numSearchHits = Math.min(sortedDocs.length, numSearchHits); + numSearchHits = Math.min(sortedTopDocs.scoreDocs.length, numSearchHits); // merge hits List hits = new ArrayList<>(); if (!fetchResults.isEmpty()) { for (int i = 0; i < numSearchHits; i++) { - ScoreDoc shardDoc = sortedDocs[i]; + ScoreDoc shardDoc = sortedTopDocs.scoreDocs[i]; SearchPhaseResult fetchResultProvider = resultsLookup.apply(shardDoc.shardIndex); if (fetchResultProvider == null) { // this can happen if we are hitting a shard failure during the fetch phase @@ -381,21 +386,21 @@ public final class SearchPhaseController { assert index < fetchResult.hits().getHits().length : "not enough hits fetched. index [" + index + "] length: " + fetchResult.hits().getHits().length; SearchHit searchHit = fetchResult.hits().getHits()[index]; - if (sorted == false) { - searchHit.score(shardDoc.score); - } searchHit.shard(fetchResult.getSearchShardTarget()); - if (sorted) { + if (sortedTopDocs.isSortedByField) { FieldDoc fieldDoc = (FieldDoc) shardDoc; searchHit.sortValues(fieldDoc.fields, reducedQueryPhase.sortValueFormats); if (sortScoreIndex != -1) { searchHit.score(((Number) fieldDoc.fields[sortScoreIndex]).floatValue()); } + } else { + searchHit.score(shardDoc.score); } hits.add(searchHit); } } - return new SearchHits(hits.toArray(new SearchHit[0]), reducedQueryPhase.totalHits, reducedQueryPhase.maxScore); + return new SearchHits(hits.toArray(new SearchHit[0]), reducedQueryPhase.totalHits, + reducedQueryPhase.maxScore, sortedTopDocs.sortFields, sortedTopDocs.collapseField, sortedTopDocs.collapseValues); } /** @@ -436,8 +441,7 @@ public final class SearchPhaseController { if (queryResults.isEmpty()) { // early terminate we have nothing to reduce final TotalHits totalHits = topDocsStats.getTotalHits(); return new ReducedQueryPhase(totalHits, topDocsStats.fetchHits, topDocsStats.maxScore, - timedOut, terminatedEarly, null, null, null, EMPTY_DOCS, null, - null, numReducePhases, false, 0, 0, true); + timedOut, terminatedEarly, null, null, null, SortedTopDocs.EMPTY, null, numReducePhases, 0, 0, true); } final QuerySearchResult firstResult = queryResults.stream().findFirst().get().queryResult(); final boolean hasSuggest = firstResult.suggest() != null; @@ -499,11 +503,11 @@ public final class SearchPhaseController { final InternalAggregations aggregations = aggregationsList.isEmpty() ? null : reduceAggs(aggregationsList, firstResult.pipelineAggregators(), reduceContext); final SearchProfileShardResults shardResults = profileResults.isEmpty() ? 
null : new SearchProfileShardResults(profileResults); - final SortedTopDocs scoreDocs = sortDocs(isScrollRequest, queryResults, bufferedTopDocs, topDocsStats, from, size); + final SortedTopDocs sortedTopDocs = sortDocs(isScrollRequest, queryResults, bufferedTopDocs, topDocsStats, from, size); final TotalHits totalHits = topDocsStats.getTotalHits(); return new ReducedQueryPhase(totalHits, topDocsStats.fetchHits, topDocsStats.maxScore, - timedOut, terminatedEarly, suggest, aggregations, shardResults, scoreDocs.scoreDocs, scoreDocs.sortFields, - firstResult.sortValueFormats(), numReducePhases, scoreDocs.isSortedByField, size, from, false); + timedOut, terminatedEarly, suggest, aggregations, shardResults, sortedTopDocs, + firstResult.sortValueFormats(), numReducePhases, size, from, firstResult == null); } /** @@ -551,12 +555,8 @@ public final class SearchPhaseController { final SearchProfileShardResults shardResults; // the number of reduces phases final int numReducePhases; - // the searches merged top docs - final ScoreDoc[] scoreDocs; - // the top docs sort fields used to sort the score docs, null if the results are not sorted - final SortField[] sortField; - // true iff the result score docs is sorted by a field (not score), this implies that sortField is set. - final boolean isSortedByField; + //encloses info about the merged top docs, the sort fields used to sort the score docs etc. + final SortedTopDocs sortedTopDocs; // the size of the top hits to return final int size; // true iff the query phase had no results. Otherwise false @@ -567,9 +567,8 @@ public final class SearchPhaseController { final DocValueFormat[] sortValueFormats; ReducedQueryPhase(TotalHits totalHits, long fetchHits, float maxScore, boolean timedOut, Boolean terminatedEarly, Suggest suggest, - InternalAggregations aggregations, SearchProfileShardResults shardResults, ScoreDoc[] scoreDocs, - SortField[] sortFields, DocValueFormat[] sortValueFormats, int numReducePhases, boolean isSortedByField, int size, - int from, boolean isEmptyResult) { + InternalAggregations aggregations, SearchProfileShardResults shardResults, SortedTopDocs sortedTopDocs, + DocValueFormat[] sortValueFormats, int numReducePhases, int size, int from, boolean isEmptyResult) { if (numReducePhases <= 0) { throw new IllegalArgumentException("at least one reduce phase must have been applied but was: " + numReducePhases); } @@ -586,9 +585,7 @@ public final class SearchPhaseController { this.aggregations = aggregations; this.shardResults = shardResults; this.numReducePhases = numReducePhases; - this.scoreDocs = scoreDocs; - this.sortField = sortFields; - this.isSortedByField = isSortedByField; + this.sortedTopDocs = sortedTopDocs; this.size = size; this.from = from; this.isEmptyResult = isEmptyResult; @@ -728,7 +725,7 @@ public final class SearchPhaseController { } return new InitialSearchPhase.ArraySearchPhaseResults(numShards) { @Override - public ReducedQueryPhase reduce() { + ReducedQueryPhase reduce() { return reducedQueryPhase(results.asList(), isScrollRequest, trackTotalHits); } }; @@ -770,15 +767,23 @@ public final class SearchPhaseController { } static final class SortedTopDocs { - static final SortedTopDocs EMPTY = new SortedTopDocs(EMPTY_DOCS, false, null); + static final SortedTopDocs EMPTY = new SortedTopDocs(EMPTY_DOCS, false, null, null, null); + // the searches merged top docs final ScoreDoc[] scoreDocs; + // true iff the result score docs is sorted by a field (not score), this implies that sortField is set. 
final boolean isSortedByField; + // the top docs sort fields used to sort the score docs, null if the results are not sorted final SortField[] sortFields; + final String collapseField; + final Object[] collapseValues; - SortedTopDocs(ScoreDoc[] scoreDocs, boolean isSortedByField, SortField[] sortFields) { + SortedTopDocs(ScoreDoc[] scoreDocs, boolean isSortedByField, SortField[] sortFields, + String collapseField, Object[] collapseValues) { this.scoreDocs = scoreDocs; this.isSortedByField = isSortedByField; this.sortFields = sortFields; + this.collapseField = collapseField; + this.collapseValues = collapseValues; } } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java index d5ab34fe582..68968c071f4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -77,7 +77,6 @@ public final class SearchRequest extends ActionRequest implements IndicesRequest private Boolean allowPartialSearchResults; - private Scroll scroll; private int batchedReduceSize = DEFAULT_BATCHED_REDUCE_SIZE; @@ -95,6 +94,25 @@ public final class SearchRequest extends ActionRequest implements IndicesRequest public SearchRequest() { } + /** + * Constructs a new search request from the provided search request + */ + public SearchRequest(SearchRequest searchRequest) { + this.allowPartialSearchResults = searchRequest.allowPartialSearchResults; + this.batchedReduceSize = searchRequest.batchedReduceSize; + this.indices = searchRequest.indices; + this.indicesOptions = searchRequest.indicesOptions; + this.maxConcurrentShardRequests = searchRequest.maxConcurrentShardRequests; + this.preference = searchRequest.preference; + this.preFilterShardSize = searchRequest.preFilterShardSize; + this.requestCache = searchRequest.requestCache; + this.routing = searchRequest.routing; + this.scroll = searchRequest.scroll; + this.searchType = searchRequest.searchType; + this.source = searchRequest.source; + this.types = searchRequest.types; + } + /** * Constructs a new search request against the indices. No indices provided here means that search * will run against all indices. 
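
For context on the SearchRequest copy constructor just above: it performs a field-by-field copy of all top-level request settings, which lets callers derive a tweaked request without mutating the one they were handed. A minimal usage sketch follows; the index name, source, and preference values are illustrative and not part of this commit. Note that the SearchSourceBuilder instance is shared between the two requests, not cloned, so only top-level settings should be changed on the copy.

import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.search.builder.SearchSourceBuilder;

public class SearchRequestCopyExample {
    public static void main(String[] args) {
        SearchRequest original = new SearchRequest("logs-2018"); // hypothetical index
        original.source(new SearchSourceBuilder().size(10));

        // Field-by-field copy of the top-level settings; the source builder
        // itself is shared rather than deep-copied.
        SearchRequest copy = new SearchRequest(original);
        copy.preference("_local"); // leaves 'original' untouched

        assert original.preference() == null;
    }
}
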
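Stepping back to the sequence-number plumbing added to IndexRequest and DeleteRequest earlier in this section: together these fields implement compare-and-write, an optimistic concurrency check keyed on (seqNo, primaryTerm) rather than _version. A hedged sketch of the intended call pattern using the ifMatch(...) method from this diff; the index name, source document, and the idea of reusing an IndexResponse are illustrative only.

import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.common.xcontent.XContentType;

public class CompareAndWriteSketch {
    // 'previous' would come from an earlier write (or a get) on the same document.
    static IndexRequest overwriteIfUnchanged(IndexResponse previous) {
        return new IndexRequest("accounts", "_doc", previous.getId())
            .source("{\"balance\": 100}", XContentType.JSON)
            // The primary rejects the write with a version conflict if another
            // operation bumped the sequence number or the primary term changed.
            .ifMatch(previous.getSeqNo(), previous.getPrimaryTerm());
    }
}
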
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java index 794e3c84f13..df18296de2a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java @@ -35,7 +35,6 @@ import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.ScrollQuerySearchResult; import org.elasticsearch.transport.Transport; -import java.io.IOException; import java.util.function.BiFunction; final class SearchScrollQueryThenFetchAsyncAction extends SearchScrollAsyncAction { @@ -68,16 +67,16 @@ final class SearchScrollQueryThenFetchAsyncAction extends SearchScrollAsyncActio protected SearchPhase moveToNextPhase(BiFunction clusterNodeLookup) { return new SearchPhase("fetch") { @Override - public void run() throws IOException { + public void run() { final SearchPhaseController.ReducedQueryPhase reducedQueryPhase = searchPhaseController.reducedScrollQueryPhase( queryResults.asList()); - if (reducedQueryPhase.scoreDocs.length == 0) { + ScoreDoc[] scoreDocs = reducedQueryPhase.sortedTopDocs.scoreDocs; + if (scoreDocs.length == 0) { sendResponse(reducedQueryPhase, fetchResults); return; } - final IntArrayList[] docIdsToLoad = searchPhaseController.fillDocIdsToLoad(queryResults.length(), - reducedQueryPhase.scoreDocs); + final IntArrayList[] docIdsToLoad = searchPhaseController.fillDocIdsToLoad(queryResults.length(), scoreDocs); final ScoreDoc[] lastEmittedDocPerShard = searchPhaseController.getLastEmittedDocPerShard(reducedQueryPhase, queryResults.length()); final CountDown counter = new CountDown(docIdsToLoad.length); diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java index a4fdce17d09..cd0b5629b76 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java @@ -43,6 +43,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; @@ -90,7 +91,7 @@ public class UpdateRequest extends InstanceShardOperationRequest ObjectParser.ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING); } - private String type; + private String type = MapperService.SINGLE_MAPPING_NAME; private String id; @Nullable private String routing; @@ -121,6 +122,15 @@ public class UpdateRequest extends InstanceShardOperationRequest } + public UpdateRequest(String index, String id) { + super(index); + this.id = id; + } + + /** + * @deprecated Types are in the process of being removed. Use {@link #UpdateRequest(String, String)} instead. + */ + @Deprecated public UpdateRequest(String index, String type, String id) { super(index); this.type = type; @@ -173,7 +183,10 @@ public class UpdateRequest extends InstanceShardOperationRequest /** * The type of the indexed document. + * + * @deprecated Types are in the process of being removed. 
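As the deprecations in this UpdateRequest hunk indicate, the type now defaults to MapperService.SINGLE_MAPPING_NAME, so the new two-argument constructor covers the common case. A small migration sketch, with index, id, and document values invented for illustration:

import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.common.xcontent.XContentType;

public class TypelessUpdateSketch {
    public static void main(String[] args) {
        // Deprecated: spelling out the mapping type explicitly
        UpdateRequest withType = new UpdateRequest("posts", "_doc", "1");

        // Preferred: typeless constructor; type() resolves to "_doc" implicitly
        UpdateRequest typeless = new UpdateRequest("posts", "1")
            .doc("{\"views\": 42}", XContentType.JSON);

        assert withType.type().equals(typeless.type());
    }
}
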
*/ + @Deprecated @Override public String type() { return type; @@ -181,7 +194,10 @@ public class UpdateRequest extends InstanceShardOperationRequest /** * Sets the type of the indexed document. + * + * @deprecated Types are in the process of being removed. */ + @Deprecated public UpdateRequest type(String type) { this.type = type; return this; diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Security.java b/server/src/main/java/org/elasticsearch/bootstrap/Security.java index 734b15d5098..2a537186f6a 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -29,6 +29,7 @@ import org.elasticsearch.plugins.PluginInfo; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.secure_sm.SecureSM; import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.TransportSettings; import java.io.IOException; import java.net.SocketPermission; @@ -368,7 +369,7 @@ final class Security { * @param settings the {@link Settings} instance to read the transport settings from */ private static void addSocketPermissionForTransport(final Permissions policy, final Settings settings) { - final String transportRange = TcpTransport.PORT.get(settings); + final String transportRange = TransportSettings.PORT.get(settings); addSocketPermissionForPortRange(policy, transportRange); } diff --git a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 8450fe8d714..b5720c023f0 100644 --- a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -57,9 +57,9 @@ import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.SearchModule; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.TransportSettings; import java.io.Closeable; import java.util.ArrayList; @@ -102,7 +102,7 @@ public abstract class TransportClient extends AbstractClient { private static PluginsService newPluginService(final Settings settings, Collection> plugins) { final Settings.Builder settingsBuilder = Settings.builder() - .put(TcpTransport.PING_SCHEDULE.getKey(), "5s") // enable by default the transport schedule ping interval + .put(TransportSettings.PING_SCHEDULE.getKey(), "5s") // enable by default the transport schedule ping interval .put(InternalSettingsPreparer.prepareSettings(settings)) .put(NetworkService.NETWORK_SERVER.getKey(), false) .put(CLIENT_TYPE_SETTING_S.getKey(), CLIENT_TYPE); @@ -136,7 +136,7 @@ public abstract class TransportClient extends AbstractClient { Settings.builder() .put(defaultSettings) .put(pluginsService.updatedSettings()) - .put(TcpTransport.FEATURE_PREFIX + "." + TRANSPORT_CLIENT_FEATURE, true) + .put(TransportSettings.FEATURE_PREFIX + "." 
+ TRANSPORT_CLIENT_FEATURE, true) .build(); final List resourcesToClose = new ArrayList<>(); final ThreadPool threadPool = new ThreadPool(settings); diff --git a/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java b/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java index 066f00c2cd1..c229a826ee8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster; +import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.Version; @@ -33,36 +34,33 @@ import org.elasticsearch.snapshots.Snapshot; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; +import java.util.Iterator; import java.util.List; import java.util.Objects; +import java.util.UUID; /** * Meta data about restore processes that are currently executing */ -public class RestoreInProgress extends AbstractNamedDiffable implements Custom { +public class RestoreInProgress extends AbstractNamedDiffable implements Custom, Iterable { + + /** + * Fallback UUID used for restore operations that were started before v7.0 and don't have a uuid in the cluster state. + */ + public static final String BWC_UUID = new UUID(0, 0).toString(); public static final String TYPE = "restore"; - private final List entries; + private final ImmutableOpenMap entries; /** * Constructs new restore metadata * - * @param entries list of currently running restore processes + * @param entries map of currently running restore processes keyed by their restore uuid */ - public RestoreInProgress(Entry... entries) { - this.entries = Arrays.asList(entries); - } - - /** - * Returns list of currently running restore processes - * - * @return list of currently running restore processes - */ - public List entries() { - return this.entries; + private RestoreInProgress(ImmutableOpenMap entries) { + this.entries = entries; } @Override @@ -84,20 +82,48 @@ public class RestoreInProgress extends AbstractNamedDiffable implements @Override public String toString() { - StringBuilder builder = new StringBuilder("RestoreInProgress["); - for (int i = 0; i < entries.size(); i++) { - builder.append(entries.get(i).snapshot().getSnapshotId().getName()); - if (i + 1 < entries.size()) { - builder.append(","); - } + return new StringBuilder("RestoreInProgress[").append(entries).append("]").toString(); + } + + public Entry get(String restoreUUID) { + return entries.get(restoreUUID); + } + + public boolean isEmpty() { + return entries.isEmpty(); + } + + @Override + public Iterator iterator() { + return entries.valuesIt(); + } + + public static final class Builder { + + private final ImmutableOpenMap.Builder entries = ImmutableOpenMap.builder(); + + public Builder() { + } + + public Builder(RestoreInProgress restoreInProgress) { + entries.putAll(restoreInProgress.entries); + } + + public Builder add(Entry entry) { + entries.put(entry.uuid, entry); + return this; + } + + public RestoreInProgress build() { + return new RestoreInProgress(entries.build()); } - return builder.append("]").toString(); } /** * Restore metadata */ public static class Entry { + private final String uuid; private final State state; private final Snapshot snapshot; private final ImmutableOpenMap shards; @@ -106,12 +132,14 @@ public class RestoreInProgress extends AbstractNamedDiffable implements /** * Creates new 
restore metadata * + * @param uuid uuid of the restore * @param snapshot snapshot * @param state current state of the restore process * @param indices list of indices being restored * @param shards map of shards being restored to their current restore status */ - public Entry(Snapshot snapshot, State state, List indices, ImmutableOpenMap shards) { + public Entry(String uuid, Snapshot snapshot, State state, List indices, + ImmutableOpenMap shards) { this.snapshot = Objects.requireNonNull(snapshot); this.state = Objects.requireNonNull(state); this.indices = Objects.requireNonNull(indices); @@ -120,6 +148,15 @@ public class RestoreInProgress extends AbstractNamedDiffable implements } else { this.shards = shards; } + this.uuid = Objects.requireNonNull(uuid); + } + + /** + * Returns restore uuid + * @return restore uuid + */ + public String uuid() { + return uuid; } /** @@ -167,7 +204,8 @@ public class RestoreInProgress extends AbstractNamedDiffable implements return false; } Entry entry = (Entry) o; - return snapshot.equals(entry.snapshot) && + return uuid.equals(entry.uuid) && + snapshot.equals(entry.snapshot) && state == entry.state && indices.equals(entry.indices) && shards.equals(entry.shards); @@ -175,7 +213,7 @@ public class RestoreInProgress extends AbstractNamedDiffable implements @Override public int hashCode() { - return Objects.hash(snapshot, state, indices, shards); + return Objects.hash(uuid, snapshot, state, indices, shards); } } @@ -394,8 +432,15 @@ public class RestoreInProgress extends AbstractNamedDiffable implements } public RestoreInProgress(StreamInput in) throws IOException { - Entry[] entries = new Entry[in.readVInt()]; - for (int i = 0; i < entries.length; i++) { + int count = in.readVInt(); + final ImmutableOpenMap.Builder entriesBuilder = ImmutableOpenMap.builder(count); + for (int i = 0; i < count; i++) { + final String uuid; + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + uuid = in.readString(); + } else { + uuid = BWC_UUID; + } Snapshot snapshot = new Snapshot(in); State state = State.fromValue(in.readByte()); int indices = in.readVInt(); @@ -410,9 +455,9 @@ public class RestoreInProgress extends AbstractNamedDiffable implements ShardRestoreStatus shardState = ShardRestoreStatus.readShardRestoreStatus(in); builder.put(shardId, shardState); } - entries[i] = new Entry(snapshot, state, Collections.unmodifiableList(indexBuilder), builder.build()); + entriesBuilder.put(uuid, new Entry(uuid, snapshot, state, Collections.unmodifiableList(indexBuilder), builder.build())); } - this.entries = Arrays.asList(entries); + this.entries = entriesBuilder.build(); } /** @@ -421,7 +466,11 @@ public class RestoreInProgress extends AbstractNamedDiffable implements @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(entries.size()); - for (Entry entry : entries) { + for (ObjectCursor v : entries.values()) { + Entry entry = v.value; + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeString(entry.uuid); + } entry.snapshot().writeTo(out); out.writeByte(entry.state().value()); out.writeVInt(entry.indices().size()); @@ -442,8 +491,8 @@ public class RestoreInProgress extends AbstractNamedDiffable implements @Override public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startArray("snapshots"); - for (Entry entry : entries) { - toXContent(entry, builder, params); + for (ObjectCursor entry : entries.values()) { + toXContent(entry.value, builder, params); } builder.endArray(); return 
builder; diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/DiscoveryUpgradeService.java b/server/src/main/java/org/elasticsearch/cluster/coordination/DiscoveryUpgradeService.java index 52fb25dd8af..496adb65bb6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/DiscoveryUpgradeService.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/DiscoveryUpgradeService.java @@ -310,11 +310,9 @@ public class DiscoveryUpgradeService { * we lie and claim to have an impossible ID that compares above all genuine IDs. */ public static DiscoveryNode createDiscoveryNodeWithImpossiblyHighId(DiscoveryNode node) { - // IDs are base-64-encoded UUIDs, which means they the character set [0-9A-Za-z_-]. The highest character in this set is 'z', and - // 'z' < '{', so by starting the ID with '{' we can be sure it's greater. This is terrible. - final String fakeId = "{zen2}" + node.getId(); - assert node.getId().compareTo(fakeId) < 0 : node + " vs " + fakeId; - return new DiscoveryNode(node.getName(), fakeId, node.getEphemeralId(), node.getHostName(), + // IDs are base-64-encoded UUIDs, which means they use the character set [0-9A-Za-z_-]. The highest character in this set is 'z', + // and 'z' < '{', so by starting the ID with '{' we can be sure it's greater. This is terrible. + return new DiscoveryNode(node.getName(), "{zen2}" + node.getId(), node.getEphemeralId(), node.getHostName(), node.getHostAddress(), node.getAddress(), node.getAttributes(), node.getRoles(), node.getVersion()); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java index c72c25a0578..3654d66ad58 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster.routing; import org.elasticsearch.Version; +import org.elasticsearch.cluster.RestoreInProgress; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -208,22 +209,33 @@ public abstract class RecoverySource implements Writeable, ToXContentObject { * recovery from a snapshot */ public static class SnapshotRecoverySource extends RecoverySource { + private final String restoreUUID; private final Snapshot snapshot; private final String index; private final Version version; - public SnapshotRecoverySource(Snapshot snapshot, Version version, String index) { + public SnapshotRecoverySource(String restoreUUID, Snapshot snapshot, Version version, String index) { + this.restoreUUID = restoreUUID; this.snapshot = Objects.requireNonNull(snapshot); this.version = Objects.requireNonNull(version); this.index = Objects.requireNonNull(index); } SnapshotRecoverySource(StreamInput in) throws IOException { + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + restoreUUID = in.readString(); + } else { + restoreUUID = RestoreInProgress.BWC_UUID; + } snapshot = new Snapshot(in); version = Version.readVersion(in); index = in.readString(); } + public String restoreUUID() { + return restoreUUID; + } + public Snapshot snapshot() { return snapshot; } @@ -238,6 +250,9 @@ public abstract class RecoverySource implements Writeable, ToXContentObject { @Override protected void writeAdditionalFields(StreamOutput out) throws IOException { + if 
(out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeString(restoreUUID); + } snapshot.writeTo(out); Version.writeVersion(version, out); out.writeString(index); @@ -253,12 +268,13 @@ public abstract class RecoverySource implements Writeable, ToXContentObject { builder.field("repository", snapshot.getRepository()) .field("snapshot", snapshot.getSnapshotId().getName()) .field("version", version.toString()) - .field("index", index); + .field("index", index) + .field("restoreUUID", restoreUUID); } @Override public String toString() { - return "snapshot recovery from " + snapshot.toString(); + return "snapshot recovery [" + restoreUUID + "] from " + snapshot; } @Override @@ -271,12 +287,13 @@ public abstract class RecoverySource implements Writeable, ToXContentObject { } SnapshotRecoverySource that = (SnapshotRecoverySource) o; - return snapshot.equals(that.snapshot) && index.equals(that.index) && version.equals(that.version); + return restoreUUID.equals(that.restoreUUID) && snapshot.equals(that.snapshot) + && index.equals(that.index) && version.equals(that.version); } @Override public int hashCode() { - return Objects.hash(snapshot, index, version); + return Objects.hash(restoreUUID, snapshot, index, version); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDecider.java index 63971ca46e4..18bd5f2f13a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDecider.java @@ -24,7 +24,6 @@ import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.snapshots.Snapshot; /** * This {@link AllocationDecider} prevents shards that have failed to be @@ -46,25 +45,24 @@ public class RestoreInProgressAllocationDecider extends AllocationDecider { return allocation.decision(Decision.YES, NAME, "ignored as shard is not being recovered from a snapshot"); } - final Snapshot snapshot = ((RecoverySource.SnapshotRecoverySource) recoverySource).snapshot(); + RecoverySource.SnapshotRecoverySource source = (RecoverySource.SnapshotRecoverySource) recoverySource; final RestoreInProgress restoresInProgress = allocation.custom(RestoreInProgress.TYPE); if (restoresInProgress != null) { - for (RestoreInProgress.Entry restoreInProgress : restoresInProgress.entries()) { - if (restoreInProgress.snapshot().equals(snapshot)) { - RestoreInProgress.ShardRestoreStatus shardRestoreStatus = restoreInProgress.shards().get(shardRouting.shardId()); - if (shardRestoreStatus != null && shardRestoreStatus.state().completed() == false) { - assert shardRestoreStatus.state() != RestoreInProgress.State.SUCCESS : "expected shard [" + shardRouting - + "] to be in initializing state but got [" + shardRestoreStatus.state() + "]"; - return allocation.decision(Decision.YES, NAME, "shard is currently being restored"); - } - break; + RestoreInProgress.Entry restoreInProgress = restoresInProgress.get(source.restoreUUID()); + if (restoreInProgress != null) { + RestoreInProgress.ShardRestoreStatus shardRestoreStatus = restoreInProgress.shards().get(shardRouting.shardId()); + if 
(shardRestoreStatus != null && shardRestoreStatus.state().completed() == false) { + assert shardRestoreStatus.state() != RestoreInProgress.State.SUCCESS : "expected shard [" + shardRouting + + "] to be in initializing state but got [" + shardRestoreStatus.state() + "]"; + return allocation.decision(Decision.YES, NAME, "shard is currently being restored"); } } } return allocation.decision(Decision.NO, NAME, "shard has failed to be restored from the snapshot [%s] because of [%s] - " + "manually close or delete the index [%s] in order to retry to restore the snapshot again or use the reroute API to force the " + - "allocation of an empty primary shard", snapshot, shardRouting.unassignedInfo().getDetails(), shardRouting.getIndexName()); + "allocation of an empty primary shard", + source.snapshot(), shardRouting.unassignedInfo().getDetails(), shardRouting.getIndexName()); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index b00706b78ae..4164e29a582 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -785,22 +785,36 @@ public abstract class StreamOutput extends OutputStream { } } - public void writeArray(T[] array) throws IOException { - writeVInt(array.length); - for (T value: array) { - value.writeTo(this); - } - } - - public void writeOptionalArray(@Nullable T[] array) throws IOException { + /** + * Same as {@link #writeArray(Writer, Object[])} but the provided array may be null. An additional boolean value is + * serialized to indicate whether the array was null or not. + */ + public void writeOptionalArray(final Writer writer, final @Nullable T[] array) throws IOException { if (array == null) { writeBoolean(false); } else { writeBoolean(true); - writeArray(array); + writeArray(writer, array); } } + /** + * Writes the specified array of {@link Writeable}s. This method can be seen as + * writer version of {@link StreamInput#readArray(Writeable.Reader, IntFunction)}. The length of array encoded as a variable-length + * integer is first written to the stream, and then the elements of the array are written to the stream. + */ + public void writeArray(T[] array) throws IOException { + writeArray((out, value) -> value.writeTo(out), array); + } + + /** + * Same as {@link #writeArray(Writeable[])} but the provided array may be null. An additional boolean value is + * serialized to indicate whether the array was null or not. + */ + public void writeOptionalArray(@Nullable T[] array) throws IOException { + writeOptionalArray((out, value) -> value.writeTo(out), array); + } + /** * Serializes a potential null value. 
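To make the new writer-based array overloads in StreamOutput concrete: any element type with a matching read/write pair can now share the length-prefixed array encoding, not just Writeable implementations. A minimal round-trip sketch, assuming the writeArray(Writer, T[]) overload referenced in the javadoc above; the sample values are invented.

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;

import java.util.Arrays;

public class WriterArraySketch {
    public static void main(String[] args) throws Exception {
        String[] values = {"alpha", "beta"};
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            // Writer<String> lambda: encodes the length, then each element.
            out.writeArray((o, v) -> o.writeString(v), values);

            try (StreamInput in = out.bytes().streamInput()) {
                // Reader counterpart mentioned in the javadoc above.
                String[] read = in.readArray(StreamInput::readString, String[]::new);
                assert Arrays.equals(values, read);
            }
        }
    }
}
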
*/ diff --git a/server/src/main/java/org/elasticsearch/common/joda/Joda.java b/server/src/main/java/org/elasticsearch/common/joda/Joda.java index 6cbd77c5abe..f9998116455 100644 --- a/server/src/main/java/org/elasticsearch/common/joda/Joda.java +++ b/server/src/main/java/org/elasticsearch/common/joda/Joda.java @@ -19,7 +19,9 @@ package org.elasticsearch.common.joda; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.time.DateFormatter; import org.joda.time.Chronology; import org.joda.time.DateTime; @@ -48,10 +50,12 @@ import java.util.Locale; public class Joda { + private static DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(Joda.class)); + /** * Parses a joda based pattern, including some named ones (similar to the built in Joda ISO ones). */ - public static JodaDateFormatter forPattern(String input, Locale locale) { + public static JodaDateFormatter forPattern(String input) { if (Strings.hasLength(input)) { input = input.trim(); } @@ -102,8 +106,8 @@ public class Joda { // in this case, we have a separate parser and printer since the dataOptionalTimeParser can't print // this sucks we should use the root local by default and not be dependent on the node return new JodaDateFormatter(input, - ISODateTimeFormat.dateOptionalTimeParser().withLocale(locale).withZone(DateTimeZone.UTC), - ISODateTimeFormat.dateTime().withLocale(locale).withZone(DateTimeZone.UTC)); + ISODateTimeFormat.dateOptionalTimeParser().withLocale(Locale.ROOT).withZone(DateTimeZone.UTC), + ISODateTimeFormat.dateTime().withLocale(Locale.ROOT).withZone(DateTimeZone.UTC)); } else if ("dateTime".equals(input) || "date_time".equals(input)) { formatter = ISODateTimeFormat.dateTime(); } else if ("dateTimeNoMillis".equals(input) || "date_time_no_millis".equals(input)) { @@ -179,8 +183,8 @@ public class Joda { // in this case, we have a separate parser and printer since the dataOptionalTimeParser can't print // this sucks we should use the root local by default and not be dependent on the node return new JodaDateFormatter(input, - StrictISODateTimeFormat.dateOptionalTimeParser().withLocale(locale).withZone(DateTimeZone.UTC), - StrictISODateTimeFormat.dateTime().withLocale(locale).withZone(DateTimeZone.UTC)); + StrictISODateTimeFormat.dateOptionalTimeParser().withLocale(Locale.ROOT).withZone(DateTimeZone.UTC), + StrictISODateTimeFormat.dateTime().withLocale(Locale.ROOT).withZone(DateTimeZone.UTC)); } else if ("strictDateTime".equals(input) || "strict_date_time".equals(input)) { formatter = StrictISODateTimeFormat.dateTime(); } else if ("strictDateTimeNoMillis".equals(input) || "strict_date_time_no_millis".equals(input)) { @@ -227,19 +231,34 @@ public class Joda { formatter = StrictISODateTimeFormat.yearMonth(); } else if ("strictYearMonthDay".equals(input) || "strict_year_month_day".equals(input)) { formatter = StrictISODateTimeFormat.yearMonthDay(); - } else { try { + maybeLogJodaDeprecation(input); formatter = DateTimeFormat.forPattern(input); } catch (IllegalArgumentException e) { throw new IllegalArgumentException("Invalid format: [" + input + "]: " + e.getMessage(), e); } } - formatter = formatter.withLocale(locale).withZone(DateTimeZone.UTC); + formatter = formatter.withLocale(Locale.ROOT).withZone(DateTimeZone.UTC); return new JodaDateFormatter(input, formatter, formatter); } + private static void maybeLogJodaDeprecation(String input) { + if (input.contains("CC")) { + 
deprecationLogger.deprecatedAndMaybeLog("joda-century-of-era-format", + "Use of 'C' (century-of-era) is deprecated and will not be supported in the next major version of Elasticsearch."); + } + if (input.contains("YY")) { + deprecationLogger.deprecatedAndMaybeLog("joda-year-of-era-format", "Use of 'Y' (year-of-era) will change to 'y' in the" + + " next major version of Elasticsearch. Prefix your date format with '8' to use the new specifier."); + } + if (input.contains("xx")) { + deprecationLogger.deprecatedAndMaybeLog("joda-week-based-year-format","Use of 'x' (week-based-year) will change" + + " to 'Y' in the next major version of Elasticsearch. Prefix your date format with '8' to use the new specifier."); + } + } + public static DateFormatter getStrictStandardDateFormatter() { // 2014/10/10 DateTimeFormatter shortFormatter = new DateTimeFormatterBuilder() diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java index fd9d63ea225..4d4a2d838db 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -128,6 +128,9 @@ public class Lucene { public static final TopDocs EMPTY_TOP_DOCS = new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), EMPTY_SCORE_DOCS); + private Lucene() { + } + public static Version parseVersion(@Nullable String version, Version defaultVersion, Logger logger) { if (version == null) { return defaultVersion; @@ -201,7 +204,7 @@ public class Lucene { try (Lock writeLock = directory.obtainLock(IndexWriter.WRITE_LOCK_NAME)) { int foundSegmentFiles = 0; for (final String file : directory.listAll()) { - /** + /* * we could also use a deletion policy here but in the case of snapshot and restore * sometimes we restore an index and override files that were referenced by a "future" * commit. 
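The deprecation warnings added in Joda.forPattern above steer users toward java.time semantics, opted into via the '8' prefix that DateFormatter.forPattern strips (see the DateFormatter hunk later in this patch). A hedged illustration of the two dispatch paths; the pattern strings are examples only.

import org.elasticsearch.common.time.DateFormatter;

public class DatePatternDispatchSketch {
    public static void main(String[] args) {
        // Joda path: 'YYYY' (year-of-era) still works but now logs a deprecation warning.
        DateFormatter joda = DateFormatter.forPattern("YYYY-MM-dd");

        // java.time path: the leading '8' routes the remainder to DateFormatters,
        // where plain 'yyyy' is the recommended year field.
        DateFormatter javaTime = DateFormatter.forPattern("8yyyy-MM-dd");

        System.out.println(joda.pattern() + " / " + javaTime.pattern());
    }
}
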
If such a commit is opened by the IW it would likely throw a corrupted index exception @@ -227,7 +230,7 @@ public class Lucene { .setCommitOnClose(false) .setMergePolicy(NoMergePolicy.INSTANCE) .setOpenMode(IndexWriterConfig.OpenMode.APPEND))) { - // do nothing and close this will kick of IndexFileDeleter which will remove all pending files + // do nothing and close this will kick off IndexFileDeleter which will remove all pending files } return si; } @@ -321,12 +324,7 @@ public class Lucene { } else if (type == 1) { TotalHits totalHits = readTotalHits(in); float maxScore = in.readFloat(); - - SortField[] fields = new SortField[in.readVInt()]; - for (int i = 0; i < fields.length; i++) { - fields[i] = readSortField(in); - } - + SortField[] fields = in.readArray(Lucene::readSortField, SortField[]::new); FieldDoc[] fieldDocs = new FieldDoc[in.readVInt()]; for (int i = 0; i < fieldDocs.length; i++) { fieldDocs[i] = readFieldDoc(in); @@ -337,10 +335,7 @@ public class Lucene { float maxScore = in.readFloat(); String field = in.readString(); - SortField[] fields = new SortField[in.readVInt()]; - for (int i = 0; i < fields.length; i++) { - fields[i] = readSortField(in); - } + SortField[] fields = in.readArray(Lucene::readSortField, SortField[]::new); int size = in.readVInt(); Object[] collapseValues = new Object[size]; FieldDoc[] fieldDocs = new FieldDoc[size]; @@ -385,7 +380,7 @@ public class Lucene { return new FieldDoc(in.readVInt(), in.readFloat(), cFields); } - private static Comparable readSortValue(StreamInput in) throws IOException { + public static Comparable readSortValue(StreamInput in) throws IOException { byte type = in.readByte(); if (type == 0) { return null; @@ -436,11 +431,7 @@ public class Lucene { out.writeFloat(topDocs.maxScore); out.writeString(collapseDocs.field); - - out.writeVInt(collapseDocs.fields.length); - for (SortField sortField : collapseDocs.fields) { - writeSortField(out, sortField); - } + out.writeArray(Lucene::writeSortField, collapseDocs.fields); out.writeVInt(topDocs.topDocs.scoreDocs.length); for (int i = 0; i < topDocs.topDocs.scoreDocs.length; i++) { @@ -455,10 +446,7 @@ public class Lucene { writeTotalHits(out, topDocs.topDocs.totalHits); out.writeFloat(topDocs.maxScore); - out.writeVInt(topFieldDocs.fields.length); - for (SortField sortField : topFieldDocs.fields) { - writeSortField(out, sortField); - } + out.writeArray(Lucene::writeSortField, topFieldDocs.fields); out.writeVInt(topDocs.topDocs.scoreDocs.length); for (ScoreDoc doc : topFieldDocs.scoreDocs) { @@ -501,8 +489,7 @@ public class Lucene { } } - - private static void writeSortValue(StreamOutput out, Object field) throws IOException { + public static void writeSortValue(StreamOutput out, Object field) throws IOException { if (field == null) { out.writeByte((byte) 0); } else { @@ -687,11 +674,7 @@ public class Lucene { } } - private Lucene() { - - } - - public static final boolean indexExists(final Directory directory) throws IOException { + public static boolean indexExists(final Directory directory) throws IOException { return DirectoryReader.indexExists(directory); } @@ -701,7 +684,7 @@ public class Lucene { * * Will retry the directory every second for at least {@code timeLimitMillis} */ - public static final boolean waitForIndex(final Directory directory, final long timeLimitMillis) + public static boolean waitForIndex(final Directory directory, final long timeLimitMillis) throws IOException { final long DELAY = 1000; long waited = 0; @@ -1070,7 +1053,7 @@ public class Lucene { } public 
LeafMetaData getMetaData() { - return new LeafMetaData(Version.LATEST.major, Version.LATEST, (Sort)null); + return new LeafMetaData(Version.LATEST.major, Version.LATEST, null); } public CacheHelper getCoreCacheHelper() { diff --git a/server/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/server/src/main/java/org/elasticsearch/common/network/NetworkModule.java index edc815af417..bbfae10991e 100644 --- a/server/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/server/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.index.shard.PrimaryReplicaSyncer.ResyncTask; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.plugins.NetworkPlugin; import org.elasticsearch.tasks.RawTaskStatus; @@ -94,6 +95,8 @@ public final class NetworkModule { new NamedWriteableRegistry.Entry(Task.Status.class, ReplicationTask.Status.NAME, ReplicationTask.Status::new)); namedWriteables.add( new NamedWriteableRegistry.Entry(Task.Status.class, RawTaskStatus.NAME, RawTaskStatus::new)); + namedWriteables.add( + new NamedWriteableRegistry.Entry(Task.Status.class, ResyncTask.Status.NAME, ResyncTask.Status::new)); } private final Map> transportFactories = new HashMap<>(); diff --git a/server/src/main/java/org/elasticsearch/common/network/NetworkService.java b/server/src/main/java/org/elasticsearch/common/network/NetworkService.java index de4aee289d3..cde873fa577 100644 --- a/server/src/main/java/org/elasticsearch/common/network/NetworkService.java +++ b/server/src/main/java/org/elasticsearch/common/network/NetworkService.java @@ -39,13 +39,14 @@ public final class NetworkService { /** By default, we bind to loopback interfaces */ public static final String DEFAULT_NETWORK_HOST = "_local_"; + public static final Setting NETWORK_SERVER = + Setting.boolSetting("network.server", true, Property.NodeScope); public static final Setting> GLOBAL_NETWORK_HOST_SETTING = Setting.listSetting("network.host", Collections.emptyList(), Function.identity(), Property.NodeScope); - public static final Setting> GLOBAL_NETWORK_BINDHOST_SETTING = + public static final Setting> GLOBAL_NETWORK_BIND_HOST_SETTING = Setting.listSetting("network.bind_host", GLOBAL_NETWORK_HOST_SETTING, Function.identity(), Property.NodeScope); - public static final Setting> GLOBAL_NETWORK_PUBLISHHOST_SETTING = + public static final Setting> GLOBAL_NETWORK_PUBLISH_HOST_SETTING = Setting.listSetting("network.publish_host", GLOBAL_NETWORK_HOST_SETTING, Function.identity(), Property.NodeScope); - public static final Setting NETWORK_SERVER = Setting.boolSetting("network.server", true, Property.NodeScope); public static final Setting TCP_NO_DELAY = Setting.boolSetting("network.tcp.no_delay", true, Property.NodeScope); @@ -57,6 +58,7 @@ public final class NetworkService { Setting.byteSizeSetting("network.tcp.send_buffer_size", new ByteSizeValue(-1), Property.NodeScope); public static final Setting TCP_RECEIVE_BUFFER_SIZE = Setting.byteSizeSetting("network.tcp.receive_buffer_size", new ByteSizeValue(-1), Property.NodeScope); + // TODO: Deprecate in 7.0 public static final Setting TCP_CONNECT_TIMEOUT = Setting.timeSetting("network.tcp.connect_timeout", new TimeValue(30, TimeUnit.SECONDS), Property.NodeScope); diff --git 
a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 53fc07f53bb..1e9736bccc8 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -105,9 +105,7 @@ import org.elasticsearch.search.fetch.subphase.highlight.FastVectorHighlighter; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.RemoteClusterService; -import org.elasticsearch.transport.TcpTransport; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.watcher.ResourceWatcherService; import java.util.Arrays; @@ -270,6 +268,7 @@ public final class ClusterSettings extends AbstractScopedSettings { HttpTransportSettings.SETTING_HTTP_MAX_INITIAL_LINE_LENGTH, HttpTransportSettings.SETTING_HTTP_READ_TIMEOUT, HttpTransportSettings.SETTING_HTTP_RESET_COOKIES, + HttpTransportSettings.OLD_SETTING_HTTP_TCP_NO_DELAY, HttpTransportSettings.SETTING_HTTP_TCP_NO_DELAY, HttpTransportSettings.SETTING_HTTP_TCP_KEEP_ALIVE, HttpTransportSettings.SETTING_HTTP_TCP_REUSE_ADDRESS, @@ -308,44 +307,54 @@ public final class ClusterSettings extends AbstractScopedSettings { RemoteClusterService.SEARCH_ENABLE_REMOTE_CLUSTERS, RemoteClusterService.REMOTE_CLUSTER_PING_SCHEDULE, RemoteClusterService.REMOTE_CLUSTER_COMPRESS, - TransportService.TRACE_LOG_EXCLUDE_SETTING, - TransportService.TRACE_LOG_INCLUDE_SETTING, TransportCloseIndexAction.CLUSTER_INDICES_CLOSE_ENABLE_SETTING, ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING, NodeConnectionsService.CLUSTER_NODE_RECONNECT_INTERVAL_SETTING, HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, - Transport.TRANSPORT_TCP_COMPRESS, - TcpTransport.HOST, - TcpTransport.PUBLISH_HOST, - TcpTransport.BIND_HOST, - TcpTransport.PUBLISH_PORT, - TcpTransport.PORT, - TcpTransport.BIND_HOST_PROFILE, - TcpTransport.PUBLISH_HOST_PROFILE, - TcpTransport.PUBLISH_PORT_PROFILE, - TcpTransport.PORT_PROFILE, - TcpTransport.TCP_NO_DELAY_PROFILE, - TcpTransport.TCP_KEEP_ALIVE_PROFILE, - TcpTransport.TCP_REUSE_ADDRESS_PROFILE, - TcpTransport.TCP_SEND_BUFFER_SIZE_PROFILE, - TcpTransport.TCP_RECEIVE_BUFFER_SIZE_PROFILE, - TransportService.CONNECTIONS_PER_NODE_RECOVERY, - TransportService.CONNECTIONS_PER_NODE_BULK, - TransportService.CONNECTIONS_PER_NODE_REG, - TransportService.CONNECTIONS_PER_NODE_STATE, - TransportService.CONNECTIONS_PER_NODE_PING, - TransportService.TCP_CONNECT_TIMEOUT, - TcpTransport.PING_SCHEDULE, + TransportSettings.HOST, + TransportSettings.PUBLISH_HOST, + TransportSettings.PUBLISH_HOST_PROFILE, + TransportSettings.BIND_HOST, + TransportSettings.BIND_HOST_PROFILE, + TransportSettings.OLD_PORT, + TransportSettings.PORT, + TransportSettings.PORT_PROFILE, + TransportSettings.PUBLISH_PORT, + TransportSettings.PUBLISH_PORT_PROFILE, + TransportSettings.OLD_TRANSPORT_COMPRESS, + TransportSettings.TRANSPORT_COMPRESS, + TransportSettings.PING_SCHEDULE, + TransportSettings.TCP_CONNECT_TIMEOUT, + TransportSettings.CONNECT_TIMEOUT, + TransportSettings.DEFAULT_FEATURES_SETTING, + TransportSettings.OLD_TCP_NO_DELAY, + TransportSettings.TCP_NO_DELAY, + TransportSettings.OLD_TCP_NO_DELAY_PROFILE, + 
TransportSettings.TCP_NO_DELAY_PROFILE, + TransportSettings.TCP_KEEP_ALIVE, + TransportSettings.OLD_TCP_KEEP_ALIVE_PROFILE, + TransportSettings.TCP_KEEP_ALIVE_PROFILE, + TransportSettings.TCP_REUSE_ADDRESS, + TransportSettings.OLD_TCP_REUSE_ADDRESS_PROFILE, + TransportSettings.TCP_REUSE_ADDRESS_PROFILE, + TransportSettings.TCP_SEND_BUFFER_SIZE, + TransportSettings.OLD_TCP_SEND_BUFFER_SIZE_PROFILE, + TransportSettings.TCP_SEND_BUFFER_SIZE_PROFILE, + TransportSettings.TCP_RECEIVE_BUFFER_SIZE, + TransportSettings.OLD_TCP_RECEIVE_BUFFER_SIZE_PROFILE, + TransportSettings.TCP_RECEIVE_BUFFER_SIZE_PROFILE, + TransportSettings.CONNECTIONS_PER_NODE_RECOVERY, + TransportSettings.CONNECTIONS_PER_NODE_BULK, + TransportSettings.CONNECTIONS_PER_NODE_REG, + TransportSettings.CONNECTIONS_PER_NODE_STATE, + TransportSettings.CONNECTIONS_PER_NODE_PING, + TransportSettings.TRACE_LOG_EXCLUDE_SETTING, + TransportSettings.TRACE_LOG_INCLUDE_SETTING, NetworkService.NETWORK_SERVER, - TcpTransport.TCP_NO_DELAY, - TcpTransport.TCP_KEEP_ALIVE, - TcpTransport.TCP_REUSE_ADDRESS, - TcpTransport.TCP_SEND_BUFFER_SIZE, - TcpTransport.TCP_RECEIVE_BUFFER_SIZE, NetworkService.GLOBAL_NETWORK_HOST_SETTING, - NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING, - NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING, + NetworkService.GLOBAL_NETWORK_BIND_HOST_SETTING, + NetworkService.GLOBAL_NETWORK_PUBLISH_HOST_SETTING, NetworkService.TCP_NO_DELAY, NetworkService.TCP_KEEP_ALIVE, NetworkService.TCP_REUSE_ADDRESS, @@ -415,7 +424,6 @@ public final class ClusterSettings extends AbstractScopedSettings { ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING, EsExecutors.PROCESSORS_SETTING, ThreadContext.DEFAULT_HEADERS_SETTING, - TcpTransport.DEFAULT_FEATURES_SETTING, Loggers.LOG_DEFAULT_LEVEL_SETTING, Loggers.LOG_LEVEL_SETTING, NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING, diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java index 62a2eac2e9e..140c9fec394 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java @@ -126,16 +126,22 @@ public interface DateFormatter { DateMathParser toDateMathParser(); static DateFormatter forPattern(String input) { - return forPattern(input, Locale.ROOT); - } - - static DateFormatter forPattern(String input, Locale locale) { if (Strings.hasLength(input) == false) { throw new IllegalArgumentException("No date pattern provided"); } List formatters = new ArrayList<>(); for (String pattern : Strings.delimitedListToStringArray(input, "||")) { - formatters.add(Joda.forPattern(pattern, locale)); + if (Strings.hasLength(pattern) == false) { + throw new IllegalArgumentException("Cannot have empty element in multi date format pattern: " + input); + } + final DateFormatter formatter; + if (pattern.startsWith("8")) { + // force java 8 date format + formatter = DateFormatters.forPattern(pattern.substring(1)); + } else { + formatter = Joda.forPattern(pattern); + } + formatters.add(formatter); } if (formatters.size() == 1) { diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java index 512b1985e2f..0f1234dde02 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java @@ -39,7 +39,6 @@ import java.time.temporal.IsoFields; import
diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java index 512b1985e2f..0f1234dde02 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java @@ -39,7 +39,6 @@ import java.time.temporal.IsoFields; import java.time.temporal.TemporalAccessor; import java.time.temporal.TemporalAdjusters; import java.time.temporal.WeekFields; -import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Locale; @@ -1273,10 +1272,6 @@ public class DateFormatters { ///////////////////////////////////////// public static DateFormatter forPattern(String input) { - return forPattern(input, Locale.ROOT); - } - - private static DateFormatter forPattern(String input, Locale locale) { if (Strings.hasLength(input)) { input = input.trim(); } @@ -1443,25 +1438,9 @@ return STRICT_YEAR_MONTH; } else if ("strictYearMonthDay".equals(input) || "strict_year_month_day".equals(input)) { return STRICT_YEAR_MONTH_DAY; - } else if (Strings.hasLength(input) && input.contains("||")) { - String[] formats = Strings.delimitedListToStringArray(input, "||"); - if (formats.length == 1) { - return forPattern(formats[0], locale); - } else { - try { - List<DateFormatter> formatters = new ArrayList<>(formats.length); - for (int i = 0; i < formats.length; i++) { - formatters.add(forPattern(formats[i], locale)); - } - - return new MergedDateFormatter(input, formatters); - } catch (IllegalArgumentException e) { - throw new IllegalArgumentException("Invalid format: [" + input + "]: " + e.getMessage(), e); - } - } } else { try { - return new JavaDateFormatter(input, new DateTimeFormatterBuilder().appendPattern(input).toFormatter(locale)); + return new JavaDateFormatter(input, new DateTimeFormatterBuilder().appendPattern(input).toFormatter(Locale.ROOT)); } catch (IllegalArgumentException e) { throw new IllegalArgumentException("Invalid format: [" + input + "]: " + e.getMessage(), e); } @@ -1471,7 +1450,8 @@ public class DateFormatters { static class MergedDateFormatter implements DateFormatter { private final String pattern; - private final List<DateFormatter> formatters; + // package private for tests + final List<DateFormatter> formatters; private final List<DateMathParser> dateMathParsers; MergedDateFormatter(String pattern, List<DateFormatter> formatters) { diff --git a/server/src/main/java/org/elasticsearch/discovery/PeerFinder.java b/server/src/main/java/org/elasticsearch/discovery/PeerFinder.java index 61d2e031977..58248b8183d 100644 --- a/server/src/main/java/org/elasticsearch/discovery/PeerFinder.java +++ b/server/src/main/java/org/elasticsearch/discovery/PeerFinder.java @@ -55,6 +55,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -62,10 +63,9 @@ import java.util.Optional; import java.util.function.Consumer; import java.util.stream.Collectors; +import static java.util.Collections.emptyList; import static org.elasticsearch.cluster.coordination.Coordinator.isZen1Node; import static org.elasticsearch.cluster.coordination.DiscoveryUpgradeService.createDiscoveryNodeWithImpossiblyHighId; -import static java.util.Collections.emptyList; -import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap; public abstract class PeerFinder { @@ -95,7 +95,7 @@ public abstract class PeerFinder { private volatile long currentTerm; private boolean active; private DiscoveryNodes lastAcceptedNodes; - private final Map<TransportAddress, Peer> peersByAddress = newConcurrentMap(); + private final Map<TransportAddress, Peer> peersByAddress = new LinkedHashMap<>(); private Optional<DiscoveryNode> leader = Optional.empty(); private volatile List<TransportAddress> lastResolvedAddresses = emptyList(); @@ -150,6 +150,7 @@ public abstract class PeerFinder {
} private boolean assertInactiveWithNoKnownPeers() { + assert holdsLock() : "PeerFinder mutex not held"; assert active == false; assert peersByAddress.isEmpty() : peersByAddress.keySet(); return true; @@ -256,11 +257,7 @@ public abstract class PeerFinder { private boolean handleWakeUp() { assert holdsLock() : "PeerFinder mutex not held"; - boolean peersRemoved = false; - - for (final Peer peer : peersByAddress.values()) { - peersRemoved = peer.handleWakeUp() || peersRemoved; // care: avoid short-circuiting, each peer needs waking up - } + final boolean peersRemoved = peersByAddress.values().removeIf(Peer::handleWakeUp); if (active == false) { logger.trace("not active"); @@ -344,7 +341,6 @@ public abstract class PeerFinder { assert holdsLock() : "PeerFinder mutex not held"; if (active == false) { - removePeer(); return true; } @@ -358,7 +354,6 @@ public abstract class PeerFinder { } } else { logger.trace("{} no longer connected", this); - removePeer(); return true; } } @@ -394,18 +389,13 @@ public abstract class PeerFinder { @Override public void onFailure(Exception e) { logger.debug(() -> new ParameterizedMessage("{} connection failed", Peer.this), e); - removePeer(); + synchronized (mutex) { + peersByAddress.remove(transportAddress); + } } }); } - void removePeer() { - final Peer removed = peersByAddress.remove(transportAddress); - // assert removed == Peer.this : removed + " != " + Peer.this; - // ^ This assertion sometimes trips if we are deactivated and reactivated while a request is in flight. - // TODO be more careful about avoiding multiple active Peer objects for each address - } - private void requestPeers() { assert holdsLock() : "PeerFinder mutex not held"; assert peersRequestInFlight == false : "PeersRequest already in flight"; diff --git a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java index 1afe3acba48..4b53ac902d9 100644 --- a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java +++ b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java @@ -98,11 +98,11 @@ public abstract class AbstractHttpServerTransport extends AbstractLifecycleCompo // we can't make the network.bind_host a fallback since we already fall back to http.host hence the extra conditional here List httpBindHost = SETTING_HTTP_BIND_HOST.get(settings); - this.bindHosts = (httpBindHost.isEmpty() ? NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING.get(settings) : httpBindHost) + this.bindHosts = (httpBindHost.isEmpty() ? NetworkService.GLOBAL_NETWORK_BIND_HOST_SETTING.get(settings) : httpBindHost) .toArray(Strings.EMPTY_ARRAY); // we can't make the network.publish_host a fallback since we already fall back to http.host hence the extra conditional here List httpPublishHost = SETTING_HTTP_PUBLISH_HOST.get(settings); - this.publishHosts = (httpPublishHost.isEmpty() ? NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING.get(settings) : httpPublishHost) + this.publishHosts = (httpPublishHost.isEmpty() ? 
NetworkService.GLOBAL_NETWORK_PUBLISH_HOST_SETTING.get(settings) : httpPublishHost) .toArray(Strings.EMPTY_ARRAY); this.port = SETTING_HTTP_PORT.get(settings); diff --git a/server/src/main/java/org/elasticsearch/http/HttpTransportSettings.java b/server/src/main/java/org/elasticsearch/http/HttpTransportSettings.java index 4670137d09a..ddd8bfa7385 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTransportSettings.java +++ b/server/src/main/java/org/elasticsearch/http/HttpTransportSettings.java @@ -105,8 +105,13 @@ public final class HttpTransportSettings { public static final Setting SETTING_HTTP_READ_TIMEOUT = Setting.timeSetting("http.read_timeout", new TimeValue(0), new TimeValue(0), Property.NodeScope); - public static final Setting SETTING_HTTP_TCP_NO_DELAY = + // Tcp socket settings + + // TODO: Deprecate in 7.0 + public static final Setting OLD_SETTING_HTTP_TCP_NO_DELAY = boolSetting("http.tcp_no_delay", NetworkService.TCP_NO_DELAY, Setting.Property.NodeScope); + public static final Setting SETTING_HTTP_TCP_NO_DELAY = + boolSetting("http.tcp.no_delay", OLD_SETTING_HTTP_TCP_NO_DELAY, Setting.Property.NodeScope); public static final Setting SETTING_HTTP_TCP_KEEP_ALIVE = boolSetting("http.tcp.keep_alive", NetworkService.TCP_KEEP_ALIVE, Setting.Property.NodeScope); public static final Setting SETTING_HTTP_TCP_REUSE_ADDRESS = diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java b/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java index f2e7463c5a6..c65dc2e97a8 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.fielddata; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; @@ -27,25 +26,19 @@ import org.apache.lucene.util.BytesRefBuilder; import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.script.JodaCompatibleZonedDateTime; import java.io.IOException; -import java.security.AccessController; -import java.security.PrivilegedAction; import java.time.Instant; import java.time.ZoneOffset; import java.util.AbstractList; import java.util.Arrays; import java.util.Comparator; -import java.util.List; -import java.util.function.BiConsumer; import java.util.function.UnaryOperator; /** * Script level doc values, the assumption is that any implementation will - * implement a getValue and a getValues that return - * the relevant type that then can be used in scripts. + * implement a {@link Longs#getValue getValue} method. * * Implementations should not internally re-use objects for the values that they * return as a single {@link ScriptDocValues} instance can be reused to return @@ -53,39 +46,11 @@ import java.util.function.UnaryOperator; */ public abstract class ScriptDocValues extends AbstractList { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(ScriptDocValues.class)); - /** - * Callback for deprecated fields. In production this should always point to - * {@link #deprecationLogger} but tests will override it so they can test - * that we use the required permissions when calling it. 
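Returning to the HttpTransportSettings change above: registering the old http.tcp_no_delay key as the fallback of the new http.tcp.no_delay setting keeps existing configurations working until the old key is deprecated. A minimal sketch, assuming the standard Settings fallback semantics:

    Settings settings = Settings.builder().put("http.tcp_no_delay", false).build(); // only the old key is set
    boolean noDelay = HttpTransportSettings.SETTING_HTTP_TCP_NO_DELAY.get(settings); // false, resolved via the fallback setting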
- */ - private final BiConsumer deprecationCallback; - - public ScriptDocValues() { - deprecationCallback = deprecationLogger::deprecatedAndMaybeLog; - } - - /** - * Constructor for testing deprecation callback. - */ - ScriptDocValues(BiConsumer deprecationCallback) { - this.deprecationCallback = deprecationCallback; - } - /** * Set the current doc ID. */ public abstract void setNextDocId(int docId) throws IOException; - /** - * Return a copy of the list of the values for the current document. - */ - public final List getValues() { - deprecated("ScriptDocValues#getValues", "Deprecated getValues used, the field is a list and should be accessed directly." - + " For example, use doc['foo'] instead of doc['foo'].values."); - return this; - } - // Throw meaningful exceptions if someone tries to modify the ScriptDocValues. @Override public final void add(int index, T element) { @@ -112,21 +77,6 @@ public abstract class ScriptDocValues extends AbstractList { throw new UnsupportedOperationException("doc values are unmodifiable"); } - /** - * Log a deprecation log, with the server's permissions and not the permissions - * of the script calling this method. We need to do this to prevent errors - * when rolling the log file. - */ - private void deprecated(String key, String message) { - AccessController.doPrivileged(new PrivilegedAction() { - @Override - public Void run() { - deprecationCallback.accept(key, message); - return null; - } - }); - } - public static final class Longs extends ScriptDocValues { private final SortedNumericDocValues in; private long[] values = new long[0]; @@ -139,14 +89,6 @@ public abstract class ScriptDocValues extends AbstractList { this.in = in; } - /** - * Constructor for testing deprecation callback. - */ - Longs(SortedNumericDocValues in, BiConsumer deprecationCallback) { - super(deprecationCallback); - this.in = in; - } - @Override public void setNextDocId(int docId) throws IOException { if (in.advanceExact(docId)) { @@ -204,14 +146,6 @@ public abstract class ScriptDocValues extends AbstractList { this.in = in; } - /** - * Constructor for testing deprecation callback. - */ - Dates(SortedNumericDocValues in, BiConsumer deprecationCallback) { - super(deprecationCallback); - this.in = in; - } - /** * Fetch the first field value or 0 millis after epoch if there are no * in. @@ -330,14 +264,6 @@ public abstract class ScriptDocValues extends AbstractList { this.in = in; } - /** - * Constructor for testing deprecation callback. 
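With getValues() gone, callers treat a ScriptDocValues instance as the list it already is, e.g. doc['foo'] instead of doc['foo'].values in scripts. A minimal sketch in terms of the Longs implementation (the SortedNumericDocValues instance is assumed to be at hand):

    ScriptDocValues.Longs longs = new ScriptDocValues.Longs(sortedNumericDocValues);
    longs.setNextDocId(docId);
    long first = longs.getValue(); // first value for the current document
    int count = longs.size();      // plain list access replaces the removed getValues()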
- */ - GeoPoints(MultiGeoPointValues in, BiConsumer<String, String> deprecationCallback) { - super(deprecationCallback); - this.in = in; - } - @Override public void setNextDocId(int docId) throws IOException { if (in.advanceExact(docId)) { @@ -379,19 +305,17 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> { } public double[] getLats() { - List<GeoPoint> points = getValues(); - double[] lats = new double[points.size()]; - for (int i = 0; i < points.size(); i++) { - lats[i] = points.get(i).lat(); + double[] lats = new double[size()]; + for (int i = 0; i < size(); i++) { + lats[i] = get(i).lat(); } return lats; } public double[] getLons() { - List<GeoPoint> points = getValues(); - double[] lons = new double[points.size()]; - for (int i = 0; i < points.size(); i++) { - lons[i] = points.get(i).lon(); + double[] lons = new double[size()]; + for (int i = 0; i < size(); i++) { + lons[i] = get(i).lon(); } return lons; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 2f0d6e1e648..1e17aab3160 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -64,8 +64,7 @@ import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter; public class DateFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "date"; - public static final DateFormatter DEFAULT_DATE_TIME_FORMATTER = DateFormatter.forPattern( - "strict_date_optional_time||epoch_millis", Locale.ROOT); + public static final DateFormatter DEFAULT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time||epoch_millis"); public static class Defaults { public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit<>(false, false); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index eaafeefa7e0..741b2300a46 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.intervals.IntervalsSource; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; @@ -374,6 +375,14 @@ public abstract class MappedFieldType extends FieldType { + "] which is of type [" + typeName() + "]"); } + /** + * Create an {@link IntervalsSource} to be used for proximity queries + */ + public IntervalsSource intervals(String query, int maxGaps, boolean ordered, NamedAnalyzer analyzer) throws IOException { + throw new IllegalArgumentException("Can only use interval queries on text fields - not on [" + name + + "] which is of type [" + typeName() + "]"); + } + /** * An enum used to describe the relation between the range of terms in a * shard when compared with a query range @@ -465,4 +474,5 @@ } return ((TermQuery) termQuery).getTerm(); } + } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index d0419a0e44b..5987e167dc9 100644 ---
a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -39,6 +39,7 @@ import org.apache.lucene.search.NormsFieldExistsQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.intervals.IntervalsSource; import org.elasticsearch.Version; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.settings.Settings; @@ -48,6 +49,7 @@ import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData; +import org.elasticsearch.index.query.IntervalBuilder; import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; @@ -579,6 +581,15 @@ public class TextFieldMapper extends FieldMapper { } } + @Override + public IntervalsSource intervals(String text, int maxGaps, boolean ordered, NamedAnalyzer analyzer) throws IOException { + if (indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) { + throw new IllegalArgumentException("Cannot create intervals against field [" + name() + "] with no positions indexed"); + } + IntervalBuilder builder = new IntervalBuilder(name(), analyzer == null ? searchAnalyzer() : analyzer); + return builder.analyzeText(text, maxGaps, ordered); + } + @Override public Query phraseQuery(String field, TokenStream stream, int slop, boolean enablePosIncrements) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java b/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java new file mode 100644 index 00000000000..7f42eb13719 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java @@ -0,0 +1,299 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.CachingTokenFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; +import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.MatchesIterator; +import org.apache.lucene.search.intervals.IntervalIterator; +import org.apache.lucene.search.intervals.Intervals; +import org.apache.lucene.search.intervals.IntervalsSource; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.graph.GraphTokenStreamFiniteStrings; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Set; + +/** + * Constructs an IntervalsSource based on analyzed text + */ +public class IntervalBuilder { + + private final String field; + private final Analyzer analyzer; + + public IntervalBuilder(String field, Analyzer analyzer) { + this.field = field; + this.analyzer = analyzer; + } + + public IntervalsSource analyzeText(String query, int maxGaps, boolean ordered) throws IOException { + try (TokenStream ts = analyzer.tokenStream(field, query); + CachingTokenFilter stream = new CachingTokenFilter(ts)) { + return analyzeText(stream, maxGaps, ordered); + } + } + + protected IntervalsSource analyzeText(CachingTokenFilter stream, int maxGaps, boolean ordered) throws IOException { + + TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); + PositionIncrementAttribute posIncAtt = stream.addAttribute(PositionIncrementAttribute.class); + PositionLengthAttribute posLenAtt = stream.addAttribute(PositionLengthAttribute.class); + + if (termAtt == null) { + return NO_INTERVALS; + } + + // phase 1: read through the stream and assess the situation: + // counting the number of tokens/positions and marking if we have any synonyms. + + int numTokens = 0; + boolean hasSynonyms = false; + boolean isGraph = false; + + stream.reset(); + while (stream.incrementToken()) { + numTokens++; + int positionIncrement = posIncAtt.getPositionIncrement(); + if (positionIncrement == 0) { + hasSynonyms = true; + } + int positionLength = posLenAtt.getPositionLength(); + if (positionLength > 1) { + isGraph = true; + } + } + + // phase 2: based on token count, presence of synonyms, and options + // formulate a single term, boolean, or phrase. 
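A minimal sketch of how the two phases classify a plain token stream (field name and analyzer illustrative), matching the combineSources rules defined below:

    IntervalBuilder builder = new IntervalBuilder("body", new StandardAnalyzer());
    IntervalsSource term = builder.analyzeText("fox", -1, true);         // single token -> Intervals.term
    IntervalsSource phrase = builder.analyzeText("quick fox", 0, true);  // ordered, no gaps -> Intervals.phrase
    IntervalsSource sloppy = builder.analyzeText("quick fox", 2, false); // -> Intervals.maxgaps(2, Intervals.unordered(...))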
+ + if (numTokens == 0) { + return null; + } else if (numTokens == 1) { + // single term + return analyzeTerm(stream); + } else if (isGraph) { + // graph + return combineSources(analyzeGraph(stream), maxGaps, ordered); + } else { + // phrase + if (hasSynonyms) { + // phrase with single-term synonyms + return analyzeSynonyms(stream, maxGaps, ordered); + } else { + // simple phrase + return combineSources(analyzeTerms(stream), maxGaps, ordered); + } + } + + } + + protected IntervalsSource analyzeTerm(TokenStream ts) throws IOException { + TermToBytesRefAttribute bytesAtt = ts.addAttribute(TermToBytesRefAttribute.class); + ts.reset(); + ts.incrementToken(); + return Intervals.term(BytesRef.deepCopyOf(bytesAtt.getBytesRef())); + } + + protected static IntervalsSource combineSources(List sources, int maxGaps, boolean ordered) { + if (sources.size() == 0) { + return NO_INTERVALS; + } + if (sources.size() == 1) { + return sources.get(0); + } + IntervalsSource[] sourcesArray = sources.toArray(new IntervalsSource[0]); + if (maxGaps == 0 && ordered) { + return Intervals.phrase(sourcesArray); + } + IntervalsSource inner = ordered ? Intervals.ordered(sourcesArray) : Intervals.unordered(sourcesArray); + if (maxGaps == -1) { + return inner; + } + return Intervals.maxgaps(maxGaps, inner); + } + + protected List analyzeTerms(TokenStream ts) throws IOException { + List terms = new ArrayList<>(); + TermToBytesRefAttribute bytesAtt = ts.addAttribute(TermToBytesRefAttribute.class); + ts.reset(); + while (ts.incrementToken()) { + BytesRef term = bytesAtt.getBytesRef(); + terms.add(Intervals.term(BytesRef.deepCopyOf(term))); + } + ts.end(); + return terms; + } + + protected IntervalsSource analyzeSynonyms(TokenStream ts, int maxGaps, boolean ordered) throws IOException { + List terms = new ArrayList<>(); + List synonyms = new ArrayList<>(); + TermToBytesRefAttribute bytesAtt = ts.addAttribute(TermToBytesRefAttribute.class); + PositionIncrementAttribute posAtt = ts.addAttribute(PositionIncrementAttribute.class); + ts.reset(); + while (ts.incrementToken()) { + if (posAtt.getPositionIncrement() == 1) { + if (synonyms.size() == 1) { + terms.add(synonyms.get(0)); + } + else if (synonyms.size() > 1) { + terms.add(Intervals.or(synonyms.toArray(new IntervalsSource[0]))); + } + synonyms.clear(); + } + synonyms.add(Intervals.term(BytesRef.deepCopyOf(bytesAtt.getBytesRef()))); + } + if (synonyms.size() == 1) { + terms.add(synonyms.get(0)); + } + else { + terms.add(Intervals.or(synonyms.toArray(new IntervalsSource[0]))); + } + return combineSources(terms, maxGaps, ordered); + } + + protected List analyzeGraph(TokenStream source) throws IOException { + source.reset(); + GraphTokenStreamFiniteStrings graph = new GraphTokenStreamFiniteStrings(source); + + List clauses = new ArrayList<>(); + int[] articulationPoints = graph.articulationPoints(); + int lastState = 0; + int maxClauseCount = BooleanQuery.getMaxClauseCount(); + for (int i = 0; i <= articulationPoints.length; i++) { + int start = lastState; + int end = -1; + if (i < articulationPoints.length) { + end = articulationPoints[i]; + } + lastState = end; + if (graph.hasSidePath(start)) { + List paths = new ArrayList<>(); + Iterator it = graph.getFiniteStrings(start, end); + while (it.hasNext()) { + TokenStream ts = it.next(); + IntervalsSource phrase = combineSources(analyzeTerms(ts), 0, true); + if (paths.size() >= maxClauseCount) { + throw new BooleanQuery.TooManyClauses(); + } + paths.add(phrase); + } + if (paths.size() > 0) { + 
clauses.add(Intervals.or(paths.toArray(new IntervalsSource[0]))); + } + } else { + Iterator it = graph.getFiniteStrings(start, end); + TokenStream ts = it.next(); + clauses.addAll(analyzeTerms(ts)); + assert it.hasNext() == false; + } + } + return clauses; + } + + private static final IntervalsSource NO_INTERVALS = new IntervalsSource() { + + @Override + public IntervalIterator intervals(String field, LeafReaderContext ctx) { + return new IntervalIterator() { + @Override + public int start() { + return NO_MORE_INTERVALS; + } + + @Override + public int end() { + return NO_MORE_INTERVALS; + } + + @Override + public int gaps() { + throw new UnsupportedOperationException(); + } + + @Override + public int nextInterval() { + return NO_MORE_INTERVALS; + } + + @Override + public float matchCost() { + return 0; + } + + @Override + public int docID() { + return NO_MORE_DOCS; + } + + @Override + public int nextDoc() { + return NO_MORE_DOCS; + } + + @Override + public int advance(int target) { + return NO_MORE_DOCS; + } + + @Override + public long cost() { + return 0; + } + }; + } + + @Override + public MatchesIterator matches(String field, LeafReaderContext ctx, int doc) { + return null; + } + + @Override + public void extractTerms(String field, Set terms) { + + } + + @Override + public int hashCode() { + return 0; + } + + @Override + public boolean equals(Object other) { + return other == this; + } + + @Override + public String toString() { + return "no_match"; + } + }; + +} diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/IntervalQueryBuilder.java new file mode 100644 index 00000000000..a1badc38323 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalQueryBuilder.java @@ -0,0 +1,152 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.intervals.IntervalQuery; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.MappedFieldType; + +import java.io.IOException; +import java.util.Objects; + +/** + * Builder for {@link IntervalQuery} + */ +public class IntervalQueryBuilder extends AbstractQueryBuilder { + + public static final String NAME = "intervals"; + + private final String field; + private final IntervalsSourceProvider sourceProvider; + + public IntervalQueryBuilder(String field, IntervalsSourceProvider sourceProvider) { + this.field = field; + this.sourceProvider = sourceProvider; + } + + public IntervalQueryBuilder(StreamInput in) throws IOException { + super(in); + this.field = in.readString(); + this.sourceProvider = in.readNamedWriteable(IntervalsSourceProvider.class); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(field); + out.writeNamedWriteable(sourceProvider); + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + builder.field(field); + builder.startObject(); + sourceProvider.toXContent(builder, params); + printBoostAndQueryName(builder); + builder.endObject(); + builder.endObject(); + } + + public static IntervalQueryBuilder fromXContent(XContentParser parser) throws IOException { + if (parser.nextToken() != XContentParser.Token.FIELD_NAME) { + throw new ParsingException(parser.getTokenLocation(), "Expected [FIELD_NAME] but got [" + parser.currentToken() + "]"); + } + String field = parser.currentName(); + if (parser.nextToken() != XContentParser.Token.START_OBJECT) { + throw new ParsingException(parser.getTokenLocation(), "Expected [START_OBJECT] but got [" + parser.currentToken() + "]"); + } + String name = null; + float boost = 1; + IntervalsSourceProvider provider = null; + String providerName = null; + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + if (parser.currentToken() != XContentParser.Token.FIELD_NAME) { + throw new ParsingException(parser.getTokenLocation(), + "Expected [FIELD_NAME] but got [" + parser.currentToken() + "]"); + } + switch (parser.currentName()) { + case "_name": + parser.nextToken(); + name = parser.text(); + break; + case "boost": + parser.nextToken(); + boost = parser.floatValue(); + break; + default: + if (providerName != null) { + throw new ParsingException(parser.getTokenLocation(), + "Only one interval rule can be specified, found [" + providerName + "] and [" + parser.currentName() + "]"); + } + providerName = parser.currentName(); + provider = IntervalsSourceProvider.fromXContent(parser); + + } + } + if (parser.nextToken() != XContentParser.Token.END_OBJECT) { + throw new ParsingException(parser.getTokenLocation(), + "Expected [END_OBJECT] but got [" + parser.currentToken() + "]"); + } + if (provider == null) { + throw new ParsingException(parser.getTokenLocation(), "Missing intervals from interval query definition"); + } + IntervalQueryBuilder builder = new IntervalQueryBuilder(field, provider); + builder.queryName(name); + 
builder.boost(boost); + return builder; + + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + MappedFieldType fieldType = context.fieldMapper(field); + if (fieldType == null) { + // Be lenient with unmapped fields so that cross-index search will work nicely + return new MatchNoDocsQuery(); + } + if (fieldType.tokenized() == false || + fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) { + throw new IllegalArgumentException("Cannot create IntervalQuery over field [" + field + "] with no indexed positions"); + } + return new IntervalQuery(field, sourceProvider.getSource(context, fieldType)); + } + + @Override + protected boolean doEquals(IntervalQueryBuilder other) { + return Objects.equals(field, other.field) && Objects.equals(sourceProvider, other.sourceProvider); + } + + @Override + protected int doHashCode() { + return Objects.hash(field, sourceProvider); + } + + @Override + public String getWriteableName() { + return NAME; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java new file mode 100644 index 00000000000..79bcbe26fbc --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java @@ -0,0 +1,478 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
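Together with the source providers defined in the next new file, the builder can be driven programmatically; a minimal sketch (field and query text illustrative), with the rough JSON shape that doXContent emits and fromXContent accepts:

    IntervalsSourceProvider.Match match =
        new IntervalsSourceProvider.Match("quick brown fox", 2, true, null, null); // query, max_gaps, ordered, analyzer, filter
    IntervalQueryBuilder query = new IntervalQueryBuilder("body", match);
    // roughly: {"intervals": {"body": {"match": {"query": "quick brown fox", "max_gaps": 2, "ordered": true}, "boost": 1.0}}}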
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.intervals.Intervals; +import org.apache.lucene.search.intervals.IntervalsSource; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.mapper.MappedFieldType; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * Factory class for {@link IntervalsSource} + * + * Built-in sources include {@link Match}, which analyzes a text string and converts it + * to a proximity source (phrase, ordered or unordered depending on how + * strict the matching should be); {@link Combine}, which allows proximity queries + * between different sub-sources; and {@link Disjunction}. + */ +public abstract class IntervalsSourceProvider implements NamedWriteable, ToXContentFragment { + + public abstract IntervalsSource getSource(QueryShardContext context, MappedFieldType fieldType) throws IOException; + + @Override + public abstract int hashCode(); + + @Override + public abstract boolean equals(Object other); + + public static IntervalsSourceProvider fromXContent(XContentParser parser) throws IOException { + assert parser.currentToken() == XContentParser.Token.FIELD_NAME; + switch (parser.currentName()) { + case "match": + return Match.fromXContent(parser); + case "any_of": + return Disjunction.fromXContent(parser); + case "all_of": + return Combine.fromXContent(parser); + } + throw new ParsingException(parser.getTokenLocation(), + "Unknown interval type [" + parser.currentName() + "], expecting one of [match, any_of, all_of]"); + } + + private static IntervalsSourceProvider parseInnerIntervals(XContentParser parser) throws IOException { + if (parser.nextToken() != XContentParser.Token.FIELD_NAME) { + throw new ParsingException(parser.getTokenLocation(), "Expected [FIELD_NAME] but got [" + parser.currentToken() + "]"); + } + IntervalsSourceProvider isp = IntervalsSourceProvider.fromXContent(parser); + if (parser.nextToken() != XContentParser.Token.END_OBJECT) { + throw new ParsingException(parser.getTokenLocation(), "Expected [END_OBJECT] but got [" + parser.currentToken() + "]"); + } + return isp; + } + + public static class Match extends IntervalsSourceProvider { + + public static final String NAME = "match"; + + private final String query; + private final int maxGaps; + private final boolean ordered; + private final String analyzer; + private final IntervalFilter filter; + + public Match(String query, int maxGaps, boolean ordered, String analyzer, IntervalFilter filter) { + this.query = query; + this.maxGaps = maxGaps; + this.ordered = ordered; + this.analyzer = analyzer; + this.filter = filter; + } + + public 
Match(StreamInput in) throws IOException { + this.query = in.readString(); + this.maxGaps = in.readVInt(); + this.ordered = in.readBoolean(); + this.analyzer = in.readOptionalString(); + this.filter = in.readOptionalWriteable(IntervalFilter::new); + } + + @Override + public IntervalsSource getSource(QueryShardContext context, MappedFieldType fieldType) throws IOException { + NamedAnalyzer analyzer = null; + if (this.analyzer != null) { + analyzer = context.getMapperService().getIndexAnalyzers().get(this.analyzer); + } + IntervalsSource source = fieldType.intervals(query, maxGaps, ordered, analyzer); + if (filter != null) { + return filter.filter(source, context, fieldType); + } + return source; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Match match = (Match) o; + return maxGaps == match.maxGaps && + ordered == match.ordered && + Objects.equals(query, match.query) && + Objects.equals(filter, match.filter) && + Objects.equals(analyzer, match.analyzer); + } + + @Override + public int hashCode() { + return Objects.hash(query, maxGaps, ordered, analyzer, filter); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(query); + out.writeVInt(maxGaps); + out.writeBoolean(ordered); + out.writeOptionalString(analyzer); + out.writeOptionalWriteable(filter); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(NAME); + builder.startObject(); + builder.field("query", query); + builder.field("max_gaps", maxGaps); + builder.field("ordered", ordered); + if (analyzer != null) { + builder.field("analyzer", analyzer); + } + if (filter != null) { + builder.field("filter", filter); + } + return builder.endObject(); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, + args -> { + String query = (String) args[0]; + int max_gaps = (args[1] == null ? 
-1 : (Integer) args[1]); + boolean ordered = (args[2] != null && (boolean) args[2]); + String analyzer = (String) args[3]; + IntervalFilter filter = (IntervalFilter) args[4]; + return new Match(query, max_gaps, ordered, analyzer, filter); + }); + static { + PARSER.declareString(constructorArg(), new ParseField("query")); + PARSER.declareInt(optionalConstructorArg(), new ParseField("max_gaps")); + PARSER.declareBoolean(optionalConstructorArg(), new ParseField("ordered")); + PARSER.declareString(optionalConstructorArg(), new ParseField("analyzer")); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> IntervalFilter.fromXContent(p), new ParseField("filter")); + } + + public static Match fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + } + + public static class Disjunction extends IntervalsSourceProvider { + + public static final String NAME = "any_of"; + + private final List subSources; + private final IntervalFilter filter; + + public Disjunction(List subSources, IntervalFilter filter) { + this.subSources = subSources; + this.filter = filter; + } + + public Disjunction(StreamInput in) throws IOException { + this.subSources = in.readNamedWriteableList(IntervalsSourceProvider.class); + this.filter = in.readOptionalWriteable(IntervalFilter::new); + } + + @Override + public IntervalsSource getSource(QueryShardContext ctx, MappedFieldType fieldType) throws IOException { + List sources = new ArrayList<>(); + for (IntervalsSourceProvider provider : subSources) { + sources.add(provider.getSource(ctx, fieldType)); + } + IntervalsSource source = Intervals.or(sources.toArray(new IntervalsSource[0])); + if (filter == null) { + return source; + } + return filter.filter(source, ctx, fieldType); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Disjunction that = (Disjunction) o; + return Objects.equals(subSources, that.subSources); + } + + @Override + public int hashCode() { + return Objects.hash(subSources); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeNamedWriteableList(subSources); + out.writeOptionalWriteable(filter); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + builder.startArray("intervals"); + for (IntervalsSourceProvider provider : subSources) { + builder.startObject(); + provider.toXContent(builder, params); + builder.endObject(); + } + builder.endArray(); + if (filter != null) { + builder.field("filter", filter); + } + return builder.endObject(); + } + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, + args -> { + List subSources = (List)args[0]; + IntervalFilter filter = (IntervalFilter) args[1]; + return new Disjunction(subSources, filter); + }); + static { + PARSER.declareObjectArray(constructorArg(), (p, c) -> IntervalsSourceProvider.parseInnerIntervals(p), + new ParseField("intervals")); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> IntervalFilter.fromXContent(p), + new ParseField("filter")); + } + + public static Disjunction fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + } + + public static class Combine extends IntervalsSourceProvider { + + public static final String NAME = "all_of"; + + private final 
List subSources; + private final boolean ordered; + private final int maxGaps; + private final IntervalFilter filter; + + public Combine(List subSources, boolean ordered, int maxGaps, IntervalFilter filter) { + this.subSources = subSources; + this.ordered = ordered; + this.maxGaps = maxGaps; + this.filter = filter; + } + + public Combine(StreamInput in) throws IOException { + this.ordered = in.readBoolean(); + this.subSources = in.readNamedWriteableList(IntervalsSourceProvider.class); + this.maxGaps = in.readInt(); + this.filter = in.readOptionalWriteable(IntervalFilter::new); + } + + @Override + public IntervalsSource getSource(QueryShardContext ctx, MappedFieldType fieldType) throws IOException { + List ss = new ArrayList<>(); + for (IntervalsSourceProvider provider : subSources) { + ss.add(provider.getSource(ctx, fieldType)); + } + IntervalsSource source = IntervalBuilder.combineSources(ss, maxGaps, ordered); + if (filter != null) { + return filter.filter(source, ctx, fieldType); + } + return source; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Combine combine = (Combine) o; + return Objects.equals(subSources, combine.subSources) && + ordered == combine.ordered && maxGaps == combine.maxGaps; + } + + @Override + public int hashCode() { + return Objects.hash(subSources, ordered, maxGaps); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(ordered); + out.writeNamedWriteableList(subSources); + out.writeInt(maxGaps); + out.writeOptionalWriteable(filter); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + builder.field("ordered", ordered); + builder.field("max_gaps", maxGaps); + builder.startArray("intervals"); + for (IntervalsSourceProvider provider : subSources) { + builder.startObject(); + provider.toXContent(builder, params); + builder.endObject(); + } + builder.endArray(); + if (filter != null) { + builder.field("filter", filter); + } + return builder.endObject(); + } + + @SuppressWarnings("unchecked") + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, + args -> { + boolean ordered = (args[0] != null && (boolean) args[0]); + List subSources = (List)args[1]; + Integer maxGaps = (args[2] == null ? 
-1 : (Integer)args[2]); + IntervalFilter filter = (IntervalFilter) args[3]; + return new Combine(subSources, ordered, maxGaps, filter); + }); + static { + PARSER.declareBoolean(optionalConstructorArg(), new ParseField("ordered")); + PARSER.declareObjectArray(constructorArg(), (p, c) -> IntervalsSourceProvider.parseInnerIntervals(p), + new ParseField("intervals")); + PARSER.declareInt(optionalConstructorArg(), new ParseField("max_gaps")); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> IntervalFilter.fromXContent(p), new ParseField("filter")); + } + + public static Combine fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + } + + public static class IntervalFilter implements ToXContent, Writeable { + + public static final String NAME = "filter"; + + private final String type; + private final IntervalsSourceProvider filter; + + public IntervalFilter(IntervalsSourceProvider filter, String type) { + this.filter = filter; + this.type = type.toLowerCase(Locale.ROOT); + } + + public IntervalFilter(StreamInput in) throws IOException { + this.type = in.readString(); + this.filter = in.readNamedWriteable(IntervalsSourceProvider.class); + } + + public IntervalsSource filter(IntervalsSource input, QueryShardContext context, MappedFieldType fieldType) throws IOException { + IntervalsSource filterSource = filter.getSource(context, fieldType); + switch (type) { + case "containing": + return Intervals.containing(input, filterSource); + case "contained_by": + return Intervals.containedBy(input, filterSource); + case "not_containing": + return Intervals.notContaining(input, filterSource); + case "not_contained_by": + return Intervals.notContainedBy(input, filterSource); + case "not_overlapping": + return Intervals.nonOverlapping(input, filterSource); + default: + throw new IllegalArgumentException("Unknown filter type [" + type + "]"); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IntervalFilter that = (IntervalFilter) o; + return Objects.equals(type, that.type) && + Objects.equals(filter, that.filter); + } + + @Override + public int hashCode() { + return Objects.hash(type, filter); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(type); + out.writeNamedWriteable(filter); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(type); + builder.startObject(); + filter.toXContent(builder, params); + builder.endObject(); + builder.endObject(); + return builder; + } + + public static IntervalFilter fromXContent(XContentParser parser) throws IOException { + if (parser.nextToken() != XContentParser.Token.FIELD_NAME) { + throw new ParsingException(parser.getTokenLocation(), "Expected [FIELD_NAME] but got [" + parser.currentToken() + "]"); + } + String type = parser.currentName(); + if (parser.nextToken() != XContentParser.Token.START_OBJECT) { + throw new ParsingException(parser.getTokenLocation(), "Expected [START_OBJECT] but got [" + parser.currentToken() + "]"); + } + if (parser.nextToken() != XContentParser.Token.FIELD_NAME) { + throw new ParsingException(parser.getTokenLocation(), "Expected [FIELD_NAME] but got [" + parser.currentToken() + "]"); + } + IntervalsSourceProvider intervals = IntervalsSourceProvider.fromXContent(parser); + if (parser.nextToken() != XContentParser.Token.END_OBJECT) { + throw new 
ParsingException(parser.getTokenLocation(), "Expected [END_OBJECT] but got [" + parser.currentToken() + "]"); + } + if (parser.nextToken() != XContentParser.Token.END_OBJECT) { + throw new ParsingException(parser.getTokenLocation(), "Expected [END_OBJECT] but got [" + parser.currentToken() + "]"); + } + return new IntervalFilter(intervals, type); + } + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java index 996e878dba8..d8476d791d7 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java @@ -67,7 +67,6 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilderTie-Breaker for "best-match" disjunction queries (OR-Queries). * The tie breaker capability allows documents that match more than one query clause @@ -593,9 +581,6 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder newFieldsBoosts; if (fieldsBoosts.isEmpty()) { // no fields provided, defaults to index.query.default_field @@ -828,7 +800,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder shard term [" + this.operationPrimaryTerm + "]"; ensureWriteAllowed(origin); Engine.Index operation; try { operation = prepareIndex(docMapper(sourceToParse.type()), indexSettings.getIndexVersionCreated(), sourceToParse, seqNo, - opPrimaryTerm, version, versionType, origin, - autoGeneratedTimeStamp, isRetry); + opPrimaryTerm, version, versionType, origin, autoGeneratedTimeStamp, isRetry, + ifSeqNoMatch, ifPrimaryTermMatch); Mapping update = operation.parsedDoc().dynamicMappingsUpdate(); if (update != null) { return new Engine.IndexResult(update); @@ -727,8 +730,9 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } public static Engine.Index prepareIndex(DocumentMapperForType docMapper, Version indexCreatedVersion, SourceToParse source, long seqNo, - long primaryTerm, long version, VersionType versionType, Engine.Operation.Origin origin, long autoGeneratedIdTimestamp, - boolean isRetry) { + long primaryTerm, long version, VersionType versionType, Engine.Operation.Origin origin, + long autoGeneratedIdTimestamp, boolean isRetry, + long ifSeqNoMatch, long ifPrimaryTermMatch) { long startTime = System.nanoTime(); ParsedDocument doc = docMapper.getDocumentMapper().parse(source); if (docMapper.getMapping() != null) { @@ -736,7 +740,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(doc.id())); return new Engine.Index(uid, doc, seqNo, primaryTerm, version, versionType, origin, startTime, autoGeneratedIdTimestamp, isRetry, - UNASSIGNED_SEQ_NO, 0); + ifSeqNoMatch, ifPrimaryTermMatch); } private Engine.IndexResult index(Engine engine, Engine.Index index) throws IOException { @@ -787,19 +791,22 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl return new Engine.DeleteResult(e, version, operationPrimaryTerm); } - public Engine.DeleteResult applyDeleteOperationOnPrimary(long version, String type, String id, VersionType versionType) + public Engine.DeleteResult applyDeleteOperationOnPrimary(long version, String type, String id, VersionType versionType, + long ifSeqNoMatch, long ifPrimaryTermMatch) throws IOException { assert versionType.validateVersionForWrites(version); return applyDeleteOperation(getEngine(), 
UNASSIGNED_SEQ_NO, operationPrimaryTerm, version, type, id, versionType, - Engine.Operation.Origin.PRIMARY); + ifSeqNoMatch, ifPrimaryTermMatch, Engine.Operation.Origin.PRIMARY); } public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, long version, String type, String id) throws IOException { - return applyDeleteOperation(getEngine(), seqNo, operationPrimaryTerm, version, type, id, null, Engine.Operation.Origin.REPLICA); + return applyDeleteOperation( + getEngine(), seqNo, operationPrimaryTerm, version, type, id, null, UNASSIGNED_SEQ_NO, 0, Engine.Operation.Origin.REPLICA); } private Engine.DeleteResult applyDeleteOperation(Engine engine, long seqNo, long opPrimaryTerm, long version, String type, String id, - @Nullable VersionType versionType, Engine.Operation.Origin origin) throws IOException { + @Nullable VersionType versionType, long ifSeqNoMatch, long ifPrimaryTermMatch, + Engine.Operation.Origin origin) throws IOException { assert opPrimaryTerm <= this.operationPrimaryTerm : "op term [ " + opPrimaryTerm + " ] > shard term [" + this.operationPrimaryTerm + "]"; ensureWriteAllowed(origin); @@ -828,15 +835,16 @@ } final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); final Engine.Delete delete = prepareDelete(type, id, uid, seqNo, opPrimaryTerm, version, - versionType, origin); + versionType, origin, ifSeqNoMatch, ifPrimaryTermMatch); return delete(engine, delete); } private Engine.Delete prepareDelete(String type, String id, Term uid, long seqNo, long primaryTerm, long version, - VersionType versionType, Engine.Operation.Origin origin) { + VersionType versionType, Engine.Operation.Origin origin, + long ifSeqNoMatch, long ifPrimaryTermMatch) { long startTime = System.nanoTime(); return new Engine.Delete(resolveType(type), id, uid, seqNo, primaryTerm, version, versionType, origin, startTime, - UNASSIGNED_SEQ_NO, 0); + ifSeqNoMatch, ifPrimaryTermMatch); } private Engine.DeleteResult delete(Engine engine, Engine.Delete delete) throws IOException { @@ -1283,14 +1291,14 @@ // we set canHaveDuplicates to true all the time such that we de-optimize the translog case and ensure that all // autoGeneratedID docs that are coming from the primary are updated correctly.
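The ifSeqNoMatch/ifPrimaryTermMatch pair threaded through these signatures enables compare-and-set style writes on the primary: the operation only applies if the live document still carries the expected sequence number and primary term. A minimal sketch (identifiers illustrative):

    Engine.DeleteResult result = indexShard.applyDeleteOperationOnPrimary(
        Versions.MATCH_ANY, "_doc", "1", VersionType.INTERNAL, expectedSeqNo, expectedPrimaryTerm);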
result = applyIndexOperation(engine, index.seqNo(), index.primaryTerm(), index.version(), - versionType, index.getAutoGeneratedIdTimestamp(), true, origin, + versionType, UNASSIGNED_SEQ_NO, 0, index.getAutoGeneratedIdTimestamp(), true, origin, source(shardId.getIndexName(), index.type(), index.id(), index.source(), XContentHelper.xContentType(index.source())).routing(index.routing())); break; case DELETE: final Translog.Delete delete = (Translog.Delete) operation; result = applyDeleteOperation(engine, delete.seqNo(), delete.primaryTerm(), delete.version(), delete.type(), delete.id(), - versionType, origin); + versionType, UNASSIGNED_SEQ_NO, 0, origin); break; case NO_OP: final Translog.NoOp noOp = (Translog.NoOp) operation; @@ -1997,7 +2005,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl getLocalCheckpoint() == primaryContext.getCheckpointStates().get(routingEntry().allocationId().getId()).getLocalCheckpoint(); synchronized (mutex) { replicationTracker.activateWithPrimaryContext(primaryContext); // make changes to primaryMode flag only under mutex - if (getMaxSeqNoOfUpdatesOrDeletes() == SequenceNumbers.UNASSIGNED_SEQ_NO) { + if (getMaxSeqNoOfUpdatesOrDeletes() == UNASSIGNED_SEQ_NO) { // If the old primary was on an old version that did not replicate the msu, // we need to bootstrap it manually from its local history. assert indexSettings.getIndexVersionCreated().before(Version.V_6_5_0); @@ -2916,8 +2924,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * @see org.elasticsearch.indices.recovery.RecoveryTarget#indexTranslogOperations(List, int, long, long) */ public void advanceMaxSeqNoOfUpdatesOrDeletes(long seqNo) { - assert seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO - || getMaxSeqNoOfUpdatesOrDeletes() == SequenceNumbers.UNASSIGNED_SEQ_NO : + assert seqNo != UNASSIGNED_SEQ_NO + || getMaxSeqNoOfUpdatesOrDeletes() == UNASSIGNED_SEQ_NO : "replica has max_seq_no_of_updates=" + getMaxSeqNoOfUpdatesOrDeletes() + " but primary does not"; getEngine().advanceMaxSeqNoOfUpdatesOrDeletes(seqNo); assert seqNo <= getMaxSeqNoOfUpdatesOrDeletes() : getMaxSeqNoOfUpdatesOrDeletes() + " < " + seqNo; diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index 2ad9a0892b8..2837add3c90 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -41,7 +41,7 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; import org.elasticsearch.threadpool.ExecutorBuilder; -import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.TransportSettings; import java.io.IOException; import java.lang.reflect.Constructor; @@ -226,7 +226,7 @@ public class PluginsService { } } for (final String feature : features.keySet()) { - builder.put(TcpTransport.FEATURE_PREFIX + "." + feature, true); + builder.put(TransportSettings.FEATURE_PREFIX + "." 
+ feature, true); } return builder.put(this.settings).build(); } diff --git a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java index c8d4e2e6209..d07467e5d1f 100644 --- a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java @@ -41,13 +41,13 @@ import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.MovAvgPipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.MovAvgModel; +import org.elasticsearch.search.aggregations.pipeline.MovAvgPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; -import org.elasticsearch.search.rescore.RescorerBuilder; import org.elasticsearch.search.rescore.Rescorer; +import org.elasticsearch.search.rescore.RescorerBuilder; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.SuggestionBuilder; @@ -127,7 +127,7 @@ public interface SearchPlugin { return emptyList(); } /** - * The next {@link Rescorer}s added by this plugin. + * The new {@link Rescorer}s added by this plugin. */ default List> getRescorers() { return emptyList(); @@ -239,6 +239,7 @@ public interface SearchPlugin { super(name, reader, parser); } } + /** * Specification for an {@link Aggregation}. 
*/ diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java index d3464d23986..87cc7a0fb41 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java @@ -40,7 +40,7 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; public class RestDeleteAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger( LogManager.getLogger(RestDeleteAction.class)); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " + + public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " + "document index requests is deprecated, use the /{index}/_doc/{id} endpoint instead."; public RestDeleteAction(Settings settings, RestController controller) { diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java index cc2fd6ba1db..3684f9b3b08 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java @@ -40,7 +40,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestMultiGetAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger( LogManager.getLogger(RestMultiGetAction.class)); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + + public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying types in multi get requests is deprecated."; private final boolean allowExplicitIndex; diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java index b672445421a..b3040a3572f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java @@ -19,10 +19,12 @@ package org.elasticsearch.rest.action.document; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; @@ -37,9 +39,14 @@ import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestUpdateAction extends BaseRestHandler { + private static final DeprecationLogger deprecationLogger = + new DeprecationLogger(LogManager.getLogger(RestUpdateAction.class)); + public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " + + "document update requests is deprecated, use the endpoint /{index}/_update/{id} instead."; public RestUpdateAction(Settings settings, RestController controller) { super(settings); + controller.registerHandler(POST, "/{index}/_update/{id}", this); controller.registerHandler(POST, 
"/{index}/{type}/{id}/_update", this); } @@ -50,9 +57,16 @@ public class RestUpdateAction extends BaseRestHandler { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - UpdateRequest updateRequest = new UpdateRequest(request.param("index"), - request.param("type"), - request.param("id")); + UpdateRequest updateRequest; + if (request.hasParam("type")) { + deprecationLogger.deprecatedAndMaybeLog("update_with_types", TYPES_DEPRECATION_MESSAGE); + updateRequest = new UpdateRequest(request.param("index"), + request.param("type"), + request.param("id")); + } else { + updateRequest = new UpdateRequest(request.param("index"), request.param("id")); + } + updateRequest.routing(request.param("routing")); updateRequest.timeout(request.paramAsTime("timeout", updateRequest.timeout())); updateRequest.setRefreshPolicy(request.param("refresh")); diff --git a/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java b/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java index d0eecb10123..ec6492c9d5e 100644 --- a/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java +++ b/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java @@ -95,16 +95,16 @@ public class JodaCompatibleZonedDateTime { return DATE_FORMATTER.format(dt); } - public boolean isAfter(ZonedDateTime o) { - return dt.isAfter(o); + public boolean isAfter(JodaCompatibleZonedDateTime o) { + return dt.isAfter(o.getZonedDateTime()); } - public boolean isBefore(ZonedDateTime o) { - return dt.isBefore(o); + public boolean isBefore(JodaCompatibleZonedDateTime o) { + return dt.isBefore(o.getZonedDateTime()); } - public boolean isEqual(ZonedDateTime o) { - return dt.isEqual(o); + public boolean isEqual(JodaCompatibleZonedDateTime o) { + return dt.isEqual(o.getZonedDateTime()); } public int getDayOfMonth() { diff --git a/server/src/main/java/org/elasticsearch/search/SearchHits.java b/server/src/main/java/org/elasticsearch/search/SearchHits.java index 0f1cb7f11e1..2a432fc97fe 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHits.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHits.java @@ -19,8 +19,10 @@ package org.elasticsearch.search; +import org.apache.lucene.search.SortField; import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.TotalHits.Relation; +import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -56,14 +58,29 @@ public final class SearchHits implements Streamable, ToXContentFragment, Iterabl private float maxScore; + @Nullable + private SortField[] sortFields; + @Nullable + private String collapseField; + @Nullable + private Object[] collapseValues; + SearchHits() { } public SearchHits(SearchHit[] hits, @Nullable TotalHits totalHits, float maxScore) { + this(hits, totalHits, maxScore, null, null, null); + } + + public SearchHits(SearchHit[] hits, @Nullable TotalHits totalHits, float maxScore, @Nullable SortField[] sortFields, + @Nullable String collapseField, @Nullable Object[] collapseValues) { this.hits = hits; this.totalHits = totalHits == null ? 
null : new Total(totalHits); this.maxScore = maxScore; + this.sortFields = sortFields; + this.collapseField = collapseField; + this.collapseValues = collapseValues; } /** @@ -74,7 +91,6 @@ public final class SearchHits implements Streamable, ToXContentFragment, Iterabl return totalHits == null ? null : totalHits.in; } - /** * The maximum score of this query. */ @@ -96,6 +112,31 @@ public final class SearchHits implements Streamable, ToXContentFragment, Iterabl return hits[position]; } + /** + * In case documents were sorted by field(s), returns information about such field(s), null otherwise + * @see SortField + */ + @Nullable + public SortField[] getSortFields() { + return sortFields; + } + + /** + * In case field collapsing was performed, returns the field used for field collapsing, null otherwise + */ + @Nullable + public String getCollapseField() { + return collapseField; + } + + /** + * In case field collapsing was performed, returns the values of the field that field collapsing was performed on, null otherwise + */ + @Nullable + public Object[] getCollapseValues() { + return collapseValues; + } + @Override public Iterator iterator() { return Arrays.stream(getHits()).iterator(); @@ -175,8 +216,7 @@ public final class SearchHits implements Streamable, ToXContentFragment, Iterabl } } } - SearchHits searchHits = new SearchHits(hits.toArray(new SearchHit[hits.size()]), totalHits, maxScore); - return searchHits; + return new SearchHits(hits.toArray(new SearchHit[0]), totalHits, maxScore); } public static SearchHits readSearchHits(StreamInput in) throws IOException { @@ -203,6 +243,12 @@ public final class SearchHits implements Streamable, ToXContentFragment, Iterabl hits[i] = SearchHit.readSearchHit(in); } } + //TODO update version once backported + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + sortFields = in.readOptionalArray(Lucene::readSortField, SortField[]::new); + collapseField = in.readOptionalString(); + collapseValues = in.readOptionalArray(Lucene::readSortValue, Object[]::new); + } } @Override @@ -219,6 +265,12 @@ public final class SearchHits implements Streamable, ToXContentFragment, Iterabl hit.writeTo(out); } } + //TODO update version once backported + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeOptionalArray(Lucene::writeSortField, sortFields); + out.writeOptionalString(collapseField); + out.writeOptionalArray(Lucene::writeSortValue, collapseValues); + } } @Override @@ -229,12 +281,16 @@ public final class SearchHits implements Streamable, ToXContentFragment, Iterabl SearchHits other = (SearchHits) obj; return Objects.equals(totalHits, other.totalHits) && Objects.equals(maxScore, other.maxScore) - && Arrays.equals(hits, other.hits); + && Arrays.equals(hits, other.hits) + && Arrays.equals(sortFields, other.sortFields) + && Objects.equals(collapseField, other.collapseField) + && Arrays.equals(collapseValues, other.collapseValues); } @Override public int hashCode() { - return Objects.hash(totalHits, maxScore, Arrays.hashCode(hits)); + return Objects.hash(totalHits, maxScore, Arrays.hashCode(hits), + Arrays.hashCode(sortFields), collapseField, Arrays.hashCode(collapseValues)); } public static TotalHits parseTotalHitsFragment(XContentParser parser) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 66e97230636..2531685b945 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ 
b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -44,6 +44,8 @@ import org.elasticsearch.index.query.GeoDistanceQueryBuilder; import org.elasticsearch.index.query.GeoPolygonQueryBuilder; import org.elasticsearch.index.query.GeoShapeQueryBuilder; import org.elasticsearch.index.query.IdsQueryBuilder; +import org.elasticsearch.index.query.IntervalQueryBuilder; +import org.elasticsearch.index.query.IntervalsSourceProvider; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.index.query.MatchPhrasePrefixQueryBuilder; @@ -153,60 +155,41 @@ import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.UnmappedTerms; import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.InternalAvg; import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.InternalCardinality; +import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.GeoBoundsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.InternalGeoBounds; import org.elasticsearch.search.aggregations.metrics.GeoCentroidAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalAvg; +import org.elasticsearch.search.aggregations.metrics.InternalCardinality; +import org.elasticsearch.search.aggregations.metrics.InternalExtendedStats; +import org.elasticsearch.search.aggregations.metrics.InternalGeoBounds; import org.elasticsearch.search.aggregations.metrics.InternalGeoCentroid; +import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentiles; import org.elasticsearch.search.aggregations.metrics.InternalMax; -import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalMedianAbsoluteDeviation; import org.elasticsearch.search.aggregations.metrics.InternalMin; +import org.elasticsearch.search.aggregations.metrics.InternalScriptedMetric; +import org.elasticsearch.search.aggregations.metrics.InternalStats; +import org.elasticsearch.search.aggregations.metrics.InternalSum; +import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles; +import org.elasticsearch.search.aggregations.metrics.InternalTopHits; +import org.elasticsearch.search.aggregations.metrics.InternalValueCount; +import org.elasticsearch.search.aggregations.metrics.InternalWeightedAvg; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MedianAbsoluteDeviationAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.PercentileRanksAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.PercentilesAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentiles; -import 
org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles; -import org.elasticsearch.search.aggregations.metrics.InternalScriptedMetric; import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.InternalStats; import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.InternalExtendedStats; -import org.elasticsearch.search.aggregations.metrics.InternalSum; import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.InternalTopHits; import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.InternalValueCount; import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.InternalWeightedAvg; import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.InternalMedianAbsoluteDeviation; -import org.elasticsearch.search.aggregations.metrics.MedianAbsoluteDeviationAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.InternalBucketMetricValue; import org.elasticsearch.search.aggregations.pipeline.AvgBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.AvgBucketPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.MaxBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.MaxBucketPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.MinBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.MinBucketPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.InternalPercentilesBucket; -import org.elasticsearch.search.aggregations.pipeline.PercentilesBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.PercentilesBucketPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.InternalStatsBucket; -import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.ExtendedStatsBucketParser; -import org.elasticsearch.search.aggregations.pipeline.ExtendedStatsBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.ExtendedStatsBucketPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.InternalExtendedStatsBucket; -import org.elasticsearch.search.aggregations.pipeline.SumBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.SumBucketPipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.BucketScriptPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketScriptPipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.BucketSelectorPipelineAggregationBuilder; @@ -217,19 +200,38 @@ 
import org.elasticsearch.search.aggregations.pipeline.CumulativeSumPipelineAggre import org.elasticsearch.search.aggregations.pipeline.CumulativeSumPipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.DerivativePipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.DerivativePipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.InternalDerivative; -import org.elasticsearch.search.aggregations.pipeline.MovAvgPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.MovAvgPipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.EwmaModel; +import org.elasticsearch.search.aggregations.pipeline.ExtendedStatsBucketParser; +import org.elasticsearch.search.aggregations.pipeline.ExtendedStatsBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.ExtendedStatsBucketPipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.HoltLinearModel; import org.elasticsearch.search.aggregations.pipeline.HoltWintersModel; +import org.elasticsearch.search.aggregations.pipeline.InternalBucketMetricValue; +import org.elasticsearch.search.aggregations.pipeline.InternalDerivative; +import org.elasticsearch.search.aggregations.pipeline.InternalExtendedStatsBucket; +import org.elasticsearch.search.aggregations.pipeline.InternalPercentilesBucket; +import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; +import org.elasticsearch.search.aggregations.pipeline.InternalStatsBucket; import org.elasticsearch.search.aggregations.pipeline.LinearModel; +import org.elasticsearch.search.aggregations.pipeline.MaxBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.MaxBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.MinBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.MinBucketPipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.MovAvgModel; -import org.elasticsearch.search.aggregations.pipeline.SimpleModel; +import org.elasticsearch.search.aggregations.pipeline.MovAvgPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.MovAvgPipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.MovFnPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.MovFnPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.PercentilesBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.PercentilesBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.SerialDiffPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.SerialDiffPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.SimpleModel; +import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.SumBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.SumBucketPipelineAggregator; import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase; @@ -311,6 +313,7 @@ public class SearchModule { 
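
Illustration (not part of the patch; see the registerIntervalsSourceProviders hunk below): a NamedWriteableRegistry.Entry binds a category interface and a wire name to a StreamInput reader, which is how the new intervals source providers round-trip over the transport layer. A hedged sketch following the same pattern, with an invented provider:

    // Hypothetical entry: "my_intervals" and MyIntervalsProvider are made up for
    // illustration. On write, the wire name precedes the payload; on read, the
    // registry resolves the reader by (category class, name) and invokes it.
    namedWriteables.add(new NamedWriteableRegistry.Entry(
        IntervalsSourceProvider.class,   // category the name must be unique within
        "my_intervals",                  // name written to the stream
        MyIntervalsProvider::new));      // Writeable.Reader invoked with a StreamInput
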
registerFetchSubPhases(plugins); registerSearchExts(plugins); registerShapes(); + registerIntervalsSourceProviders(); } public List getNamedWriteables() { @@ -803,6 +806,7 @@ public class SearchModule { registerQuery(new QuerySpec<>(ExistsQueryBuilder.NAME, ExistsQueryBuilder::new, ExistsQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(MatchNoneQueryBuilder.NAME, MatchNoneQueryBuilder::new, MatchNoneQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(TermsSetQueryBuilder.NAME, TermsSetQueryBuilder::new, TermsSetQueryBuilder::fromXContent)); + registerQuery(new QuerySpec<>(IntervalQueryBuilder.NAME, IntervalQueryBuilder::new, IntervalQueryBuilder::fromXContent)); if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { registerQuery(new QuerySpec<>(GeoShapeQueryBuilder.NAME, GeoShapeQueryBuilder::new, GeoShapeQueryBuilder::fromXContent)); @@ -811,6 +815,15 @@ public class SearchModule { registerFromPlugin(plugins, SearchPlugin::getQueries, this::registerQuery); } + private void registerIntervalsSourceProviders() { + namedWriteables.add(new NamedWriteableRegistry.Entry(IntervalsSourceProvider.class, + IntervalsSourceProvider.Match.NAME, IntervalsSourceProvider.Match::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(IntervalsSourceProvider.class, + IntervalsSourceProvider.Combine.NAME, IntervalsSourceProvider.Combine::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(IntervalsSourceProvider.class, + IntervalsSourceProvider.Disjunction.NAME, IntervalsSourceProvider.Disjunction::new)); + } + private void registerQuery(QuerySpec spec) { namedWriteables.add(new NamedWriteableRegistry.Entry(QueryBuilder.class, spec.getName().getPreferredName(), spec.getReader())); namedXContents.add(new NamedXContentRegistry.Entry(QueryBuilder.class, spec.getName(), diff --git a/server/src/main/java/org/elasticsearch/search/SearchSortValues.java b/server/src/main/java/org/elasticsearch/search/SearchSortValues.java index 271b448d496..c79b5ad74d7 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchSortValues.java +++ b/server/src/main/java/org/elasticsearch/search/SearchSortValues.java @@ -23,7 +23,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java index a5799900355..5199313e0ac 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java @@ -60,7 +60,6 @@ import org.joda.time.DateTimeZone; import java.io.IOException; import java.util.HashMap; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Objects; @@ -72,7 +71,7 @@ import static java.util.Collections.unmodifiableMap; public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuilder implements MultiBucketAggregationBuilder 
{ public static final String NAME = "date_histogram"; - private static DateMathParser EPOCH_MILLIS_PARSER = DateFormatter.forPattern("epoch_millis", Locale.ROOT).toDateMathParser(); + private static DateMathParser EPOCH_MILLIS_PARSER = DateFormatter.forPattern("epoch_millis").toDateMathParser(); public static final Map DATE_FIELD_UNITS; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java index 940511619b1..992c29637a2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -39,9 +40,10 @@ import java.util.Objects; public class InternalPercentilesBucket extends InternalNumericMetricsAggregation.MultiValue implements PercentilesBucket { private double[] percentiles; private double[] percents; + private boolean keyed = true; private final transient Map percentileLookups = new HashMap<>(); - InternalPercentilesBucket(String name, double[] percents, double[] percentiles, + InternalPercentilesBucket(String name, double[] percents, double[] percentiles, boolean keyed, DocValueFormat formatter, List pipelineAggregators, Map metaData) { super(name, pipelineAggregators, metaData); @@ -52,6 +54,7 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation this.format = formatter; this.percentiles = percentiles; this.percents = percents; + this.keyed = keyed; computeLookup(); } @@ -69,6 +72,11 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation format = in.readNamedWriteable(DocValueFormat.class); percentiles = in.readDoubleArray(); percents = in.readDoubleArray(); + + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + keyed = in.readBoolean(); + } + computeLookup(); } @@ -77,6 +85,10 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation out.writeNamedWriteable(format); out.writeDoubleArray(percentiles); out.writeDoubleArray(percents); + + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeBoolean(keyed); + } } @Override @@ -120,17 +132,33 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - builder.startObject("values"); - for (double percent : percents) { - double value = percentile(percent); - boolean hasValue = !(Double.isInfinite(value) || Double.isNaN(value)); - String key = String.valueOf(percent); - builder.field(key, hasValue ? value : null); - if (hasValue && format != DocValueFormat.RAW) { - builder.field(key + "_as_string", percentileAsString(percent)); + if (keyed) { + builder.startObject("values"); + for (double percent : percents) { + double value = percentile(percent); + boolean hasValue = !(Double.isInfinite(value) || Double.isNaN(value)); + String key = String.valueOf(percent); + builder.field(key, hasValue ? 
value : null); + if (hasValue && format != DocValueFormat.RAW) { + builder.field(key + "_as_string", percentileAsString(percent)); + } } + builder.endObject(); + } else { + builder.startArray("values"); + for (double percent : percents) { + double value = percentile(percent); + boolean hasValue = !(Double.isInfinite(value) || Double.isNaN(value)); + builder.startObject(); + builder.field("key", percent); + builder.field("value", hasValue ? value : null); + if (hasValue && format != DocValueFormat.RAW) { + builder.field(String.valueOf(percent) + "_as_string", percentileAsString(percent)); + } + builder.endObject(); + } + builder.endArray(); } - builder.endObject(); return builder; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java index 49e065cdeef..a3ac2201777 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.pipeline; import com.carrotsearch.hppc.DoubleArrayList; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -40,8 +41,10 @@ public class PercentilesBucketPipelineAggregationBuilder extends BucketMetricsPipelineAggregationBuilder { public static final String NAME = "percentiles_bucket"; static final ParseField PERCENTS_FIELD = new ParseField("percents"); + static final ParseField KEYED_FIELD = new ParseField("keyed"); private double[] percents = new double[] { 1.0, 5.0, 25.0, 50.0, 75.0, 95.0, 99.0 }; + private boolean keyed = true; public PercentilesBucketPipelineAggregationBuilder(String name, String bucketsPath) { super(name, NAME, new String[] { bucketsPath }); @@ -54,24 +57,32 @@ public class PercentilesBucketPipelineAggregationBuilder throws IOException { super(in, NAME); percents = in.readDoubleArray(); + + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + keyed = in.readBoolean(); + } } @Override protected void innerWriteTo(StreamOutput out) throws IOException { out.writeDoubleArray(percents); + + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeBoolean(keyed); + } } /** * Get the percentages to calculate percentiles for in this aggregation */ - public double[] percents() { + public double[] getPercents() { return percents; } /** * Set the percentages to calculate percentiles for in this aggregation */ - public PercentilesBucketPipelineAggregationBuilder percents(double[] percents) { + public PercentilesBucketPipelineAggregationBuilder setPercents(double[] percents) { if (percents == null) { throw new IllegalArgumentException("[percents] must not be null: [" + name + "]"); } @@ -85,9 +96,24 @@ public class PercentilesBucketPipelineAggregationBuilder return this; } + /** + * Set whether the XContent should be keyed + */ + public PercentilesBucketPipelineAggregationBuilder setKeyed(boolean keyed) { + this.keyed = keyed; + return this; + } + + /** + * Get whether the XContent should be keyed + */ + public boolean getKeyed() { + return keyed; + } + @Override protected PipelineAggregator createInternal(Map metaData) throws IOException { - return new 
PercentilesBucketPipelineAggregator(name, percents, bucketsPaths, gapPolicy(), formatter(), metaData); + return new PercentilesBucketPipelineAggregator(name, percents, keyed, bucketsPaths, gapPolicy(), formatter(), metaData); } @Override @@ -108,6 +134,7 @@ public class PercentilesBucketPipelineAggregationBuilder if (percents != null) { builder.array(PERCENTS_FIELD.getPreferredName(), percents); } + builder.field(KEYED_FIELD.getPreferredName(), keyed); return builder; } @@ -122,7 +149,11 @@ public class PercentilesBucketPipelineAggregationBuilder double[] percents = (double[]) params.get(PERCENTS_FIELD.getPreferredName()); if (percents != null) { - factory.percents(percents); + factory.setPercents(percents); + } + Boolean keyed = (Boolean) params.get(KEYED_FIELD.getPreferredName()); + if (keyed != null) { + factory.setKeyed(keyed); } return factory; @@ -139,6 +170,10 @@ public class PercentilesBucketPipelineAggregationBuilder params.put(PERCENTS_FIELD.getPreferredName(), percents.toArray()); return true; } + else if (KEYED_FIELD.match(field, parser.getDeprecationHandler()) && token == XContentParser.Token.VALUE_BOOLEAN){ + params.put(KEYED_FIELD.getPreferredName(), parser.booleanValue()); + return true; + } return false; } @@ -146,13 +181,13 @@ public class PercentilesBucketPipelineAggregationBuilder @Override protected int innerHashCode() { - return Arrays.hashCode(percents); + return Objects.hash(Arrays.hashCode(percents), keyed); } @Override protected boolean innerEquals(BucketMetricsPipelineAggregationBuilder obj) { PercentilesBucketPipelineAggregationBuilder other = (PercentilesBucketPipelineAggregationBuilder) obj; - return Objects.deepEquals(percents, other.percents); + return Objects.deepEquals(percents, other.percents) && Objects.equals(keyed, other.keyed); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java index 20c38ca05bd..3333e80963e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; @@ -34,12 +35,14 @@ import java.util.Map; public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAggregator { private final double[] percents; + private boolean keyed = true; private List data; - PercentilesBucketPipelineAggregator(String name, double[] percents, String[] bucketsPaths, GapPolicy gapPolicy, - DocValueFormat formatter, Map metaData) { + PercentilesBucketPipelineAggregator(String name, double[] percents, boolean keyed, String[] bucketsPaths, + GapPolicy gapPolicy, DocValueFormat formatter, Map metaData) { super(name, bucketsPaths, gapPolicy, formatter, metaData); this.percents = percents; + this.keyed = keyed; } /** @@ -48,11 +51,19 @@ public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAg public PercentilesBucketPipelineAggregator(StreamInput in) throws IOException { super(in); percents = in.readDoubleArray(); + + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + keyed = in.readBoolean(); + } } 
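
Illustration (usage sketch, not part of the patch): with the keyed flag added here, percentiles_bucket can render its values either as an object keyed by percent (the default, keyed = true) or as an array of key/value objects, mirroring the percentiles metric aggregation. The builder calls use the renamed setters from this diff; the aggregation name and buckets_path are hypothetical:

    PercentilesBucketPipelineAggregationBuilder pb =
        new PercentilesBucketPipelineAggregationBuilder(
            "overall_sales_percentiles",        // aggregation name (hypothetical)
            "sales_per_month>sales");           // buckets_path (hypothetical)
    pb.setPercents(new double[] { 25.0, 50.0, 75.0 });  // renamed from percents(...)
    pb.setKeyed(false);  // emit "values" as [{"key": 25.0, "value": ...}, ...]
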
@Override public void innerWriteTo(StreamOutput out) throws IOException { out.writeDoubleArray(percents); + + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeBoolean(keyed); + } } @Override @@ -91,6 +102,6 @@ public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAg // todo need postCollection() to clean up temp sorted data? - return new InternalPercentilesBucket(name(), percents, percentiles, format, pipelineAggregators, metadata); + return new InternalPercentilesBucket(name(), percents, percentiles, keyed, format, pipelineAggregators, metadata); } } diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 34046c205af..4c6090758dd 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -219,15 +219,18 @@ public class RestoreService implements ClusterStateApplier { // Now we can start the actual restore process by adding shards to be recovered in the cluster state // and updating cluster metadata (global and index) as needed clusterService.submitStateUpdateTask(request.cause(), new ClusterStateUpdateTask() { + String restoreUUID = UUIDs.randomBase64UUID(); RestoreInfo restoreInfo = null; @Override public ClusterState execute(ClusterState currentState) { - // Check if another restore process is already running - cannot run two restore processes at the - // same time RestoreInProgress restoreInProgress = currentState.custom(RestoreInProgress.TYPE); - if (restoreInProgress != null && !restoreInProgress.entries().isEmpty()) { - throw new ConcurrentSnapshotExecutionException(snapshot, "Restore process is already running in this cluster"); + if (currentState.getNodes().getMinNodeVersion().before(Version.V_7_0_0)) { + // Check if another restore process is already running - cannot run two restore processes at the + // same time in versions prior to 7.0 + if (restoreInProgress != null && restoreInProgress.isEmpty() == false) { + throw new ConcurrentSnapshotExecutionException(snapshot, "Restore process is already running in this cluster"); + } } // Check if the snapshot to restore is currently being deleted SnapshotDeletionsInProgress deletionsInProgress = currentState.custom(SnapshotDeletionsInProgress.TYPE); @@ -253,7 +256,7 @@ public class RestoreService implements ClusterStateApplier { for (Map.Entry indexEntry : indices.entrySet()) { String index = indexEntry.getValue(); boolean partial = checkPartial(index); - SnapshotRecoverySource recoverySource = new SnapshotRecoverySource(snapshot, snapshotInfo.version(), index); + SnapshotRecoverySource recoverySource = new SnapshotRecoverySource(restoreUUID, snapshot, snapshotInfo.version(), index); String renamedIndexName = indexEntry.getKey(); IndexMetaData snapshotIndexMetaData = metaData.index(index); snapshotIndexMetaData = updateIndexSettings(snapshotIndexMetaData, request.indexSettings, request.ignoreIndexSettings); @@ -329,8 +332,18 @@ public class RestoreService implements ClusterStateApplier { } shards = shardsBuilder.build(); - RestoreInProgress.Entry restoreEntry = new RestoreInProgress.Entry(snapshot, overallState(RestoreInProgress.State.INIT, shards), Collections.unmodifiableList(new ArrayList<>(indices.keySet())), shards); - builder.putCustom(RestoreInProgress.TYPE, new RestoreInProgress(restoreEntry)); + RestoreInProgress.Entry restoreEntry = new RestoreInProgress.Entry( + restoreUUID, snapshot, 
overallState(RestoreInProgress.State.INIT, shards), + Collections.unmodifiableList(new ArrayList<>(indices.keySet())), + shards + ); + RestoreInProgress.Builder restoreInProgressBuilder; + if (restoreInProgress != null) { + restoreInProgressBuilder = new RestoreInProgress.Builder(restoreInProgress); + } else { + restoreInProgressBuilder = new RestoreInProgress.Builder(); + } + builder.putCustom(RestoreInProgress.TYPE, restoreInProgressBuilder.add(restoreEntry).build()); } else { shards = ImmutableOpenMap.of(); } @@ -485,7 +498,7 @@ public class RestoreService implements ClusterStateApplier { @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - listener.onResponse(new RestoreCompletionResponse(snapshot, restoreInfo)); + listener.onResponse(new RestoreCompletionResponse(restoreUUID, snapshot, restoreInfo)); } }); @@ -498,8 +511,8 @@ public class RestoreService implements ClusterStateApplier { public static RestoreInProgress updateRestoreStateWithDeletedIndices(RestoreInProgress oldRestore, Set deletedIndices) { boolean changesMade = false; - final List entries = new ArrayList<>(); - for (RestoreInProgress.Entry entry : oldRestore.entries()) { + RestoreInProgress.Builder builder = new RestoreInProgress.Builder(); + for (RestoreInProgress.Entry entry : oldRestore) { ImmutableOpenMap.Builder shardsBuilder = null; for (ObjectObjectCursor cursor : entry.shards()) { ShardId shardId = cursor.key; @@ -513,27 +526,33 @@ public class RestoreService implements ClusterStateApplier { } if (shardsBuilder != null) { ImmutableOpenMap shards = shardsBuilder.build(); - entries.add(new RestoreInProgress.Entry(entry.snapshot(), overallState(RestoreInProgress.State.STARTED, shards), entry.indices(), shards)); + builder.add(new RestoreInProgress.Entry(entry.uuid(), entry.snapshot(), overallState(RestoreInProgress.State.STARTED, shards), entry.indices(), shards)); } else { - entries.add(entry); + builder.add(entry); } } if (changesMade) { - return new RestoreInProgress(entries.toArray(new RestoreInProgress.Entry[entries.size()])); + return builder.build(); } else { return oldRestore; } } public static final class RestoreCompletionResponse { + private final String uuid; private final Snapshot snapshot; private final RestoreInfo restoreInfo; - private RestoreCompletionResponse(final Snapshot snapshot, final RestoreInfo restoreInfo) { + private RestoreCompletionResponse(final String uuid, final Snapshot snapshot, final RestoreInfo restoreInfo) { + this.uuid = uuid; this.snapshot = snapshot; this.restoreInfo = restoreInfo; } + public String getUuid() { + return uuid; + } + public Snapshot getSnapshot() { return snapshot; } @@ -544,7 +563,7 @@ public class RestoreService implements ClusterStateApplier { } public static class RestoreInProgressUpdater extends RoutingChangesObserver.AbstractRoutingChangesObserver { - private final Map shardChanges = new HashMap<>(); + private final Map shardChanges = new HashMap<>(); @Override public void shardStarted(ShardRouting initializingShard, ShardRouting startedShard) { @@ -552,8 +571,8 @@ public class RestoreService implements ClusterStateApplier { if (initializingShard.primary()) { RecoverySource recoverySource = initializingShard.recoverySource(); if (recoverySource.getType() == RecoverySource.Type.SNAPSHOT) { - Snapshot snapshot = ((SnapshotRecoverySource) recoverySource).snapshot(); - changes(snapshot).shards.put(initializingShard.shardId(), + changes(recoverySource).shards.put( + initializingShard.shardId(), new 
ShardRestoreStatus(initializingShard.currentNodeId(), RestoreInProgress.State.SUCCESS)); } } @@ -564,13 +583,13 @@ if (failedShard.primary() && failedShard.initializing()) { RecoverySource recoverySource = failedShard.recoverySource(); if (recoverySource.getType() == RecoverySource.Type.SNAPSHOT) { - Snapshot snapshot = ((SnapshotRecoverySource) recoverySource).snapshot(); // mark restore entry for this shard as failed when it's due to a file corruption. There is no need to wait on retries // to restore this shard on another node if the snapshot files are corrupt. In cases where a node just left or crashed, // however, we only want to acknowledge the restore operation once it has been successfully restored on another node. if (unassignedInfo.getFailure() != null && Lucene.isCorruptionException(unassignedInfo.getFailure().getCause())) { - changes(snapshot).shards.put(failedShard.shardId(), new ShardRestoreStatus(failedShard.currentNodeId(), - RestoreInProgress.State.FAILURE, unassignedInfo.getFailure().getCause().getMessage())); + changes(recoverySource).shards.put( + failedShard.shardId(), new ShardRestoreStatus(failedShard.currentNodeId(), + RestoreInProgress.State.FAILURE, unassignedInfo.getFailure().getCause().getMessage())); } } } @@ -581,9 +600,11 @@ // if we force an empty primary, we should also fail the restore entry if (unassignedShard.recoverySource().getType() == RecoverySource.Type.SNAPSHOT && initializedShard.recoverySource().getType() != RecoverySource.Type.SNAPSHOT) { - Snapshot snapshot = ((SnapshotRecoverySource) unassignedShard.recoverySource()).snapshot(); - changes(snapshot).shards.put(unassignedShard.shardId(), new ShardRestoreStatus(null, - RestoreInProgress.State.FAILURE, "recovery source type changed from snapshot to " + initializedShard.recoverySource())); + changes(unassignedShard.recoverySource()).shards.put( + unassignedShard.shardId(), + new ShardRestoreStatus(null, + RestoreInProgress.State.FAILURE, "recovery source type changed from snapshot to " + initializedShard.recoverySource()) + ); } } @@ -592,19 +613,21 @@ RecoverySource recoverySource = unassignedShard.recoverySource(); if (recoverySource.getType() == RecoverySource.Type.SNAPSHOT) { if (newUnassignedInfo.getLastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_NO) { - Snapshot snapshot = ((SnapshotRecoverySource) recoverySource).snapshot(); String reason = "shard could not be allocated to any of the nodes"; - changes(snapshot).shards.put(unassignedShard.shardId(), + changes(recoverySource).shards.put( + unassignedShard.shardId(), new ShardRestoreStatus(unassignedShard.currentNodeId(), RestoreInProgress.State.FAILURE, reason)); } } } /** - * Helper method that creates update entry for the given shard id if such an entry does not exist yet. + * Helper method that creates an update entry for the given recovery source's restore uuid + * if such an entry does not exist yet. 
*/ - private Updates changes(Snapshot snapshot) { - return shardChanges.computeIfAbsent(snapshot, k -> new Updates()); + private Updates changes(RecoverySource recoverySource) { + assert recoverySource.getType() == RecoverySource.Type.SNAPSHOT; + return shardChanges.computeIfAbsent(((SnapshotRecoverySource) recoverySource).restoreUUID(), k -> new Updates()); } private static class Updates { @@ -613,38 +636,38 @@ public class RestoreService implements ClusterStateApplier { public RestoreInProgress applyChanges(final RestoreInProgress oldRestore) { if (shardChanges.isEmpty() == false) { - final List entries = new ArrayList<>(); - for (RestoreInProgress.Entry entry : oldRestore.entries()) { - Snapshot snapshot = entry.snapshot(); - Updates updates = shardChanges.get(snapshot); - if (updates.shards.isEmpty() == false) { - ImmutableOpenMap.Builder shardsBuilder = ImmutableOpenMap.builder(entry.shards()); + RestoreInProgress.Builder builder = new RestoreInProgress.Builder(); + for (RestoreInProgress.Entry entry : oldRestore) { + Updates updates = shardChanges.get(entry.uuid()); + ImmutableOpenMap shardStates = entry.shards(); + if (updates != null && updates.shards.isEmpty() == false) { + ImmutableOpenMap.Builder shardsBuilder = ImmutableOpenMap.builder(shardStates); for (Map.Entry shard : updates.shards.entrySet()) { - shardsBuilder.put(shard.getKey(), shard.getValue()); + ShardId shardId = shard.getKey(); + ShardRestoreStatus status = shardStates.get(shardId); + if (status == null || status.state().completed() == false) { + shardsBuilder.put(shardId, shard.getValue()); + } } ImmutableOpenMap shards = shardsBuilder.build(); RestoreInProgress.State newState = overallState(RestoreInProgress.State.STARTED, shards); - entries.add(new RestoreInProgress.Entry(entry.snapshot(), newState, entry.indices(), shards)); + builder.add(new RestoreInProgress.Entry(entry.uuid(), entry.snapshot(), newState, entry.indices(), shards)); } else { - entries.add(entry); + builder.add(entry); } } - return new RestoreInProgress(entries.toArray(new RestoreInProgress.Entry[entries.size()])); + return builder.build(); } else { return oldRestore; } } } - public static RestoreInProgress.Entry restoreInProgress(ClusterState state, Snapshot snapshot) { + public static RestoreInProgress.Entry restoreInProgress(ClusterState state, String restoreUUID) { final RestoreInProgress restoreInProgress = state.custom(RestoreInProgress.TYPE); if (restoreInProgress != null) { - for (RestoreInProgress.Entry e : restoreInProgress.entries()) { - if (e.snapshot().equals(snapshot)) { - return e; - } - } + return restoreInProgress.get(restoreUUID); } return null; } @@ -652,15 +675,15 @@ public class RestoreService implements ClusterStateApplier { static class CleanRestoreStateTaskExecutor implements ClusterStateTaskExecutor, ClusterStateTaskListener { static class Task { - final Snapshot snapshot; + final String uuid; - Task(Snapshot snapshot) { - this.snapshot = snapshot; + Task(String uuid) { + this.uuid = uuid; } @Override public String toString() { - return "clean restore state for restoring snapshot " + snapshot; + return "clean restore state for restore " + uuid; } } @@ -673,25 +696,24 @@ public class RestoreService implements ClusterStateApplier { @Override public ClusterTasksResult execute(final ClusterState currentState, final List tasks) throws Exception { final ClusterTasksResult.Builder resultBuilder = ClusterTasksResult.builder().successes(tasks); - Set completedSnapshots = tasks.stream().map(e -> 
e.snapshot).collect(Collectors.toSet()); - final List entries = new ArrayList<>(); + Set completedRestores = tasks.stream().map(e -> e.uuid).collect(Collectors.toSet()); + RestoreInProgress.Builder restoreInProgressBuilder = new RestoreInProgress.Builder(); final RestoreInProgress restoreInProgress = currentState.custom(RestoreInProgress.TYPE); boolean changed = false; if (restoreInProgress != null) { - for (RestoreInProgress.Entry entry : restoreInProgress.entries()) { - if (completedSnapshots.contains(entry.snapshot()) == false) { - entries.add(entry); - } else { + for (RestoreInProgress.Entry entry : restoreInProgress) { + if (completedRestores.contains(entry.uuid())) { changed = true; + } else { + restoreInProgressBuilder.add(entry); } } } if (changed == false) { return resultBuilder.build(currentState); } - RestoreInProgress updatedRestoreInProgress = new RestoreInProgress(entries.toArray(new RestoreInProgress.Entry[entries.size()])); ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(currentState.getCustoms()); - builder.put(RestoreInProgress.TYPE, updatedRestoreInProgress); + builder.put(RestoreInProgress.TYPE, restoreInProgressBuilder.build()); ImmutableOpenMap customs = builder.build(); return resultBuilder.build(ClusterState.builder(currentState).customs(customs).build()); } @@ -713,12 +735,12 @@ public class RestoreService implements ClusterStateApplier { RestoreInProgress restoreInProgress = state.custom(RestoreInProgress.TYPE); if (restoreInProgress != null) { - for (RestoreInProgress.Entry entry : restoreInProgress.entries()) { + for (RestoreInProgress.Entry entry : restoreInProgress) { if (entry.state().completed()) { assert completed(entry.shards()) : "state says completed but restore entries are not"; clusterService.submitStateUpdateTask( "clean up snapshot restore state", - new CleanRestoreStateTaskExecutor.Task(entry.snapshot()), + new CleanRestoreStateTaskExecutor.Task(entry.uuid()), ClusterStateTaskConfig.build(Priority.URGENT), cleanRestoreStateTaskExecutor, cleanRestoreStateTaskExecutor); @@ -815,7 +837,7 @@ public class RestoreService implements ClusterStateApplier { RestoreInProgress restore = currentState.custom(RestoreInProgress.TYPE); if (restore != null) { Set indicesToFail = null; - for (RestoreInProgress.Entry entry : restore.entries()) { + for (RestoreInProgress.Entry entry : restore) { for (ObjectObjectCursor shard : entry.shards()) { if (!shard.value.state().completed()) { IndexMetaData indexMetaData = currentState.metaData().index(shard.key.getIndex()); @@ -853,10 +875,10 @@ public class RestoreService implements ClusterStateApplier { * @return true if repository is currently in use by one of the running snapshots */ public static boolean isRepositoryInUse(ClusterState clusterState, String repository) { - RestoreInProgress snapshots = clusterState.custom(RestoreInProgress.TYPE); - if (snapshots != null) { - for (RestoreInProgress.Entry snapshot : snapshots.entries()) { - if (repository.equals(snapshot.snapshot().getRepository())) { + RestoreInProgress restoreInProgress = clusterState.custom(RestoreInProgress.TYPE); + if (restoreInProgress != null) { + for (RestoreInProgress.Entry entry: restoreInProgress) { + if (repository.equals(entry.snapshot().getRepository())) { return true; } } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 4c1b84b0f22..fa7c757aaca 100644 --- 
a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -1137,7 +1137,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus // don't allow snapshot deletions while a restore is taking place, // otherwise we could end up deleting a snapshot that is being restored // and the files the restore depends on would all be gone - if (restoreInProgress.entries().isEmpty() == false) { + if (restoreInProgress.isEmpty() == false) { throw new ConcurrentSnapshotExecutionException(snapshot, "cannot delete snapshot during a restore"); } } diff --git a/server/src/main/java/org/elasticsearch/transport/ConnectionProfile.java b/server/src/main/java/org/elasticsearch/transport/ConnectionProfile.java index bcab23c1fbd..07d4818ffaf 100644 --- a/server/src/main/java/org/elasticsearch/transport/ConnectionProfile.java +++ b/server/src/main/java/org/elasticsearch/transport/ConnectionProfile.java @@ -73,16 +73,16 @@ public final class ConnectionProfile { * @return the connection profile */ public static ConnectionProfile buildDefaultConnectionProfile(Settings settings) { - int connectionsPerNodeRecovery = TransportService.CONNECTIONS_PER_NODE_RECOVERY.get(settings); - int connectionsPerNodeBulk = TransportService.CONNECTIONS_PER_NODE_BULK.get(settings); - int connectionsPerNodeReg = TransportService.CONNECTIONS_PER_NODE_REG.get(settings); - int connectionsPerNodeState = TransportService.CONNECTIONS_PER_NODE_STATE.get(settings); - int connectionsPerNodePing = TransportService.CONNECTIONS_PER_NODE_PING.get(settings); + int connectionsPerNodeRecovery = TransportSettings.CONNECTIONS_PER_NODE_RECOVERY.get(settings); + int connectionsPerNodeBulk = TransportSettings.CONNECTIONS_PER_NODE_BULK.get(settings); + int connectionsPerNodeReg = TransportSettings.CONNECTIONS_PER_NODE_REG.get(settings); + int connectionsPerNodeState = TransportSettings.CONNECTIONS_PER_NODE_STATE.get(settings); + int connectionsPerNodePing = TransportSettings.CONNECTIONS_PER_NODE_PING.get(settings); Builder builder = new Builder(); - builder.setConnectTimeout(TransportService.TCP_CONNECT_TIMEOUT.get(settings)); - builder.setHandshakeTimeout(TransportService.TCP_CONNECT_TIMEOUT.get(settings)); - builder.setPingInterval(TcpTransport.PING_SCHEDULE.get(settings)); - builder.setCompressionEnabled(Transport.TRANSPORT_TCP_COMPRESS.get(settings)); + builder.setConnectTimeout(TransportSettings.CONNECT_TIMEOUT.get(settings)); + builder.setHandshakeTimeout(TransportSettings.CONNECT_TIMEOUT.get(settings)); + builder.setPingInterval(TransportSettings.PING_SCHEDULE.get(settings)); + builder.setCompressionEnabled(TransportSettings.TRANSPORT_COMPRESS.get(settings)); builder.addConnections(connectionsPerNodeBulk, TransportRequestOptions.Type.BULK); builder.addConnections(connectionsPerNodePing, TransportRequestOptions.Type.PING); // if we are not master eligible we don't need a dedicated channel to publish the state diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index 674cf5ae8d3..87dd99e6590 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -738,8 +738,8 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos private static ConnectionManager 
createConnectionManager(Settings settings, String clusterAlias, TransportService transportService) { ConnectionProfile.Builder builder = new ConnectionProfile.Builder() - .setConnectTimeout(TransportService.TCP_CONNECT_TIMEOUT.get(settings)) - .setHandshakeTimeout(TransportService.TCP_CONNECT_TIMEOUT.get(settings)) + .setConnectTimeout(TransportSettings.CONNECT_TIMEOUT.get(settings)) + .setHandshakeTimeout(TransportSettings.CONNECT_TIMEOUT.get(settings)) .addConnections(6, TransportRequestOptions.Type.REG, TransportRequestOptions.Type.PING) // TODO make this configurable? // we don't want this to be used for anything else but search .addConnections(0, TransportRequestOptions.Type.BULK, diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java index 52da474f2dd..fda0b90f19e 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java @@ -174,13 +174,13 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl public static final Setting.AffixSetting REMOTE_CLUSTER_PING_SCHEDULE = Setting.affixKeySetting( "cluster.remote.", "transport.ping_schedule", - key -> timeSetting(key, TcpTransport.PING_SCHEDULE, Setting.Property.NodeScope), + key -> timeSetting(key, TransportSettings.PING_SCHEDULE, Setting.Property.NodeScope), REMOTE_CLUSTERS_SEEDS); public static final Setting.AffixSetting REMOTE_CLUSTER_COMPRESS = Setting.affixKeySetting( "cluster.remote.", "transport.compress", - key -> boolSetting(key, Transport.TRANSPORT_TCP_COMPRESS, Setting.Property.NodeScope), + key -> boolSetting(key, TransportSettings.TRANSPORT_COMPRESS, Setting.Property.NodeScope), REMOTE_CLUSTERS_SEEDS); private static final Predicate DEFAULT_NODE_PREDICATE = (node) -> Version.CURRENT.isCompatible(node.getVersion()) diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index a56484cab28..8edf97c9295 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -100,17 +100,10 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; -import java.util.function.Function; import java.util.regex.Matcher; import java.util.regex.Pattern; -import static java.util.Collections.emptyList; import static java.util.Collections.unmodifiableMap; -import static org.elasticsearch.common.settings.Setting.affixKeySetting; -import static org.elasticsearch.common.settings.Setting.boolSetting; -import static org.elasticsearch.common.settings.Setting.intSetting; -import static org.elasticsearch.common.settings.Setting.listSetting; -import static org.elasticsearch.common.settings.Setting.timeSetting; import static org.elasticsearch.common.transport.NetworkExceptionHelper.isCloseConnectionException; import static org.elasticsearch.common.transport.NetworkExceptionHelper.isConnectException; import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap; @@ -120,59 +113,12 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements public static final String TRANSPORT_WORKER_THREAD_NAME_PREFIX = "transport_worker"; - 
public static final Setting> HOST = - listSetting("transport.host", emptyList(), Function.identity(), Setting.Property.NodeScope); - public static final Setting> BIND_HOST = - listSetting("transport.bind_host", HOST, Function.identity(), Setting.Property.NodeScope); - public static final Setting> PUBLISH_HOST = - listSetting("transport.publish_host", HOST, Function.identity(), Setting.Property.NodeScope); - public static final Setting PORT = - new Setting<>("transport.tcp.port", "9300-9400", Function.identity(), Setting.Property.NodeScope); - public static final Setting PUBLISH_PORT = - intSetting("transport.publish_port", -1, -1, Setting.Property.NodeScope); - public static final String DEFAULT_PROFILE = "default"; - // the scheduled internal ping interval setting, defaults to disabled (-1) - public static final Setting PING_SCHEDULE = - timeSetting("transport.ping_schedule", TimeValue.timeValueSeconds(-1), Setting.Property.NodeScope); - public static final Setting TCP_NO_DELAY = - boolSetting("transport.tcp_no_delay", NetworkService.TCP_NO_DELAY, Setting.Property.NodeScope); - public static final Setting TCP_KEEP_ALIVE = - boolSetting("transport.tcp.keep_alive", NetworkService.TCP_KEEP_ALIVE, Setting.Property.NodeScope); - public static final Setting TCP_REUSE_ADDRESS = - boolSetting("transport.tcp.reuse_address", NetworkService.TCP_REUSE_ADDRESS, Setting.Property.NodeScope); - public static final Setting TCP_SEND_BUFFER_SIZE = - Setting.byteSizeSetting("transport.tcp.send_buffer_size", NetworkService.TCP_SEND_BUFFER_SIZE, Setting.Property.NodeScope); - public static final Setting TCP_RECEIVE_BUFFER_SIZE = - Setting.byteSizeSetting("transport.tcp.receive_buffer_size", NetworkService.TCP_RECEIVE_BUFFER_SIZE, Setting.Property.NodeScope); - - - public static final Setting.AffixSetting TCP_NO_DELAY_PROFILE = affixKeySetting("transport.profiles.", "tcp_no_delay", - key -> boolSetting(key, TcpTransport.TCP_NO_DELAY, Setting.Property.NodeScope)); - public static final Setting.AffixSetting TCP_KEEP_ALIVE_PROFILE = affixKeySetting("transport.profiles.", "tcp_keep_alive", - key -> boolSetting(key, TcpTransport.TCP_KEEP_ALIVE, Setting.Property.NodeScope)); - public static final Setting.AffixSetting TCP_REUSE_ADDRESS_PROFILE = affixKeySetting("transport.profiles.", "reuse_address", - key -> boolSetting(key, TcpTransport.TCP_REUSE_ADDRESS, Setting.Property.NodeScope)); - public static final Setting.AffixSetting TCP_SEND_BUFFER_SIZE_PROFILE = affixKeySetting("transport.profiles.", - "send_buffer_size", key -> Setting.byteSizeSetting(key, TcpTransport.TCP_SEND_BUFFER_SIZE, Setting.Property.NodeScope)); - public static final Setting.AffixSetting TCP_RECEIVE_BUFFER_SIZE_PROFILE = affixKeySetting("transport.profiles.", - "receive_buffer_size", key -> Setting.byteSizeSetting(key, TcpTransport.TCP_RECEIVE_BUFFER_SIZE, Setting.Property.NodeScope)); - - public static final Setting.AffixSetting> BIND_HOST_PROFILE = affixKeySetting("transport.profiles.", "bind_host", - key -> listSetting(key, BIND_HOST, Function.identity(), Setting.Property.NodeScope)); - public static final Setting.AffixSetting> PUBLISH_HOST_PROFILE = affixKeySetting("transport.profiles.", "publish_host", - key -> listSetting(key, PUBLISH_HOST, Function.identity(), Setting.Property.NodeScope)); - public static final Setting.AffixSetting PORT_PROFILE = affixKeySetting("transport.profiles.", "port", - key -> new Setting<>(key, PORT, Function.identity(), Setting.Property.NodeScope)); - public static final Setting.AffixSetting PUBLISH_PORT_PROFILE 
= affixKeySetting("transport.profiles.", "publish_port", - key -> intSetting(key, -1, -1, Setting.Property.NodeScope)); // This is the number of bytes necessary to read the message size private static final int BYTES_NEEDED_FOR_MESSAGE_SIZE = TcpHeader.MARKER_BYTES_SIZE + TcpHeader.MESSAGE_LENGTH_SIZE; private static final long NINETY_PER_HEAP_SIZE = (long) (JvmInfo.jvmInfo().getMem().getHeapMax().getBytes() * 0.9); private static final BytesReference EMPTY_BYTES_REFERENCE = new BytesArray(new byte[0]); - public static final String FEATURE_PREFIX = "transport.features"; - public static final Setting DEFAULT_FEATURES_SETTING = Setting.groupSetting(FEATURE_PREFIX + ".", Setting.Property.NodeScope); private final String[] features; protected final Settings settings; @@ -220,7 +166,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements this.pageCacheRecycler = pageCacheRecycler; this.circuitBreakerService = circuitBreakerService; this.namedWriteableRegistry = namedWriteableRegistry; - this.compressAllResponses = Transport.TRANSPORT_TCP_COMPRESS.get(settings); + this.compressAllResponses = TransportSettings.TRANSPORT_COMPRESS.get(settings); this.networkService = networkService; this.transportName = transportName; this.transportLogger = new TransportLogger(); @@ -233,7 +179,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements this.keepAlive = new TransportKeepAlive(threadPool, this::internalSendMessage); this.nodeName = Node.NODE_NAME_SETTING.get(settings); - final Settings defaultFeatures = DEFAULT_FEATURES_SETTING.get(settings); + final Settings defaultFeatures = TransportSettings.DEFAULT_FEATURES_SETTING.get(settings); if (defaultFeatures == null) { this.features = new String[0]; } else { @@ -500,7 +446,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements publishHosts = Arrays.asList(boundAddressesHostStrings); } if (publishHosts.isEmpty()) { - publishHosts = NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING.get(settings); + publishHosts = NetworkService.GLOBAL_NETWORK_PUBLISH_HOST_SETTING.get(settings); } final InetAddress publishInetAddress; @@ -546,15 +492,15 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements String profileExplanation = profileSettings.isDefaultProfile ? "" : " for profile " + profileSettings.profileName; throw new BindTransportException("Failed to auto-resolve publish port" + profileExplanation + ", multiple bound addresses " + boundAddresses + " with distinct ports and none of them matched the publish address (" + publishInetAddress + "). 
" + - "Please specify a unique port by setting " + PORT.getKey() + " or " + - PUBLISH_PORT.getKey()); + "Please specify a unique port by setting " + TransportSettings.PORT.getKey() + " or " + + TransportSettings.PUBLISH_PORT.getKey()); } return publishPort; } @Override public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws UnknownHostException { - return parse(address, settings.get("transport.profiles.default.port", PORT.get(settings)), perAddressLimit); + return parse(address, settings.get("transport.profiles.default.port", TransportSettings.PORT.get(settings)), perAddressLimit); } // this code is a take on guava's HostAndPort, like a HostAndPortRange @@ -1463,12 +1409,12 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements boolean isDefaultSet = false; for (String profile : settings.getGroups("transport.profiles.", true).keySet()) { profiles.add(new ProfileSettings(settings, profile)); - if (DEFAULT_PROFILE.equals(profile)) { + if (TransportSettings.DEFAULT_PROFILE.equals(profile)) { isDefaultSet = true; } } if (isDefaultSet == false) { - profiles.add(new ProfileSettings(settings, DEFAULT_PROFILE)); + profiles.add(new ProfileSettings(settings, TransportSettings.DEFAULT_PROFILE)); } return Collections.unmodifiableSet(profiles); } @@ -1491,23 +1437,22 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements public ProfileSettings(Settings settings, String profileName) { this.profileName = profileName; - isDefaultProfile = DEFAULT_PROFILE.equals(profileName); - tcpKeepAlive = TCP_KEEP_ALIVE_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); - tcpNoDelay = TCP_NO_DELAY_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); - reuseAddress = TCP_REUSE_ADDRESS_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); - sendBufferSize = TCP_SEND_BUFFER_SIZE_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); - receiveBufferSize = TCP_RECEIVE_BUFFER_SIZE_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); - List profileBindHosts = BIND_HOST_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); - bindHosts = (profileBindHosts.isEmpty() ? NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING.get(settings) - : profileBindHosts); - publishHosts = PUBLISH_HOST_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); - Setting concretePort = PORT_PROFILE.getConcreteSettingForNamespace(profileName); + isDefaultProfile = TransportSettings.DEFAULT_PROFILE.equals(profileName); + tcpKeepAlive = TransportSettings.TCP_KEEP_ALIVE_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); + tcpNoDelay = TransportSettings.TCP_NO_DELAY_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); + reuseAddress = TransportSettings.TCP_REUSE_ADDRESS_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); + sendBufferSize = TransportSettings.TCP_SEND_BUFFER_SIZE_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); + receiveBufferSize = TransportSettings.TCP_RECEIVE_BUFFER_SIZE_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); + List profileBindHosts = TransportSettings.BIND_HOST_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); + bindHosts = (profileBindHosts.isEmpty() ? 
NetworkService.GLOBAL_NETWORK_BIND_HOST_SETTING.get(settings) : profileBindHosts); + publishHosts = TransportSettings.PUBLISH_HOST_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); + Setting concretePort = TransportSettings.PORT_PROFILE.getConcreteSettingForNamespace(profileName); if (concretePort.exists(settings) == false && isDefaultProfile == false) { throw new IllegalStateException("profile [" + profileName + "] has no port configured"); } - portOrRange = PORT_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); - publishPort = isDefaultProfile ? PUBLISH_PORT.get(settings) : - PUBLISH_PORT_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); + portOrRange = TransportSettings.PORT_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); + publishPort = isDefaultProfile ? TransportSettings.PUBLISH_PORT.get(settings) : + TransportSettings.PUBLISH_PORT_PROFILE.getConcreteSettingForNamespace(profileName).get(settings); } } diff --git a/server/src/main/java/org/elasticsearch/transport/Transport.java b/server/src/main/java/org/elasticsearch/transport/Transport.java index e44e0b7877c..4a8a061602a 100644 --- a/server/src/main/java/org/elasticsearch/transport/Transport.java +++ b/server/src/main/java/org/elasticsearch/transport/Transport.java @@ -26,8 +26,6 @@ import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.lease.Releasable; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -44,8 +42,6 @@ import java.util.function.Predicate; public interface Transport extends LifecycleComponent { - Setting TRANSPORT_TCP_COMPRESS = Setting.boolSetting("transport.tcp.compress", false, Property.NodeScope); - /** * Registers a new request handler */ diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index 3aeb41296a9..b5e97ac3ae6 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -24,7 +24,6 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.cluster.ClusterName; @@ -38,15 +37,11 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import 
org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.FutureUtils; @@ -75,26 +70,9 @@ import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; -import static java.util.Collections.emptyList; -import static org.elasticsearch.common.settings.Setting.intSetting; -import static org.elasticsearch.common.settings.Setting.listSetting; -import static org.elasticsearch.common.settings.Setting.timeSetting; - public class TransportService extends AbstractLifecycleComponent implements TransportMessageListener, TransportConnectionListener { private static final Logger logger = LogManager.getLogger(TransportService.class); - public static final Setting CONNECTIONS_PER_NODE_RECOVERY = - intSetting("transport.connections_per_node.recovery", 2, 1, Setting.Property.NodeScope); - public static final Setting CONNECTIONS_PER_NODE_BULK = - intSetting("transport.connections_per_node.bulk", 3, 1, Setting.Property.NodeScope); - public static final Setting CONNECTIONS_PER_NODE_REG = - intSetting("transport.connections_per_node.reg", 6, 1, Setting.Property.NodeScope); - public static final Setting CONNECTIONS_PER_NODE_STATE = - intSetting("transport.connections_per_node.state", 1, 1, Setting.Property.NodeScope); - public static final Setting CONNECTIONS_PER_NODE_PING = - intSetting("transport.connections_per_node.ping", 1, 1, Setting.Property.NodeScope); - public static final Setting TCP_CONNECT_TIMEOUT = - timeSetting("transport.tcp.connect_timeout", NetworkService.TCP_CONNECT_TIMEOUT, Setting.Property.NodeScope); public static final String DIRECT_RESPONSE_PROFILE = ".direct"; public static final String HANDSHAKE_ACTION_NAME = "internal:transport/handshake"; @@ -124,13 +102,6 @@ public class TransportService extends AbstractLifecycleComponent implements Tran // tracer log - public static final Setting> TRACE_LOG_INCLUDE_SETTING = - listSetting("transport.tracer.include", emptyList(), Function.identity(), Property.Dynamic, Property.NodeScope); - public static final Setting> TRACE_LOG_EXCLUDE_SETTING = - listSetting("transport.tracer.exclude", - Arrays.asList("internal:discovery/zen/fd*", "internal:coordination/fault_detection/*", TransportLivenessAction.NAME), - Function.identity(), Property.Dynamic, Property.NodeScope); - private final Logger tracerLog; volatile String[] tracerLogInclude; @@ -172,7 +143,7 @@ public class TransportService extends AbstractLifecycleComponent implements Tran * Build the service. * * @param clusterSettings if non null, the {@linkplain TransportService} will register with the {@link ClusterSettings} for settings - * updates for {@link #TRACE_LOG_EXCLUDE_SETTING} and {@link #TRACE_LOG_INCLUDE_SETTING}. + * updates for {@link TransportSettings#TRACE_LOG_EXCLUDE_SETTING} and {@link TransportSettings#TRACE_LOG_INCLUDE_SETTING}. 
*/ public TransportService(Settings settings, Transport transport, ThreadPool threadPool, TransportInterceptor transportInterceptor, Function localNodeFactory, @Nullable ClusterSettings clusterSettings, @@ -193,8 +164,8 @@ public class TransportService extends AbstractLifecycleComponent implements Tran this.localNodeFactory = localNodeFactory; this.connectionManager = connectionManager; this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings); - setTracerLogInclude(TRACE_LOG_INCLUDE_SETTING.get(settings)); - setTracerLogExclude(TRACE_LOG_EXCLUDE_SETTING.get(settings)); + setTracerLogInclude(TransportSettings.TRACE_LOG_INCLUDE_SETTING.get(settings)); + setTracerLogExclude(TransportSettings.TRACE_LOG_EXCLUDE_SETTING.get(settings)); tracerLog = Loggers.getLogger(logger, ".tracer"); taskManager = createTaskManager(settings, threadPool, taskHeaders); this.interceptor = transportInterceptor; @@ -203,8 +174,8 @@ public class TransportService extends AbstractLifecycleComponent implements Tran remoteClusterService = new RemoteClusterService(settings, this); responseHandlers = transport.getResponseHandlers(); if (clusterSettings != null) { - clusterSettings.addSettingsUpdateConsumer(TRACE_LOG_INCLUDE_SETTING, this::setTracerLogInclude); - clusterSettings.addSettingsUpdateConsumer(TRACE_LOG_EXCLUDE_SETTING, this::setTracerLogExclude); + clusterSettings.addSettingsUpdateConsumer(TransportSettings.TRACE_LOG_INCLUDE_SETTING, this::setTracerLogInclude); + clusterSettings.addSettingsUpdateConsumer(TransportSettings.TRACE_LOG_EXCLUDE_SETTING, this::setTracerLogExclude); if (connectToRemoteCluster) { remoteClusterService.listenForUpdates(clusterSettings); } diff --git a/server/src/main/java/org/elasticsearch/transport/TransportSettings.java b/server/src/main/java/org/elasticsearch/transport/TransportSettings.java new file mode 100644 index 00000000000..60e230004ca --- /dev/null +++ b/server/src/main/java/org/elasticsearch/transport/TransportSettings.java @@ -0,0 +1,168 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.transport; + +import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; + +import java.util.Arrays; +import java.util.List; +import java.util.function.Function; + +import static java.util.Collections.emptyList; +import static org.elasticsearch.common.settings.Setting.affixKeySetting; +import static org.elasticsearch.common.settings.Setting.boolSetting; +import static org.elasticsearch.common.settings.Setting.intSetting; +import static org.elasticsearch.common.settings.Setting.listSetting; +import static org.elasticsearch.common.settings.Setting.timeSetting; + +public final class TransportSettings { + + public static final String DEFAULT_PROFILE = "default"; + public static final String FEATURE_PREFIX = "transport.features"; + + public static final Setting> HOST = + listSetting("transport.host", emptyList(), Function.identity(), Setting.Property.NodeScope); + public static final Setting> PUBLISH_HOST = + listSetting("transport.publish_host", HOST, Function.identity(), Setting.Property.NodeScope); + public static final Setting.AffixSetting> PUBLISH_HOST_PROFILE = + affixKeySetting("transport.profiles.", "publish_host", key -> listSetting(key, PUBLISH_HOST, Function.identity(), + Setting.Property.NodeScope)); + public static final Setting> BIND_HOST = + listSetting("transport.bind_host", HOST, Function.identity(), Setting.Property.NodeScope); + public static final Setting.AffixSetting> BIND_HOST_PROFILE = affixKeySetting("transport.profiles.", "bind_host", + key -> listSetting(key, BIND_HOST, Function.identity(), Setting.Property.NodeScope)); + // TODO: Deprecate in 7.0 + public static final Setting OLD_PORT = + new Setting<>("transport.tcp.port", "9300-9400", Function.identity(), Setting.Property.NodeScope); + public static final Setting PORT = + new Setting<>("transport.port", OLD_PORT, Function.identity(), Setting.Property.NodeScope); + public static final Setting.AffixSetting PORT_PROFILE = affixKeySetting("transport.profiles.", "port", + key -> new Setting<>(key, PORT, Function.identity(), Setting.Property.NodeScope)); + public static final Setting PUBLISH_PORT = + intSetting("transport.publish_port", -1, -1, Setting.Property.NodeScope); + public static final Setting.AffixSetting PUBLISH_PORT_PROFILE = affixKeySetting("transport.profiles.", "publish_port", + key -> intSetting(key, -1, -1, Setting.Property.NodeScope)); + // TODO: Deprecate in 7.0 + public static final Setting OLD_TRANSPORT_COMPRESS = + boolSetting("transport.tcp.compress", false, Setting.Property.NodeScope); + public static final Setting TRANSPORT_COMPRESS = + boolSetting("transport.compress", OLD_TRANSPORT_COMPRESS, Setting.Property.NodeScope); + // the scheduled internal ping interval setting, defaults to disabled (-1) + public static final Setting PING_SCHEDULE = + timeSetting("transport.ping_schedule", TimeValue.timeValueSeconds(-1), Setting.Property.NodeScope); + // TODO: Deprecate in 7.0 + public static final Setting TCP_CONNECT_TIMEOUT = + timeSetting("transport.tcp.connect_timeout", NetworkService.TCP_CONNECT_TIMEOUT, Setting.Property.NodeScope); + public static final Setting CONNECT_TIMEOUT = + timeSetting("transport.connect_timeout", TCP_CONNECT_TIMEOUT, Setting.Property.NodeScope); + public 
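Reviewer note: each renamed key in TransportSettings is declared with its legacy setting as the fallback default (PORT falls back to OLD_PORT, TRANSPORT_COMPRESS to OLD_TRANSPORT_COMPRESS, CONNECT_TIMEOUT to TCP_CONNECT_TIMEOUT), so existing configurations keep working while the TODO comments mark the old keys for deprecation in 7.0. A small sketch of the observable behaviour, assuming standard Setting fallback semantics:

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.transport.TransportSettings;

    final class FallbackSketch {
        static String portRange() {
            // Only the legacy key is set; the new key resolves through its fallback.
            Settings settings = Settings.builder().put("transport.tcp.port", "9500-9600").build();
            return TransportSettings.PORT.get(settings);   // yields "9500-9600"
        }
    }
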
static final Setting DEFAULT_FEATURES_SETTING = Setting.groupSetting(FEATURE_PREFIX + ".", Setting.Property.NodeScope); + + // Tcp socket settings + + // TODO: Deprecate in 7.0 + public static final Setting OLD_TCP_NO_DELAY = + boolSetting("transport.tcp_no_delay", NetworkService.TCP_NO_DELAY, Setting.Property.NodeScope); + public static final Setting TCP_NO_DELAY = + boolSetting("transport.tcp.no_delay", OLD_TCP_NO_DELAY, Setting.Property.NodeScope); + // TODO: Deprecate in 7.0 + public static final Setting.AffixSetting OLD_TCP_NO_DELAY_PROFILE = + affixKeySetting("transport.profiles.", "tcp_no_delay", key -> boolSetting(key, TCP_NO_DELAY, Setting.Property.NodeScope)); + public static final Setting.AffixSetting TCP_NO_DELAY_PROFILE = + affixKeySetting("transport.profiles.", "tcp.no_delay", + key -> boolSetting(key, + fallback(key, OLD_TCP_NO_DELAY_PROFILE, "tcp\\.no_delay$", "tcp_no_delay"), + Setting.Property.NodeScope)); + public static final Setting TCP_KEEP_ALIVE = + boolSetting("transport.tcp.keep_alive", NetworkService.TCP_KEEP_ALIVE, Setting.Property.NodeScope); + // TODO: Deprecate in 7.0 + public static final Setting.AffixSetting OLD_TCP_KEEP_ALIVE_PROFILE = + affixKeySetting("transport.profiles.", "tcp_keep_alive", key -> boolSetting(key, TCP_KEEP_ALIVE, Setting.Property.NodeScope)); + public static final Setting.AffixSetting TCP_KEEP_ALIVE_PROFILE = + affixKeySetting("transport.profiles.", "tcp.keep_alive", + key -> boolSetting(key, + fallback(key, OLD_TCP_KEEP_ALIVE_PROFILE, "tcp\\.keep_alive$", "tcp_keep_alive"), + Setting.Property.NodeScope)); + public static final Setting TCP_REUSE_ADDRESS = + boolSetting("transport.tcp.reuse_address", NetworkService.TCP_REUSE_ADDRESS, Setting.Property.NodeScope); + // TODO: Deprecate in 7.0 + public static final Setting.AffixSetting OLD_TCP_REUSE_ADDRESS_PROFILE = + affixKeySetting("transport.profiles.", "reuse_address", key -> boolSetting(key, TCP_REUSE_ADDRESS, Setting.Property.NodeScope)); + public static final Setting.AffixSetting TCP_REUSE_ADDRESS_PROFILE = + affixKeySetting("transport.profiles.", "tcp.reuse_address", + key -> boolSetting(key, + fallback(key, OLD_TCP_REUSE_ADDRESS_PROFILE, "tcp\\.reuse_address$", "reuse_address"), + Setting.Property.NodeScope)); + public static final Setting TCP_SEND_BUFFER_SIZE = + Setting.byteSizeSetting("transport.tcp.send_buffer_size", NetworkService.TCP_SEND_BUFFER_SIZE, Setting.Property.NodeScope); + // TODO: Deprecate in 7.0 + public static final Setting.AffixSetting OLD_TCP_SEND_BUFFER_SIZE_PROFILE = + affixKeySetting("transport.profiles.", "send_buffer_size", + key -> Setting.byteSizeSetting(key, TCP_SEND_BUFFER_SIZE, Setting.Property.NodeScope)); + public static final Setting.AffixSetting TCP_SEND_BUFFER_SIZE_PROFILE = + affixKeySetting("transport.profiles.", "tcp.send_buffer_size", + key -> Setting.byteSizeSetting(key, + fallback(key, OLD_TCP_SEND_BUFFER_SIZE_PROFILE, "tcp\\.send_buffer_size$", "send_buffer_size"), + Setting.Property.NodeScope)); + public static final Setting TCP_RECEIVE_BUFFER_SIZE = + Setting.byteSizeSetting("transport.tcp.receive_buffer_size", NetworkService.TCP_RECEIVE_BUFFER_SIZE, Setting.Property.NodeScope); + // TODO: Deprecate in 7.0 + public static final Setting.AffixSetting OLD_TCP_RECEIVE_BUFFER_SIZE_PROFILE = + affixKeySetting("transport.profiles.", "receive_buffer_size", + key -> Setting.byteSizeSetting(key, TCP_RECEIVE_BUFFER_SIZE, Setting.Property.NodeScope)); + public static final Setting.AffixSetting TCP_RECEIVE_BUFFER_SIZE_PROFILE = + 
affixKeySetting("transport.profiles.", "tcp.receive_buffer_size", + key -> Setting.byteSizeSetting(key, + fallback(key, OLD_TCP_RECEIVE_BUFFER_SIZE_PROFILE, "tcp\\.receive_buffer_size$", "receive_buffer_size"), + Setting.Property.NodeScope)); + + // Connections per node settings + + public static final Setting CONNECTIONS_PER_NODE_RECOVERY = + intSetting("transport.connections_per_node.recovery", 2, 1, Setting.Property.NodeScope); + public static final Setting CONNECTIONS_PER_NODE_BULK = + intSetting("transport.connections_per_node.bulk", 3, 1, Setting.Property.NodeScope); + public static final Setting CONNECTIONS_PER_NODE_REG = + intSetting("transport.connections_per_node.reg", 6, 1, Setting.Property.NodeScope); + public static final Setting CONNECTIONS_PER_NODE_STATE = + intSetting("transport.connections_per_node.state", 1, 1, Setting.Property.NodeScope); + public static final Setting CONNECTIONS_PER_NODE_PING = + intSetting("transport.connections_per_node.ping", 1, 1, Setting.Property.NodeScope); + + // Tracer settings + + public static final Setting> TRACE_LOG_INCLUDE_SETTING = + listSetting("transport.tracer.include", emptyList(), Function.identity(), Setting.Property.Dynamic, Setting.Property.NodeScope); + public static final Setting> TRACE_LOG_EXCLUDE_SETTING = + listSetting("transport.tracer.exclude", + Arrays.asList("internal:discovery/zen/fd*", "internal:coordination/fault_detection/*", TransportLivenessAction.NAME), + Function.identity(), Setting.Property.Dynamic, Setting.Property.NodeScope); + + private TransportSettings() { + } + + private static Setting fallback(String key, Setting.AffixSetting affixSetting, String regex, String replacement) { + return "_na_".equals(key) ? affixSetting.getConcreteSettingForNamespace(key) + : affixSetting.getConcreteSetting(key.replaceAll(regex, replacement)); + } +} diff --git a/server/src/test/java/org/elasticsearch/VersionTests.java b/server/src/test/java/org/elasticsearch/VersionTests.java index ae7442f9ed6..0e230bfd9af 100644 --- a/server/src/test/java/org/elasticsearch/VersionTests.java +++ b/server/src/test/java/org/elasticsearch/VersionTests.java @@ -125,7 +125,8 @@ public class VersionTests extends ESTestCase { for (int i = 0; i < iters; i++) { Version version = randomVersion(random()); if (version != Version.CURRENT) { - assertThat("Version: " + version + " should be before: " + Version.CURRENT + " but wasn't", version.before(Version.CURRENT), is(true)); + assertThat("Version: " + version + " should be before: " + Version.CURRENT + " but wasn't", + version.before(Version.CURRENT), is(true)); } } } @@ -161,7 +162,11 @@ public class VersionTests extends ESTestCase { public void testIndexCreatedVersion() { // an actual index has a IndexMetaData.SETTING_INDEX_UUID final Version version = Version.V_6_0_0_beta1; - assertEquals(version, Version.indexCreated(Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "foo").put(IndexMetaData.SETTING_VERSION_CREATED, version).build())); + assertEquals(version, Version.indexCreated( + Settings.builder() + .put(IndexMetaData.SETTING_INDEX_UUID, "foo") + .put(IndexMetaData.SETTING_VERSION_CREATED, version) + .build())); } public void testMinCompatVersion() { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index a455bc5b882..59292ba077f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java 
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -41,15 +41,15 @@ import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; -import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.tasks.MockTaskManager; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.MockTcpTransport; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.nio.MockNioTransport; import org.junit.After; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -174,9 +174,9 @@ public abstract class TaskManagerTestCase extends ESTestCase { return discoveryNode.get(); }; transportService = new TransportService(settings, - new MockTcpTransport(settings, threadPool, BigArrays.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService(), - new NamedWriteableRegistry(ClusterModule.getNamedWriteables()), - new NetworkService(Collections.emptyList())), + new MockNioTransport(settings, Version.CURRENT, threadPool, new NetworkService(Collections.emptyList()), + PageCacheRecycler.NON_RECYCLING_INSTANCE, new NamedWriteableRegistry(ClusterModule.getNamedWriteables()), + new NoneCircuitBreakerService()), threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, boundTransportAddressDiscoveryNodeFunction, null, Collections.emptySet()) { @Override diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java index ca19dcc2509..37e82884c51 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java @@ -268,7 +268,8 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { Engine.IndexResult success = new FakeIndexResult(1, 1, 13, true, resultLocation); IndexShard shard = mock(IndexShard.class); - when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean())).thenReturn(mappingUpdate); + when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean())) + .thenReturn(mappingUpdate); // Pretend the mappings haven't made it to the node yet BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard); @@ -285,9 +286,10 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { assertThat("mappings were \"updated\" once", updateCalled.get(), equalTo(1)); // Verify that the shard "executed" the operation twice - verify(shard, times(2)).applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean()); + verify(shard, times(2)).applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean()); - when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean())).thenReturn(success); + when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean())) + .thenReturn(success); 
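Reviewer note: several test suites in this diff swap MockTcpTransport for MockNioTransport; the new constructor takes a Version and a PageCacheRecycler in place of BigArrays, and the circuit-breaker service moves to the last argument. The construction as it appears in TaskManagerTestCase above:

    import java.util.Collections;

    import org.elasticsearch.Version;
    import org.elasticsearch.cluster.ClusterModule;
    import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
    import org.elasticsearch.common.network.NetworkService;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.util.PageCacheRecycler;
    import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
    import org.elasticsearch.threadpool.ThreadPool;
    import org.elasticsearch.transport.nio.MockNioTransport;

    final class TestTransportSketch {
        // Argument order copied from the hunks in this diff.
        static MockNioTransport newTestTransport(ThreadPool threadPool) {
            return new MockNioTransport(Settings.EMPTY, Version.CURRENT, threadPool,
                new NetworkService(Collections.emptyList()), PageCacheRecycler.NON_RECYCLING_INSTANCE,
                new NamedWriteableRegistry(ClusterModule.getNamedWriteables()), new NoneCircuitBreakerService());
        }
    }
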
TransportShardBulkAction.executeBulkItemRequest(context, null, threadPool::absoluteTimeInMillis, (update, shardId, type) -> fail("should not have had to update the mappings"), () -> {}); @@ -295,7 +297,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { // Verify that the shard "executed" the operation only once (2 for previous invocations plus // 1 for this execution) - verify(shard, times(3)).applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean()); + verify(shard, times(3)).applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean()); BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse(); @@ -488,7 +490,8 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { Exception err = new ElasticsearchException("I'm dead <(x.x)>"); Engine.IndexResult indexResult = new Engine.IndexResult(err, 0, 0, 0); IndexShard shard = mock(IndexShard.class); - when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean())).thenReturn(indexResult); + when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean())) + .thenReturn(indexResult); when(shard.indexSettings()).thenReturn(indexSettings); UpdateHelper updateHelper = mock(UpdateHelper.class); @@ -536,7 +539,8 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { "I'm conflicted <(;_;)>"); Engine.IndexResult indexResult = new Engine.IndexResult(err, 0, 0, 0); IndexShard shard = mock(IndexShard.class); - when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean())).thenReturn(indexResult); + when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean())) + .thenReturn(indexResult); when(shard.indexSettings()).thenReturn(indexSettings); UpdateHelper updateHelper = mock(UpdateHelper.class); @@ -581,7 +585,8 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { Translog.Location resultLocation = new Translog.Location(42, 42, 42); Engine.IndexResult indexResult = new FakeIndexResult(1, 1, 13, created, resultLocation); IndexShard shard = mock(IndexShard.class); - when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean())).thenReturn(indexResult); + when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean())) + .thenReturn(indexResult); when(shard.indexSettings()).thenReturn(indexSettings); UpdateHelper updateHelper = mock(UpdateHelper.class); @@ -626,7 +631,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { final long resultSeqNo = 13; Engine.DeleteResult deleteResult = new FakeDeleteResult(1, 1, resultSeqNo, found, resultLocation); IndexShard shard = mock(IndexShard.class); - when(shard.applyDeleteOperationOnPrimary(anyLong(), any(), any(), any())).thenReturn(deleteResult); + when(shard.applyDeleteOperationOnPrimary(anyLong(), any(), any(), any(), anyLong(), anyLong())).thenReturn(deleteResult); when(shard.indexSettings()).thenReturn(indexSettings); UpdateHelper updateHelper = mock(UpdateHelper.class); @@ -769,7 +774,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { Engine.IndexResult success = new FakeIndexResult(1, 1, 13, true, resultLocation); IndexShard shard = mock(IndexShard.class); - when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean())).thenAnswer(ir -> { + 
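Reviewer note: applyIndexOperationOnPrimary (and applyDeleteOperationOnPrimary) gained two extra long parameters, so every Mockito stub and verify call in this file has to grow two additional anyLong() matchers — with a matcher count that no longer matches the signature, Mockito fails when the stub is declared. The widened stub shape (static-import paths may differ slightly by Mockito version):

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.ArgumentMatchers.anyBoolean;
    import static org.mockito.ArgumentMatchers.anyLong;
    import static org.mockito.Mockito.when;

    // Inside a test method; shard and indexResult assumed in scope, as in the tests above.
    when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean()))
        .thenReturn(indexResult);
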
when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean())).thenAnswer(ir -> { if (randomBoolean()) { return conflictedResult; } diff --git a/server/src/test/java/org/elasticsearch/action/resync/TransportResyncReplicationActionTests.java b/server/src/test/java/org/elasticsearch/action/resync/TransportResyncReplicationActionTests.java index 66fbc9936c9..0ca4be52504 100644 --- a/server/src/test/java/org/elasticsearch/action/resync/TransportResyncReplicationActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/resync/TransportResyncReplicationActionTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.action.resync; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; @@ -34,7 +35,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; @@ -49,7 +50,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.MockTcpTransport; +import org.elasticsearch.transport.nio.MockNioTransport; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -99,8 +100,9 @@ public class TransportResyncReplicationActionTests extends ESTestCase { .addGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ALL) .addIndexBlock(indexName, IndexMetaData.INDEX_WRITE_BLOCK))); - try (MockTcpTransport transport = new MockTcpTransport(Settings.EMPTY, threadPool, BigArrays.NON_RECYCLING_INSTANCE, - new NoneCircuitBreakerService(), new NamedWriteableRegistry(emptyList()), new NetworkService(emptyList()))) { + try (MockNioTransport transport = new MockNioTransport(Settings.EMPTY, Version.CURRENT, threadPool, + new NetworkService(emptyList()), PageCacheRecycler.NON_RECYCLING_INSTANCE, new NamedWriteableRegistry(emptyList()), + new NoneCircuitBreakerService())) { final MockTransportService transportService = new MockTransportService(Settings.EMPTY, transport, threadPool, NOOP_TRANSPORT_INTERCEPTOR, x -> clusterService.localNode(), null, Collections.emptySet()); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index c1a170c69ed..4a8afe22b18 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -20,10 +20,16 @@ package org.elasticsearch.action.search; import com.carrotsearch.randomizedtesting.RandomizedContext; +import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.TotalHits.Relation; +import 
org.apache.lucene.search.grouping.CollapseTopFieldDocs; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.util.BigArrays; @@ -47,7 +53,6 @@ import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import org.elasticsearch.test.ESTestCase; import org.junit.Before; -import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -138,7 +143,7 @@ public class SearchPhaseControllerTests extends ESTestCase { () -> generateQueryResults(nShards, suggestions, searchHitsSize, useConstantScore)); } - public void testMerge() throws IOException { + public void testMerge() { List suggestions = new ArrayList<>(); int maxSuggestSize = 0; for (int i = 0; i < randomIntBetween(1, 5); i++) { @@ -152,8 +157,8 @@ public class SearchPhaseControllerTests extends ESTestCase { for (boolean trackTotalHits : new boolean[] {true, false}) { SearchPhaseController.ReducedQueryPhase reducedQueryPhase = searchPhaseController.reducedQueryPhase(queryResults.asList(), false, trackTotalHits); - AtomicArray searchPhaseResultAtomicArray = generateFetchResults(nShards, reducedQueryPhase.scoreDocs, - reducedQueryPhase.suggest); + AtomicArray searchPhaseResultAtomicArray = generateFetchResults(nShards, + reducedQueryPhase.sortedTopDocs.scoreDocs, reducedQueryPhase.suggest); InternalSearchResponse mergedResponse = searchPhaseController.merge(false, reducedQueryPhase, searchPhaseResultAtomicArray.asList(), searchPhaseResultAtomicArray::get); @@ -166,7 +171,7 @@ public class SearchPhaseControllerTests extends ESTestCase { suggestSize += stream.collect(Collectors.summingInt(e -> e.getOptions().size())); } assertThat(suggestSize, lessThanOrEqualTo(maxSuggestSize)); - assertThat(mergedResponse.hits().getHits().length, equalTo(reducedQueryPhase.scoreDocs.length - suggestSize)); + assertThat(mergedResponse.hits().getHits().length, equalTo(reducedQueryPhase.sortedTopDocs.scoreDocs.length - suggestSize)); Suggest suggestResult = mergedResponse.suggest(); for (Suggest.Suggestion suggestion : reducedQueryPhase.suggest) { assertThat(suggestion, instanceOf(CompletionSuggestion.class)); @@ -183,24 +188,24 @@ public class SearchPhaseControllerTests extends ESTestCase { } } - private AtomicArray generateQueryResults(int nShards, + private static AtomicArray generateQueryResults(int nShards, List suggestions, int searchHitsSize, boolean useConstantScore) { AtomicArray queryResults = new AtomicArray<>(nShards); for (int shardIndex = 0; shardIndex < nShards; shardIndex++) { QuerySearchResult querySearchResult = new QuerySearchResult(shardIndex, new SearchShardTarget("", new Index("", ""), shardIndex, null)); - TopDocs topDocs = new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]); + final TopDocs topDocs; float maxScore = 0; - if (searchHitsSize > 0) { + if (searchHitsSize == 0) { + topDocs = Lucene.EMPTY_TOP_DOCS; + } else { int nDocs = randomIntBetween(0, searchHitsSize); ScoreDoc[] scoreDocs = new ScoreDoc[nDocs]; for (int i = 0; i < nDocs; i++) { float score = useConstantScore ? 
1.0F : Math.abs(randomFloat()); scoreDocs[i] = new ScoreDoc(i, score); - if (score > maxScore) { - maxScore = score; - } + maxScore = Math.max(score, maxScore); } topDocs = new TopDocs(new TotalHits(scoreDocs.length, TotalHits.Relation.EQUAL_TO), scoreDocs); } @@ -283,7 +288,7 @@ public class SearchPhaseControllerTests extends ESTestCase { } } } - SearchHit[] hits = searchHits.toArray(new SearchHit[searchHits.size()]); + SearchHit[] hits = searchHits.toArray(new SearchHit[0]); fetchSearchResult.hits(new SearchHits(hits, new TotalHits(hits.length, Relation.EQUAL_TO), maxScore)); fetchResults.set(shardIndex, fetchSearchResult); } @@ -336,6 +341,10 @@ public class SearchPhaseControllerTests extends ESTestCase { assertEquals(numTotalReducePhases, reduce.numReducePhases); InternalMax max = (InternalMax) reduce.aggregations.asList().get(0); assertEquals(3.0D, max.getValue(), 0.0D); + assertFalse(reduce.sortedTopDocs.isSortedByField); + assertNull(reduce.sortedTopDocs.sortFields); + assertNull(reduce.sortedTopDocs.collapseField); + assertNull(reduce.sortedTopDocs.collapseValues); } public void testConsumerConcurrently() throws InterruptedException { @@ -374,13 +383,17 @@ public class SearchPhaseControllerTests extends ESTestCase { SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); InternalMax internalMax = (InternalMax) reduce.aggregations.asList().get(0); assertEquals(max.get(), internalMax.getValue(), 0.0D); - assertEquals(1, reduce.scoreDocs.length); + assertEquals(1, reduce.sortedTopDocs.scoreDocs.length); assertEquals(max.get(), reduce.maxScore, 0.0f); assertEquals(expectedNumResults, reduce.totalHits.value); - assertEquals(max.get(), reduce.scoreDocs[0].score, 0.0f); + assertEquals(max.get(), reduce.sortedTopDocs.scoreDocs[0].score, 0.0f); + assertFalse(reduce.sortedTopDocs.isSortedByField); + assertNull(reduce.sortedTopDocs.sortFields); + assertNull(reduce.sortedTopDocs.collapseField); + assertNull(reduce.sortedTopDocs.collapseValues); } - public void testConsumerOnlyAggs() throws InterruptedException { + public void testConsumerOnlyAggs() { int expectedNumResults = randomIntBetween(1, 100); int bufferSize = randomIntBetween(2, 200); SearchRequest request = new SearchRequest(); @@ -390,29 +403,31 @@ public class SearchPhaseControllerTests extends ESTestCase { searchPhaseController.newSearchPhaseResults(request, expectedNumResults); AtomicInteger max = new AtomicInteger(); for (int i = 0; i < expectedNumResults; i++) { - int id = i; int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); - QuerySearchResult result = new QuerySearchResult(id, new SearchShardTarget("node", new Index("a", "b"), id, null)); + QuerySearchResult result = new QuerySearchResult(i, new SearchShardTarget("node", new Index("a", "b"), i, null)); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), number), new DocValueFormat[0]); InternalAggregations aggs = new InternalAggregations(Arrays.asList(new InternalMax("test", (double) number, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()))); result.aggregations(aggs); - result.setShardIndex(id); + result.setShardIndex(i); result.size(1); consumer.consumeResult(result); } SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); InternalMax internalMax = (InternalMax) reduce.aggregations.asList().get(0); assertEquals(max.get(), internalMax.getValue(), 0.0D); - assertEquals(0, reduce.scoreDocs.length); + assertEquals(0, 
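Reviewer note: ReducedQueryPhase no longer exposes scoreDocs directly; the docs and their sort metadata now travel together in sortedTopDocs, which is why every assertion in these tests is rewritten. The new access path for the plain score-sorted case (field names taken from the hunks in this file):

    // Fragment from a test method; reduce assumed in scope.
    ScoreDoc[] docs = reduce.sortedTopDocs.scoreDocs;    // was: reduce.scoreDocs
    assertFalse(reduce.sortedTopDocs.isSortedByField);   // plain relevance sort
    assertNull(reduce.sortedTopDocs.sortFields);
    assertNull(reduce.sortedTopDocs.collapseField);
    assertNull(reduce.sortedTopDocs.collapseValues);
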
reduce.sortedTopDocs.scoreDocs.length); assertEquals(max.get(), reduce.maxScore, 0.0f); assertEquals(expectedNumResults, reduce.totalHits.value); + assertFalse(reduce.sortedTopDocs.isSortedByField); + assertNull(reduce.sortedTopDocs.sortFields); + assertNull(reduce.sortedTopDocs.collapseField); + assertNull(reduce.sortedTopDocs.collapseValues); } - - public void testConsumerOnlyHits() throws InterruptedException { + public void testConsumerOnlyHits() { int expectedNumResults = randomIntBetween(1, 100); int bufferSize = randomIntBetween(2, 200); SearchRequest request = new SearchRequest(); @@ -424,24 +439,26 @@ public class SearchPhaseControllerTests extends ESTestCase { searchPhaseController.newSearchPhaseResults(request, expectedNumResults); AtomicInteger max = new AtomicInteger(); for (int i = 0; i < expectedNumResults; i++) { - int id = i; int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); - QuerySearchResult result = new QuerySearchResult(id, new SearchShardTarget("node", new Index("a", "b"), id, null)); + QuerySearchResult result = new QuerySearchResult(i, new SearchShardTarget("node", new Index("a", "b"), i, null)); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(0, number)}), number), new DocValueFormat[0]); - result.setShardIndex(id); + result.setShardIndex(i); result.size(1); consumer.consumeResult(result); } SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); - assertEquals(1, reduce.scoreDocs.length); + assertEquals(1, reduce.sortedTopDocs.scoreDocs.length); assertEquals(max.get(), reduce.maxScore, 0.0f); assertEquals(expectedNumResults, reduce.totalHits.value); - assertEquals(max.get(), reduce.scoreDocs[0].score, 0.0f); + assertEquals(max.get(), reduce.sortedTopDocs.scoreDocs[0].score, 0.0f); + assertFalse(reduce.sortedTopDocs.isSortedByField); + assertNull(reduce.sortedTopDocs.sortFields); + assertNull(reduce.sortedTopDocs.collapseField); + assertNull(reduce.sortedTopDocs.collapseValues); } - public void testNewSearchPhaseResults() { for (int i = 0; i < 10; i++) { int expectedNumResults = randomIntBetween(1, 10); @@ -497,15 +514,87 @@ public class SearchPhaseControllerTests extends ESTestCase { consumer.consumeResult(result); } // 4*3 results = 12 we get result 5 to 10 here with from=5 and size=5 - SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); - assertEquals(5, reduce.scoreDocs.length); + ScoreDoc[] scoreDocs = reduce.sortedTopDocs.scoreDocs; + assertEquals(5, scoreDocs.length); assertEquals(100.f, reduce.maxScore, 0.0f); assertEquals(12, reduce.totalHits.value); - assertEquals(95.0f, reduce.scoreDocs[0].score, 0.0f); - assertEquals(94.0f, reduce.scoreDocs[1].score, 0.0f); - assertEquals(93.0f, reduce.scoreDocs[2].score, 0.0f); - assertEquals(92.0f, reduce.scoreDocs[3].score, 0.0f); - assertEquals(91.0f, reduce.scoreDocs[4].score, 0.0f); + assertEquals(95.0f, scoreDocs[0].score, 0.0f); + assertEquals(94.0f, scoreDocs[1].score, 0.0f); + assertEquals(93.0f, scoreDocs[2].score, 0.0f); + assertEquals(92.0f, scoreDocs[3].score, 0.0f); + assertEquals(91.0f, scoreDocs[4].score, 0.0f); + } + + public void testConsumerSortByField() { + int expectedNumResults = randomIntBetween(1, 100); + int bufferSize = randomIntBetween(2, 200); + SearchRequest request = new SearchRequest(); + int size = randomIntBetween(1, 10); + request.setBatchedReduceSize(bufferSize); + InitialSearchPhase.ArraySearchPhaseResults consumer = + 
searchPhaseController.newSearchPhaseResults(request, expectedNumResults); + AtomicInteger max = new AtomicInteger(); + SortField[] sortFields = {new SortField("field", SortField.Type.INT, true)}; + DocValueFormat[] docValueFormats = {DocValueFormat.RAW}; + for (int i = 0; i < expectedNumResults; i++) { + int number = randomIntBetween(1, 1000); + max.updateAndGet(prev -> Math.max(prev, number)); + FieldDoc[] fieldDocs = {new FieldDoc(0, Float.NaN, new Object[]{number})}; + TopDocs topDocs = new TopFieldDocs(new TotalHits(1, Relation.EQUAL_TO), fieldDocs, sortFields); + QuerySearchResult result = new QuerySearchResult(i, new SearchShardTarget("node", new Index("a", "b"), i, null)); + result.topDocs(new TopDocsAndMaxScore(topDocs, Float.NaN), docValueFormats); + result.setShardIndex(i); + result.size(size); + consumer.consumeResult(result); + } + SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); + assertEquals(Math.min(expectedNumResults, size), reduce.sortedTopDocs.scoreDocs.length); + assertEquals(expectedNumResults, reduce.totalHits.value); + assertEquals(max.get(), ((FieldDoc)reduce.sortedTopDocs.scoreDocs[0]).fields[0]); + assertTrue(reduce.sortedTopDocs.isSortedByField); + assertEquals(1, reduce.sortedTopDocs.sortFields.length); + assertEquals("field", reduce.sortedTopDocs.sortFields[0].getField()); + assertEquals(SortField.Type.INT, reduce.sortedTopDocs.sortFields[0].getType()); + assertNull(reduce.sortedTopDocs.collapseField); + assertNull(reduce.sortedTopDocs.collapseValues); + } + + public void testConsumerFieldCollapsing() { + int expectedNumResults = randomIntBetween(30, 100); + int bufferSize = randomIntBetween(2, 200); + SearchRequest request = new SearchRequest(); + int size = randomIntBetween(5, 10); + request.setBatchedReduceSize(bufferSize); + InitialSearchPhase.ArraySearchPhaseResults consumer = + searchPhaseController.newSearchPhaseResults(request, expectedNumResults); + SortField[] sortFields = {new SortField("field", SortField.Type.STRING)}; + BytesRef a = new BytesRef("a"); + BytesRef b = new BytesRef("b"); + BytesRef c = new BytesRef("c"); + Object[] collapseValues = new Object[]{a, b, c}; + DocValueFormat[] docValueFormats = {DocValueFormat.RAW}; + for (int i = 0; i < expectedNumResults; i++) { + Object[] values = {randomFrom(collapseValues)}; + FieldDoc[] fieldDocs = {new FieldDoc(0, Float.NaN, values)}; + TopDocs topDocs = new CollapseTopFieldDocs("field", new TotalHits(1, Relation.EQUAL_TO), fieldDocs, sortFields, values); + QuerySearchResult result = new QuerySearchResult(i, new SearchShardTarget("node", new Index("a", "b"), i, null)); + result.topDocs(new TopDocsAndMaxScore(topDocs, Float.NaN), docValueFormats); + result.setShardIndex(i); + result.size(size); + consumer.consumeResult(result); + } + SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); + assertEquals(3, reduce.sortedTopDocs.scoreDocs.length); + assertEquals(expectedNumResults, reduce.totalHits.value); + assertEquals(a, ((FieldDoc)reduce.sortedTopDocs.scoreDocs[0]).fields[0]); + assertEquals(b, ((FieldDoc)reduce.sortedTopDocs.scoreDocs[1]).fields[0]); + assertEquals(c, ((FieldDoc)reduce.sortedTopDocs.scoreDocs[2]).fields[0]); + assertTrue(reduce.sortedTopDocs.isSortedByField); + assertEquals(1, reduce.sortedTopDocs.sortFields.length); + assertEquals("field", reduce.sortedTopDocs.sortFields[0].getField()); + assertEquals(SortField.Type.STRING, reduce.sortedTopDocs.sortFields[0].getType()); + assertEquals("field", reduce.sortedTopDocs.collapseField); + 
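Reviewer note: the new testConsumerFieldCollapsing test feeds the reduce phase Lucene CollapseTopFieldDocs so that collapseField and collapseValues survive the merge. How one shard's collapsed result is built (shapes copied from the test above):

    import org.apache.lucene.search.FieldDoc;
    import org.apache.lucene.search.SortField;
    import org.apache.lucene.search.TotalHits;
    import org.apache.lucene.search.grouping.CollapseTopFieldDocs;
    import org.apache.lucene.util.BytesRef;

    final class CollapseSketch {
        // A single-hit shard result collapsed on "field" with collapse key "a".
        static CollapseTopFieldDocs oneShard() {
            SortField[] sortFields = {new SortField("field", SortField.Type.STRING)};
            Object[] values = {new BytesRef("a")};
            FieldDoc[] fieldDocs = {new FieldDoc(0, Float.NaN, values)};
            return new CollapseTopFieldDocs(
                "field", new TotalHits(1, TotalHits.Relation.EQUAL_TO), fieldDocs, sortFields, values);
        }
    }
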
assertArrayEquals(collapseValues, reduce.sortedTopDocs.collapseValues); } } diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index 5529701db15..4c91bfaa420 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.action.support.replication; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.UnavailableShardsException; @@ -38,7 +39,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.shard.ShardId; @@ -49,8 +50,8 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.MockTcpTransport; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.nio.MockNioTransport; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -92,9 +93,9 @@ public class BroadcastReplicationTests extends ESTestCase { @Before public void setUp() throws Exception { super.setUp(); - MockTcpTransport transport = new MockTcpTransport(Settings.EMPTY, - threadPool, BigArrays.NON_RECYCLING_INSTANCE, circuitBreakerService, new NamedWriteableRegistry(Collections.emptyList()), - new NetworkService(Collections.emptyList())); + MockNioTransport transport = new MockNioTransport(Settings.EMPTY, Version.CURRENT, + threadPool, new NetworkService(Collections.emptyList()), PageCacheRecycler.NON_RECYCLING_INSTANCE, + new NamedWriteableRegistry(Collections.emptyList()), circuitBreakerService); clusterService = createClusterService(threadPool); transportService = new TransportService(clusterService.getSettings(), transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> clusterService.localNode(), null, Collections.emptySet()); diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index 064058d3adb..41a300c28f3 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.action.support.replication; import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; @@ -56,7 +57,7 @@ import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexService; @@ -75,13 +76,13 @@ import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.MockTcpTransport; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.nio.MockNioTransport; import org.hamcrest.Matcher; import org.junit.After; import org.junit.AfterClass; @@ -1064,8 +1065,9 @@ public class TransportReplicationActionTests extends ESTestCase { AtomicBoolean throwException = new AtomicBoolean(true); final ReplicationTask task = maybeTask(); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.emptyList()); - final Transport transport = new MockTcpTransport(Settings.EMPTY, threadPool, BigArrays.NON_RECYCLING_INSTANCE, - new NoneCircuitBreakerService(), namedWriteableRegistry, new NetworkService(Collections.emptyList())); + final Transport transport = new MockNioTransport(Settings.EMPTY, Version.CURRENT, threadPool, + new NetworkService(Collections.emptyList()), PageCacheRecycler.NON_RECYCLING_INSTANCE, namedWriteableRegistry, + new NoneCircuitBreakerService()); transportService = new MockTransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> clusterService.localNode(), null, Collections.emptySet()); transportService.start(); diff --git a/server/src/test/java/org/elasticsearch/client/transport/TransportClientTests.java b/server/src/test/java/org/elasticsearch/client/transport/TransportClientTests.java index 1dc30e951b6..03ac1ebc3b6 100644 --- a/server/src/test/java/org/elasticsearch/client/transport/TransportClientTests.java +++ b/server/src/test/java/org/elasticsearch/client/transport/TransportClientTests.java @@ -31,7 +31,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.MockTransportClient; -import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.TransportSettings; import java.io.IOException; import java.util.Arrays; @@ -72,7 +72,7 @@ public class TransportClientTests extends ESTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); try (TransportClient client = new MockTransportClient(baseSettings, Arrays.asList(MockPlugin.class))) { - final Settings settings = TcpTransport.DEFAULT_FEATURES_SETTING.get(client.settings()); + final Settings settings = TransportSettings.DEFAULT_FEATURES_SETTING.get(client.settings()); assertThat(settings.keySet(), hasItem("transport_client")); assertThat(settings.get("transport_client"), equalTo("true")); } diff --git 
a/server/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/server/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java index 9734bf37b5b..ad54d46585f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java @@ -205,8 +205,8 @@ public class ClusterModuleTests extends ModuleTestCase { final String whiteListedClusterCustom = randomFrom(ClusterModule.PRE_6_3_CLUSTER_CUSTOMS_WHITE_LIST); final String whiteListedMetaDataCustom = randomFrom(ClusterModule.PRE_6_3_METADATA_CUSTOMS_WHITE_LIST); final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) - .putCustom(whiteListedClusterCustom, new RestoreInProgress()) - .putCustom("other", new RestoreInProgress()) + .putCustom(whiteListedClusterCustom, new RestoreInProgress.Builder().build()) + .putCustom("other", new RestoreInProgress.Builder().build()) .metaData(MetaData.builder() .putCustom(whiteListedMetaDataCustom, new RepositoriesMetaData(Collections.emptyList())) .putCustom("other", new RepositoriesMetaData(Collections.emptyList())) diff --git a/server/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/server/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java index 5cbb625ed96..d795971ac12 100644 --- a/server/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java @@ -721,11 +721,13 @@ public class ClusterStateDiffIT extends ESIntegTestCase { (long) randomIntBetween(0, 1000), ImmutableOpenMap.of())); case 1: - return new RestoreInProgress(new RestoreInProgress.Entry( + return new RestoreInProgress.Builder().add( + new RestoreInProgress.Entry( + UUIDs.randomBase64UUID(), new Snapshot(randomName("repo"), new SnapshotId(randomName("snap"), UUIDs.randomBase64UUID())), RestoreInProgress.State.fromValue((byte) randomIntBetween(0, 3)), emptyList(), - ImmutableOpenMap.of())); + ImmutableOpenMap.of())).build(); default: throw new IllegalArgumentException("Shouldn't be here"); } diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/DiscoveryUpgradeServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/DiscoveryUpgradeServiceTests.java index ed29e235bc8..01e9c140331 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/DiscoveryUpgradeServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/DiscoveryUpgradeServiceTests.java @@ -32,7 +32,7 @@ public class DiscoveryUpgradeServiceTests extends ESTestCase { = new DiscoveryNode(UUIDs.randomBase64UUID(random()), buildNewFakeTransportAddress(), Version.CURRENT); final DiscoveryNode fakeNode = DiscoveryUpgradeService.createDiscoveryNodeWithImpossiblyHighId(discoveryNode); assertThat(discoveryNode.getId(), lessThan(fakeNode.getId())); + assertThat(UUIDs.randomBase64UUID(random()), lessThan(fakeNode.getId())); assertThat(fakeNode.getId(), containsString(discoveryNode.getId())); - } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java index 1216f143686..5bcac4a1e26 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java @@ -164,7 +164,7 @@ public class ShardRoutingTests extends ESTestCase { } else { otherRouting = new 
ShardRouting(otherRouting.shardId(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(), otherRouting.primary(), otherRouting.state(), - new RecoverySource.SnapshotRecoverySource(new Snapshot("test", + new RecoverySource.SnapshotRecoverySource(UUIDs.randomBase64UUID(), new Snapshot("test", new SnapshotId("s1", UUIDs.randomBase64UUID())), Version.CURRENT, "test"), otherRouting.unassignedInfo(), otherRouting.allocationId(), otherRouting.getExpectedShardSize()); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java index a2e8f9d7f3f..9cf4ef44709 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java @@ -142,6 +142,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase { ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) .metaData(metaData) .routingTable(RoutingTable.builder().addAsNewRestore(metaData.index("test"), new SnapshotRecoverySource( + UUIDs.randomBase64UUID(), new Snapshot("rep1", new SnapshotId("snp1", UUIDs.randomBase64UUID())), Version.CURRENT, "test"), new IntHashSet()).build()).build(); for (ShardRouting shard : clusterState.getRoutingNodes().shardsWithState(UNASSIGNED)) { @@ -157,7 +158,8 @@ public class UnassignedInfoTests extends ESAllocationTestCase { ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) .metaData(metaData) .routingTable(RoutingTable.builder().addAsRestore(metaData.index("test"), - new SnapshotRecoverySource(new Snapshot("rep1", + new SnapshotRecoverySource( + UUIDs.randomBase64UUID(), new Snapshot("rep1", new SnapshotId("snp1", UUIDs.randomBase64UUID())), Version.CURRENT, "test")).build()).build(); for (ShardRouting shard : clusterState.getRoutingNodes().shardsWithState(UNASSIGNED)) { assertThat(shard.unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.EXISTING_INDEX_RESTORED)); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java index 41881beae82..5c2bfbadd21 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java @@ -45,8 +45,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.gateway.TestGatewayAllocator; import org.hamcrest.Matchers; +import java.util.stream.Collectors; + import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED; public class BalanceConfigurationTests extends ESAllocationTestCase { @@ -85,34 +88,32 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { (numberOfNodes + 1) - (numberOfNodes + 1) / 2, numberOfIndices, numberOfReplicas, numberOfShards, balanceTreshold); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/35365") public void testReplicaBalance() { /* Tests balance over replicas only */ final float indexBalance = 0.0f; final float replicaBalance = 1.0f; - final float balanceTreshold = 
1.0f; + final float balanceThreshold = 1.0f; Settings.Builder settings = Settings.builder(); settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), indexBalance); settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), replicaBalance); - settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), balanceTreshold); + settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), balanceThreshold); AllocationService strategy = createAllocationService(settings.build(), new TestGatewayAllocator()); ClusterState clusterState = initCluster(strategy); - assertReplicaBalance(logger, clusterState.getRoutingNodes(), numberOfNodes, numberOfIndices, - numberOfReplicas, numberOfShards, balanceTreshold); + assertReplicaBalance(clusterState.getRoutingNodes(), numberOfNodes, numberOfIndices, + numberOfReplicas, numberOfShards, balanceThreshold); clusterState = addNode(clusterState, strategy); - assertReplicaBalance(logger, clusterState.getRoutingNodes(), numberOfNodes + 1, - numberOfIndices, numberOfReplicas, numberOfShards, balanceTreshold); + assertReplicaBalance(clusterState.getRoutingNodes(), numberOfNodes + 1, + numberOfIndices, numberOfReplicas, numberOfShards, balanceThreshold); clusterState = removeNodes(clusterState, strategy); - assertReplicaBalance(logger, clusterState.getRoutingNodes(), - (numberOfNodes + 1) - (numberOfNodes + 1) / 2, numberOfIndices, numberOfReplicas, numberOfShards, balanceTreshold); - + assertReplicaBalance(clusterState.getRoutingNodes(), + numberOfNodes + 1 - (numberOfNodes + 1) / 2, numberOfIndices, numberOfReplicas, numberOfShards, balanceThreshold); } private ClusterState initCluster(AllocationService strategy) { @@ -199,9 +200,25 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { } - private void assertReplicaBalance(Logger logger, RoutingNodes nodes, int numberOfNodes, int numberOfIndices, int numberOfReplicas, - int numberOfShards, float treshold) { - final int numShards = numberOfIndices * numberOfShards * (numberOfReplicas + 1); + private void assertReplicaBalance(RoutingNodes nodes, int numberOfNodes, int numberOfIndices, int numberOfReplicas, + int numberOfShards, float treshold) { + final int unassigned = nodes.unassigned().size(); + + if (unassigned > 0) { + // Ensure that if there are any unassigned shards, all of their replicas are unassigned as well + // (i.e. 
unassigned count is always [replicas] + 1 for each unassigned shardId) + nodes.shardsWithState(UNASSIGNED).stream().collect( + Collectors.toMap( + ShardRouting::shardId, + s -> 1, + (a, b) -> a + b + )).values().forEach( + count -> assertEquals(numberOfReplicas + 1, count.longValue()) + ); + } + assertEquals(numberOfNodes, nodes.size()); + + final int numShards = numberOfIndices * numberOfShards * (numberOfReplicas + 1) - unassigned; final float avgNumShards = (float) (numShards) / (float) (numberOfNodes); final int minAvgNumberOfShards = Math.round(Math.round(Math.floor(avgNumShards - treshold))); final int maxAvgNumberOfShards = Math.round(Math.round(Math.ceil(avgNumShards + treshold))); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 7a1c901671c..e59b492606a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -366,7 +366,9 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { ClusterState state = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) .metaData(metaData) .routingTable(RoutingTable.builder().addAsRestore(metaData.index("test"), - new SnapshotRecoverySource(new Snapshot("rep1", new SnapshotId("snp1", UUIDs.randomBase64UUID())), + new SnapshotRecoverySource( + UUIDs.randomBase64UUID(), + new Snapshot("rep1", new SnapshotId("snp1", UUIDs.randomBase64UUID())), Version.CURRENT, "test")).build()) .nodes(DiscoveryNodes.builder().add(newNode).add(oldNode1).add(oldNode2)).build(); AllocationDeciders allocationDeciders = new AllocationDeciders(Arrays.asList( @@ -482,9 +484,11 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { newNode.node().getVersion() + "] to a node with older version [" + oldNode.node().getVersion() + "]")); final SnapshotRecoverySource newVersionSnapshot = new SnapshotRecoverySource( + UUIDs.randomBase64UUID(), new Snapshot("rep1", new SnapshotId("snp1", UUIDs.randomBase64UUID())), newNode.node().getVersion(), "test"); final SnapshotRecoverySource oldVersionSnapshot = new SnapshotRecoverySource( + UUIDs.randomBase64UUID(), new Snapshot("rep1", new SnapshotId("snp1", UUIDs.randomBase64UUID())), oldNode.node().getVersion(), "test"); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java index b67e0ccae22..3c88de4b639 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java @@ -42,6 +42,7 @@ import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; @@ -335,6 +336,7 @@ 
public class ThrottlingAllocationTests extends ESAllocationTestCase { RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); Snapshot snapshot = new Snapshot("repo", new SnapshotId("snap", "randomId")); Set snapshotIndices = new HashSet<>(); + String restoreUUID = UUIDs.randomBase64UUID(); for (ObjectCursor cursor: metaData.indices().values()) { Index index = cursor.value.getIndex(); IndexMetaData.Builder indexMetaDataBuilder = IndexMetaData.builder(cursor.value); @@ -357,12 +359,14 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase { case 3: snapshotIndices.add(index.getName()); routingTableBuilder.addAsNewRestore(indexMetaData, - new SnapshotRecoverySource(snapshot, Version.CURRENT, indexMetaData.getIndex().getName()), new IntHashSet()); + new SnapshotRecoverySource( + restoreUUID, snapshot, Version.CURRENT, indexMetaData.getIndex().getName()), new IntHashSet()); break; case 4: snapshotIndices.add(index.getName()); routingTableBuilder.addAsRestore(indexMetaData, - new SnapshotRecoverySource(snapshot, Version.CURRENT, indexMetaData.getIndex().getName())); + new SnapshotRecoverySource( + restoreUUID, snapshot, Version.CURRENT, indexMetaData.getIndex().getName())); break; case 5: routingTableBuilder.addAsNew(indexMetaData); @@ -385,9 +389,9 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase { } } - RestoreInProgress.Entry restore = new RestoreInProgress.Entry(snapshot, RestoreInProgress.State.INIT, + RestoreInProgress.Entry restore = new RestoreInProgress.Entry(restoreUUID, snapshot, RestoreInProgress.State.INIT, new ArrayList<>(snapshotIndices), restoreShards.build()); - restores.put(RestoreInProgress.TYPE, new RestoreInProgress(restore)); + restores.put(RestoreInProgress.TYPE, new RestoreInProgress.Builder().add(restore).build()); } return ClusterState.builder(CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDeciderTests.java index 86190b107e5..60e3e2438c1 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDeciderTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.snapshots.Snapshot; @@ -139,10 +140,11 @@ public class RestoreInProgressAllocationDeciderTests extends ESAllocationTestCas Snapshot snapshot = recoverySource.snapshot(); RestoreInProgress.State restoreState = RestoreInProgress.State.STARTED; - RestoreInProgress.Entry restore = new RestoreInProgress.Entry(snapshot, restoreState, singletonList("test"), shards.build()); + RestoreInProgress.Entry restore = + new RestoreInProgress.Entry(recoverySource.restoreUUID(), snapshot, restoreState, singletonList("test"), shards.build()); clusterState = ClusterState.builder(clusterState) - .putCustom(RestoreInProgress.TYPE, new RestoreInProgress(restore)) + .putCustom(RestoreInProgress.TYPE, new 
RestoreInProgress.Builder().add(restore).build()) .routingTable(routingTable) .build(); @@ -202,6 +204,6 @@ public class RestoreInProgressAllocationDeciderTests extends ESAllocationTestCas private RecoverySource.SnapshotRecoverySource createSnapshotRecoverySource(final String snapshotName) { Snapshot snapshot = new Snapshot("_repository", new SnapshotId(snapshotName, "_uuid")); - return new RecoverySource.SnapshotRecoverySource(snapshot, Version.CURRENT, "test"); + return new RecoverySource.SnapshotRecoverySource(UUIDs.randomBase64UUID(), snapshot, Version.CURRENT, "test"); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java b/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java index 0379b706c82..061defa6002 100644 --- a/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java @@ -118,14 +118,15 @@ public class ClusterSerializationTests extends ESAllocationTestCase { )); if (includeRestore) { builder.putCustom(RestoreInProgress.TYPE, - new RestoreInProgress( + new RestoreInProgress.Builder().add( new RestoreInProgress.Entry( - new Snapshot("repo2", new SnapshotId("snap2", UUIDs.randomBase64UUID())), + UUIDs.randomBase64UUID(), new Snapshot("repo2", new SnapshotId("snap2", UUIDs.randomBase64UUID())), RestoreInProgress.State.STARTED, Collections.singletonList("index_name"), ImmutableOpenMap.of() ) - )); + ).build() + ); } ClusterState clusterState = builder.incrementVersion().build(); diff --git a/server/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java b/server/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java index 6431a3469b6..05cc442c48e 100644 --- a/server/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java +++ b/server/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java @@ -213,7 +213,6 @@ public class StreamTests extends ESTestCase { } public void testWritableArrays() throws IOException { - final String[] strings = generateRandomStringArray(10, 10, false, true); WriteableString[] sourceArray = Arrays.stream(strings).map(WriteableString::new).toArray(WriteableString[]::new); WriteableString[] targetArray; @@ -233,6 +232,28 @@ public class StreamTests extends ESTestCase { assertThat(targetArray, equalTo(sourceArray)); } + public void testArrays() throws IOException { + final String[] strings; + final String[] deserialized; + Writeable.Writer writer = StreamOutput::writeString; + Writeable.Reader reader = StreamInput::readString; + BytesStreamOutput out = new BytesStreamOutput(); + if (randomBoolean()) { + if (randomBoolean()) { + strings = null; + } else { + strings = generateRandomStringArray(10, 10, false, true); + } + out.writeOptionalArray(writer, strings); + deserialized = out.bytes().streamInput().readOptionalArray(reader, String[]::new); + } else { + strings = generateRandomStringArray(10, 10, false, true); + out.writeArray(writer, strings); + deserialized = out.bytes().streamInput().readArray(reader, String[]::new); + } + assertThat(deserialized, equalTo(strings)); + } + public void testSetOfLongs() throws IOException { final int size = randomIntBetween(0, 6); final Set sourceSet = new HashSet<>(size); diff --git a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java index 
b3790b325c6..d32824df419 100644 --- a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java +++ b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java @@ -473,7 +473,7 @@ public class JavaJodaTimeDuellingTests extends ESTestCase { public void testSeveralTimeFormats() { DateFormatter jodaFormatter = DateFormatter.forPattern("year_month_day||ordinal_date"); - DateFormatter javaFormatter = DateFormatters.forPattern("year_month_day||ordinal_date"); + DateFormatter javaFormatter = DateFormatter.forPattern("8year_month_day||8ordinal_date"); assertSameDate("2018-12-12", "year_month_day||ordinal_date", jodaFormatter, javaFormatter); assertSameDate("2018-128", "year_month_day||ordinal_date", jodaFormatter, javaFormatter); } @@ -488,7 +488,7 @@ public class JavaJodaTimeDuellingTests extends ESTestCase { } private void assertSameDate(String input, String format) { - DateFormatter jodaFormatter = Joda.forPattern(format, Locale.ROOT); + DateFormatter jodaFormatter = Joda.forPattern(format); DateFormatter javaFormatter = DateFormatters.forPattern(format); assertSameDate(input, format, jodaFormatter, javaFormatter); @@ -512,7 +512,7 @@ public class JavaJodaTimeDuellingTests extends ESTestCase { } private void assertJodaParseException(String input, String format, String expectedMessage) { - DateFormatter jodaFormatter = Joda.forPattern(format, Locale.ROOT); + DateFormatter jodaFormatter = Joda.forPattern(format); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> jodaFormatter.parseJoda(input)); assertThat(e.getMessage(), containsString(expectedMessage)); } diff --git a/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java b/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java index a9cf5302aa2..e502dfc6f96 100644 --- a/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTimeZone; import java.time.ZoneId; -import java.util.Locale; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.LongSupplier; @@ -186,7 +185,7 @@ public class JodaDateMathParserTests extends ESTestCase { assertDateMathEquals("2014-11-18T09:20", "2014-11-18T08:20:59.999Z", 0, true, DateTimeZone.forID("CET")); // implicit rounding with explicit timezone in the date format - DateFormatter formatter = DateFormatter.forPattern("YYYY-MM-ddZ"); + DateFormatter formatter = DateFormatter.forPattern("yyyy-MM-ddZ"); DateMathParser parser = formatter.toDateMathParser(); long time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null); assertEquals(this.parser.parse("2011-10-09T00:00:00.000+01:00", () -> 0), time); @@ -261,7 +260,7 @@ public class JodaDateMathParserTests extends ESTestCase { assertDateMathEquals("1418248078000||/m", "2014-12-10T21:47:00.000"); // also check other time units - JodaDateMathParser parser = new JodaDateMathParser(Joda.forPattern("epoch_second", Locale.ROOT)); + JodaDateMathParser parser = new JodaDateMathParser(Joda.forPattern("epoch_second")); long datetime = parser.parse("1418248078", () -> 0); assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000"); @@ -308,7 +307,7 @@ public class JodaDateMathParserTests extends ESTestCase { } public void testThatUnixTimestampMayNotHaveTimeZone() { - JodaDateMathParser parser = new 
JodaDateMathParser(Joda.forPattern("epoch_millis", Locale.ROOT)); + JodaDateMathParser parser = new JodaDateMathParser(Joda.forPattern("epoch_millis")); try { parser.parse("1234567890123", () -> 42, false, ZoneId.of("CET")); fail("Expected ElasticsearchParseException"); diff --git a/server/src/test/java/org/elasticsearch/common/joda/SimpleJodaTests.java b/server/src/test/java/org/elasticsearch/common/joda/SimpleJodaTests.java index 4f745849182..5f0bff1abf5 100644 --- a/server/src/test/java/org/elasticsearch/common/joda/SimpleJodaTests.java +++ b/server/src/test/java/org/elasticsearch/common/joda/SimpleJodaTests.java @@ -346,11 +346,11 @@ public class SimpleJodaTests extends ESTestCase { } public void testThatEpochParserIsPrinter() { - JodaDateFormatter formatter = Joda.forPattern("epoch_millis", Locale.ROOT); + JodaDateFormatter formatter = Joda.forPattern("epoch_millis"); assertThat(formatter.parser.isPrinter(), is(true)); assertThat(formatter.printer.isPrinter(), is(true)); - JodaDateFormatter epochSecondFormatter = Joda.forPattern("epoch_second", Locale.ROOT); + JodaDateFormatter epochSecondFormatter = Joda.forPattern("epoch_second"); assertThat(epochSecondFormatter.parser.isPrinter(), is(true)); assertThat(epochSecondFormatter.printer.isPrinter(), is(true)); } @@ -736,6 +736,23 @@ public class SimpleJodaTests extends ESTestCase { } } + public void testDeprecatedFormatSpecifiers() { + Joda.forPattern("CC"); + assertWarnings("Use of 'C' (century-of-era) is deprecated and will not be supported in the" + + " next major version of Elasticsearch."); + Joda.forPattern("YYYY"); + assertWarnings("Use of 'Y' (year-of-era) will change to 'y' in the" + + " next major version of Elasticsearch. Prefix your date format with '8' to use the new specifier."); + Joda.forPattern("xxxx"); + assertWarnings("Use of 'x' (week-based-year) will change" + + " to 'Y' in the next major version of Elasticsearch. Prefix your date format with '8' to use the new specifier."); + // multiple deprecations + Joda.forPattern("CC-YYYY"); + assertWarnings("Use of 'C' (century-of-era) is deprecated and will not be supported in the" + + " next major version of Elasticsearch.", "Use of 'Y' (year-of-era) will change to 'y' in the" + + " next major version of Elasticsearch. 
Prefix your date format with '8' to use the new specifier."); + } + private void assertValidDateFormatParsing(String pattern, String dateToParse) { assertValidDateFormatParsing(pattern, dateToParse, dateToParse); } diff --git a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index b677247f266..ea894a2edd0 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.Field.Store; +import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; @@ -37,8 +38,12 @@ import org.apache.lucene.index.SoftDeletesRetentionMergePolicy; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.SortField; +import org.apache.lucene.search.SortedNumericSortField; +import org.apache.lucene.search.SortedSetSelector; +import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.Weight; @@ -46,8 +51,18 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.util.Bits; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; +import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource; +import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource; +import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource; +import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.io.StringReader; @@ -62,6 +77,8 @@ import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.equalTo; public class LuceneTests extends ESTestCase { + private static final NamedWriteableRegistry EMPTY_REGISTRY = new NamedWriteableRegistry(Collections.emptyList()); + public void testWaitForIndex() throws Exception { final MockDirectoryWrapper dir = newMockDirectory(); @@ -498,4 +515,148 @@ public class LuceneTests extends ESTestCase { } IOUtils.close(writer, dir); } + + public void testSortFieldSerialization() throws IOException { + Tuple sortFieldTuple = randomSortField(); + SortField deserialized = copyInstance(sortFieldTuple.v1(), EMPTY_REGISTRY, Lucene::writeSortField, Lucene::readSortField, + VersionUtils.randomVersion(random())); + assertEquals(sortFieldTuple.v2(), deserialized); + } + + public void testSortValueSerialization() throws IOException { + Object sortValue = 
randomSortValue(); + Object deserialized = copyInstance(sortValue, EMPTY_REGISTRY, Lucene::writeSortValue, Lucene::readSortValue, + VersionUtils.randomVersion(random())); + assertEquals(sortValue, deserialized); + } + + public static Object randomSortValue() { + switch(randomIntBetween(0, 8)) { + case 0: + return randomAlphaOfLengthBetween(3, 10); + case 1: + return randomInt(); + case 2: + return randomLong(); + case 3: + return randomFloat(); + case 4: + return randomDouble(); + case 5: + return randomByte(); + case 6: + return randomShort(); + case 7: + return randomBoolean(); + case 8: + return new BytesRef(randomAlphaOfLengthBetween(3, 10)); + default: + throw new UnsupportedOperationException(); + } + } + + public static Tuple randomSortField() { + switch(randomIntBetween(0, 2)) { + case 0: + return randomSortFieldCustomComparatorSource(); + case 1: + return randomCustomSortField(); + case 2: + String field = randomAlphaOfLengthBetween(3, 10); + SortField.Type type = randomFrom(SortField.Type.values()); + if ((type == SortField.Type.SCORE || type == SortField.Type.DOC) && randomBoolean()) { + field = null; + } + SortField sortField = new SortField(field, type, randomBoolean()); + Object missingValue = randomMissingValue(sortField.getType()); + if (missingValue != null) { + sortField.setMissingValue(missingValue); + } + return Tuple.tuple(sortField, sortField); + default: + throw new UnsupportedOperationException(); + } + } + + private static Tuple randomSortFieldCustomComparatorSource() { + String field = randomAlphaOfLengthBetween(3, 10); + IndexFieldData.XFieldComparatorSource comparatorSource; + boolean reverse = randomBoolean(); + Object missingValue = null; + switch(randomIntBetween(0, 3)) { + case 0: + comparatorSource = new LongValuesComparatorSource(null, randomBoolean() ? randomLong() : null, + randomFrom(MultiValueMode.values()), null); + break; + case 1: + comparatorSource = new DoubleValuesComparatorSource(null, randomBoolean() ? randomDouble() : null, + randomFrom(MultiValueMode.values()), null); + break; + case 2: + comparatorSource = new FloatValuesComparatorSource(null, randomBoolean() ? randomFloat() : null, + randomFrom(MultiValueMode.values()), null); + break; + case 3: + comparatorSource = new BytesRefFieldComparatorSource(null, + randomBoolean() ? 
"_first" : "_last", randomFrom(MultiValueMode.values()), null); + missingValue = comparatorSource.missingValue(reverse); + break; + default: + throw new UnsupportedOperationException(); + } + SortField sortField = new SortField(field, comparatorSource, reverse); + SortField expected = new SortField(field, comparatorSource.reducedType(), reverse); + expected.setMissingValue(missingValue); + return Tuple.tuple(sortField, expected); + } + + private static Tuple randomCustomSortField() { + String field = randomAlphaOfLengthBetween(3, 10); + switch(randomIntBetween(0, 2)) { + case 0: { + SortField sortField = LatLonDocValuesField.newDistanceSort(field, 0, 0); + SortField expected = new SortField(field, SortField.Type.DOUBLE); + expected.setMissingValue(Double.POSITIVE_INFINITY); + return Tuple.tuple(sortField, expected); + } + case 1: { + SortedSetSortField sortField = new SortedSetSortField(field, randomBoolean(), randomFrom(SortedSetSelector.Type.values())); + SortField expected = new SortField(sortField.getField(), SortField.Type.STRING, sortField.getReverse()); + Object missingValue = randomMissingValue(SortField.Type.STRING); + sortField.setMissingValue(missingValue); + expected.setMissingValue(missingValue); + return Tuple.tuple(sortField, expected); + } + case 2: { + SortField.Type type = randomFrom(SortField.Type.DOUBLE, SortField.Type.INT, SortField.Type.FLOAT, SortField.Type.LONG); + SortedNumericSortField sortField = new SortedNumericSortField(field, type, randomBoolean()); + SortField expected = new SortField(sortField.getField(), sortField.getNumericType(), sortField.getReverse()); + Object missingValue = randomMissingValue(type); + if (missingValue != null) { + sortField.setMissingValue(missingValue); + expected.setMissingValue(missingValue); + } + return Tuple.tuple(sortField, expected); + } + default: + throw new UnsupportedOperationException(); + } + } + + private static Object randomMissingValue(SortField.Type type) { + switch(type) { + case INT: + return randomInt(); + case FLOAT: + return randomFloat(); + case DOUBLE: + return randomDouble(); + case LONG: + return randomLong(); + case STRING: + return randomBoolean() ? SortField.STRING_FIRST : SortField.STRING_LAST; + default: + return null; + } + } } diff --git a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 81060ee1036..9194a60382d 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.index.IndexModule; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.TransportSettings; import java.io.IOException; import java.util.ArrayList; @@ -614,7 +614,7 @@ public class ScopedSettingsTests extends ESTestCase { // array settings - complex matcher assertNotNull(settings.get("transport.tracer.include." + randomIntBetween(1, 100))); - assertSame(TransportService.TRACE_LOG_INCLUDE_SETTING, settings.get("transport.tracer.include." + randomIntBetween(1, 100))); + assertSame(TransportSettings.TRACE_LOG_INCLUDE_SETTING, settings.get("transport.tracer.include." 
+ randomIntBetween(1, 100))); // array settings - complex matcher - only accepts numbers assertNull(settings.get("transport.tracer.include.FOO")); @@ -756,7 +756,7 @@ public class ScopedSettingsTests extends ESTestCase { public void testUpdateTracer() { ClusterSettings settings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); AtomicReference<List<String>> ref = new AtomicReference<>(); - settings.addSettingsUpdateConsumer(TransportService.TRACE_LOG_INCLUDE_SETTING, ref::set); + settings.addSettingsUpdateConsumer(TransportSettings.TRACE_LOG_INCLUDE_SETTING, ref::set); settings.applySettings(Settings.builder() .putList("transport.tracer.include", "internal:index/shard/recovery/*", "internal:gateway/local*").build()); assertNotNull(ref.get().size()); diff --git a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java index 8f927bda22e..98e58f7a0eb 100644 --- a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java +++ b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.time; +import org.elasticsearch.common.joda.JodaDateFormatter; import org.elasticsearch.test.ESTestCase; import java.time.Instant; @@ -30,6 +31,7 @@ import java.util.Locale; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; @@ -192,4 +194,16 @@ public class DateFormattersTests extends ESTestCase { assertThat(epochMillisFormatter, sameInstance(DateFormatters.forPattern("epoch_millis"))); assertThat(epochMillisFormatter, equalTo(DateFormatters.forPattern("epoch_millis"))); } + + public void testForceJava8() { + assertThat(DateFormatter.forPattern("8yyyy-MM-dd"), instanceOf(JavaDateFormatter.class)); + // named formats too + assertThat(DateFormatter.forPattern("8date_optional_time"), instanceOf(JavaDateFormatter.class)); + // composite formats too + DateFormatter formatter = DateFormatter.forPattern("8date_optional_time||ww-MM-dd"); + assertThat(formatter, instanceOf(DateFormatters.MergedDateFormatter.class)); + DateFormatters.MergedDateFormatter mergedFormatter = (DateFormatters.MergedDateFormatter) formatter; + assertThat(mergedFormatter.formatters.get(0), instanceOf(JavaDateFormatter.class)); + assertThat(mergedFormatter.formatters.get(1), instanceOf(JodaDateFormatter.class)); + } } diff --git a/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java b/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java index c061e545a29..6c8afa1e3db 100644 --- a/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java @@ -239,7 +239,7 @@ public class JavaDateMathParserTests extends ESTestCase { assertDateMathEquals("1418248078000||/m", "2014-12-10T21:47:00.000"); // also check other time units - DateMathParser parser = DateFormatters.forPattern("epoch_second||dateOptionalTime").toDateMathParser(); + DateMathParser parser = DateFormatter.forPattern("8epoch_second||8dateOptionalTime").toDateMathParser(); long datetime = parser.parse("1418248078", () -> 0); assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000"); diff --git 
a/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java b/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java index e5a21ef7e93..7a9a6570d0d 100644 --- a/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java +++ b/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java @@ -46,7 +46,7 @@ import org.elasticsearch.test.disruption.NetworkDisruption.TwoPartitions; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; import org.elasticsearch.test.disruption.SlowClusterStateProcessing; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.TransportSettings; import org.junit.Before; import java.util.Arrays; @@ -142,7 +142,7 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase { .put(JoinHelper.JOIN_TIMEOUT_SETTING.getKey(), "10s") // still long enough to induce failures, but not too long so the test won't time out .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly .put(Coordinator.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly - .put(TransportService.TCP_CONNECT_TIMEOUT.getKey(), "10s") // Network delay disruption waits for the min between this + .put(TransportSettings.CONNECT_TIMEOUT.getKey(), "10s") // Network delay disruption waits for the min between this // value and the time of disruption and does not recover immediately // when disruption is stopped. We should make sure we recover faster // than the default of 30s, which would otherwise cause ensureGreen and friends to time out diff --git a/server/src/test/java/org/elasticsearch/discovery/MasterDisruptionIT.java b/server/src/test/java/org/elasticsearch/discovery/MasterDisruptionIT.java index f79b82fad7f..652ce1fca1e 100644 --- a/server/src/test/java/org/elasticsearch/discovery/MasterDisruptionIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/MasterDisruptionIT.java @@ -61,6 +61,7 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; @@ -171,10 +172,9 @@ public class MasterDisruptionIT extends AbstractDisruptionTestCase { majoritySide.remove(oldMasterNode); // Keeps track of the previous and current master when a master node transition took place on each node on the majority side: - final Map<String, List<Tuple<String, String>>> masters = Collections.synchronizedMap(new HashMap<String, List<Tuple<String, String>>>()); + final Map<String, List<Tuple<String, String>>> masters = Collections.synchronizedMap(new HashMap<>()); for (final String node : majoritySide) { - masters.put(node, new ArrayList<Tuple<String, String>>()); + masters.put(node, new ArrayList<>()); internalCluster().getInstance(ClusterService.class, node).addListener(event -> { DiscoveryNode previousMaster = event.previousState().nodes().getMasterNode(); DiscoveryNode currentMaster = event.state().nodes().getMasterNode(); @@ -223,7 +223,7 @@ public class MasterDisruptionIT extends AbstractDisruptionTestCase { internalCluster().getInstance(ClusterService.class, oldMasterNode).submitStateUpdateTask("sneaky-update", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override - public ClusterState execute(ClusterState currentState) throws Exception { + public ClusterState execute(ClusterState currentState) { return 
ClusterState.builder(currentState).build(); } @@ -250,16 +250,16 @@ public class MasterDisruptionIT extends AbstractDisruptionTestCase { for (Map.Entry<String, List<Tuple<String, String>>> entry : masters.entrySet()) { String nodeName = entry.getKey(); List<Tuple<String, String>> recordedMasterTransition = entry.getValue(); - assertThat("[" + nodeName + "] Each node should only record two master node transitions", recordedMasterTransition.size(), - equalTo(2)); - assertThat("[" + nodeName + "] First transition's previous master should be [null]", recordedMasterTransition.get(0).v1(), - equalTo(oldMasterNode)); - assertThat("[" + nodeName + "] First transition's current master should be [" + newMasterNode + "]", recordedMasterTransition .get(0).v2(), nullValue()); - assertThat("[" + nodeName + "] Second transition's previous master should be [null]", recordedMasterTransition.get(1).v1(), - nullValue()); + assertThat("[" + nodeName + "] Each node should only record two master node transitions", + recordedMasterTransition, hasSize(2)); + assertThat("[" + nodeName + "] First transition's previous master should be [" + oldMasterNode + "]", + recordedMasterTransition.get(0).v1(), equalTo(oldMasterNode)); + assertThat("[" + nodeName + "] First transition's current master should be [null]", + recordedMasterTransition.get(0).v2(), nullValue()); + assertThat("[" + nodeName + "] Second transition's previous master should be [null]", + recordedMasterTransition.get(1).v1(), nullValue()); assertThat("[" + nodeName + "] Second transition's current master should be [" + newMasterNode + "]", - recordedMasterTransition.get(1).v2(), equalTo(newMasterNode)); + recordedMasterTransition.get(1).v2(), equalTo(newMasterNode)); } } @@ -506,7 +506,7 @@ public class MasterDisruptionIT extends AbstractDisruptionTestCase { } - void assertDiscoveryCompleted(List<String> nodes) throws InterruptedException { + private void assertDiscoveryCompleted(List<String> nodes) throws InterruptedException { for (final String node : nodes) { assertTrue( "node [" + node + "] is still joining master", @@ -524,5 +524,4 @@ public class MasterDisruptionIT extends AbstractDisruptionTestCase { ); } } - } diff --git a/server/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java b/server/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java index fbe3ef00a06..85d8f95c6b1 100644 --- a/server/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java @@ -46,6 +46,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportConnectionListener; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.transport.nio.MockNioTransport; import org.hamcrest.Matcher; import org.hamcrest.Matchers; @@ -136,7 +137,7 @@ public class ZenFaultDetectionTests extends ESTestCase { Settings.builder() .put(settings) // trace zenfd actions but keep the default otherwise - .putList(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), TransportLivenessAction.NAME) + .putList(TransportSettings.TRACE_LOG_EXCLUDE_SETTING.getKey(), TransportLivenessAction.NAME) .build(), new MockNioTransport(settings, version, threadPool, new NetworkService(Collections.emptyList()), PageCacheRecycler.NON_RECYCLING_INSTANCE, namedWriteableRegistry, circuitBreakerService), diff --git 
a/server/src/test/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProviderTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProviderTests.java index 8fba12197ca..cc9295cee2e 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProviderTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProviderTests.java @@ -19,21 +19,22 @@ package org.elasticsearch.discovery.zen; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.env.Environment; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.MockTcpTransport; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.nio.MockNioTransport; import org.junit.After; import org.junit.Before; @@ -78,10 +79,11 @@ public class FileBasedUnicastHostsProviderTests extends ESTestCase { @Before public void createTransportSvc() { - final MockTcpTransport transport = new MockTcpTransport(Settings.EMPTY, threadPool, BigArrays.NON_RECYCLING_INSTANCE, - new NoneCircuitBreakerService(), + final MockNioTransport transport = new MockNioTransport(Settings.EMPTY, Version.CURRENT, threadPool, + new NetworkService(Collections.emptyList()), + PageCacheRecycler.NON_RECYCLING_INSTANCE, new NamedWriteableRegistry(Collections.emptyList()), - new NetworkService(Collections.emptyList())) { + new NoneCircuitBreakerService()) { @Override public BoundTransportAddress boundAddress() { return new BoundTransportAddress( diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/PublishClusterStateActionTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/PublishClusterStateActionTests.java index 4bd7b4e663a..183df5c9564 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/PublishClusterStateActionTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/PublishClusterStateActionTests.java @@ -57,6 +57,7 @@ import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportConnectionListener; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.TransportSettings; import org.junit.After; import org.junit.Before; @@ -182,8 +183,8 @@ public class PublishClusterStateActionTests extends ESTestCase { ThreadPool threadPool, Logger logger, Map nodes) throws Exception { final Settings settings = Settings.builder() .put("name", name) - .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "").put( - TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") + .put(TransportSettings.TRACE_LOG_INCLUDE_SETTING.getKey(), "").put( + TransportSettings.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") .put(basSettings) .build(); diff --git 
a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index c380341b51b..fedbf02a8e8 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -37,7 +37,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -49,14 +48,13 @@ import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.MockTcpTransport; -import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportConnectionListener; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.transport.nio.MockNioTransport; import org.junit.After; import org.junit.Before; @@ -139,9 +137,9 @@ public class UnicastZenPingTests extends ESTestCase { public void testSimplePings() throws IOException, InterruptedException, ExecutionException { // use ephemeral ports - final Settings settings = Settings.builder().put("cluster.name", "test").put(TcpTransport.PORT.getKey(), 0).build(); + final Settings settings = Settings.builder().put("cluster.name", "test").put(TransportSettings.PORT.getKey(), 0).build(); final Settings settingsMismatch = - Settings.builder().put(settings).put("cluster.name", "mismatch").put(TcpTransport.PORT.getKey(), 0).build(); + Settings.builder().put(settings).put("cluster.name", "mismatch").put(TransportSettings.PORT.getKey(), 0).build(); NetworkService networkService = new NetworkService(Collections.emptyList()); @@ -265,7 +263,7 @@ public class UnicastZenPingTests extends ESTestCase { public void testUnknownHostNotCached() throws ExecutionException, InterruptedException { // use ephemeral ports - final Settings settings = Settings.builder().put("cluster.name", "test").put(TcpTransport.PORT.getKey(), 0).build(); + final Settings settings = Settings.builder().put("cluster.name", "test").put(TransportSettings.PORT.getKey(), 0).build(); final NetworkService networkService = new NetworkService(Collections.emptyList()); @@ -374,13 +372,14 @@ public class UnicastZenPingTests extends ESTestCase { public void testPortLimit() throws InterruptedException { final NetworkService networkService = new NetworkService(Collections.emptyList()); - final Transport transport = new MockTcpTransport( + final Transport transport = new MockNioTransport( Settings.EMPTY, + Version.CURRENT, threadPool, - BigArrays.NON_RECYCLING_INSTANCE, - new NoneCircuitBreakerService(), + networkService, + PageCacheRecycler.NON_RECYCLING_INSTANCE, new NamedWriteableRegistry(Collections.emptyList()), - networkService) { + new 
NoneCircuitBreakerService()) { @Override public BoundTransportAddress boundAddress() { @@ -415,13 +414,14 @@ public class UnicastZenPingTests extends ESTestCase { public void testRemovingLocalAddresses() throws InterruptedException { final NetworkService networkService = new NetworkService(Collections.emptyList()); final InetAddress loopbackAddress = InetAddress.getLoopbackAddress(); - final Transport transport = new MockTcpTransport( + final Transport transport = new MockNioTransport( Settings.EMPTY, + Version.CURRENT, threadPool, - BigArrays.NON_RECYCLING_INSTANCE, - new NoneCircuitBreakerService(), + networkService, + PageCacheRecycler.NON_RECYCLING_INSTANCE, new NamedWriteableRegistry(Collections.emptyList()), - networkService) { + new NoneCircuitBreakerService()) { @Override public BoundTransportAddress boundAddress() { @@ -460,13 +460,14 @@ public class UnicastZenPingTests extends ESTestCase { final NetworkService networkService = new NetworkService(Collections.emptyList()); final String hostname = randomAlphaOfLength(8); final UnknownHostException unknownHostException = new UnknownHostException(hostname); - final Transport transport = new MockTcpTransport( + final Transport transport = new MockNioTransport( Settings.EMPTY, + Version.CURRENT, threadPool, - BigArrays.NON_RECYCLING_INSTANCE, - new NoneCircuitBreakerService(), + networkService, + PageCacheRecycler.NON_RECYCLING_INSTANCE, new NamedWriteableRegistry(Collections.emptyList()), - networkService) { + new NoneCircuitBreakerService()) { @Override public BoundTransportAddress boundAddress() { @@ -506,13 +507,14 @@ public class UnicastZenPingTests extends ESTestCase { final Logger logger = mock(Logger.class); final NetworkService networkService = new NetworkService(Collections.emptyList()); final CountDownLatch latch = new CountDownLatch(1); - final Transport transport = new MockTcpTransport( + final Transport transport = new MockNioTransport( Settings.EMPTY, + Version.CURRENT, threadPool, - BigArrays.NON_RECYCLING_INSTANCE, - new NoneCircuitBreakerService(), + networkService, + PageCacheRecycler.NON_RECYCLING_INSTANCE, new NamedWriteableRegistry(Collections.emptyList()), - networkService) { + new NoneCircuitBreakerService()) { @Override public BoundTransportAddress boundAddress() { @@ -567,17 +569,18 @@ public class UnicastZenPingTests extends ESTestCase { } public void testResolveReuseExistingNodeConnections() throws ExecutionException, InterruptedException { - final Settings settings = Settings.builder().put("cluster.name", "test").put(TcpTransport.PORT.getKey(), 0).build(); + final Settings settings = Settings.builder().put("cluster.name", "test").put(TransportSettings.PORT.getKey(), 0).build(); NetworkService networkService = new NetworkService(Collections.emptyList()); - final BiFunction supplier = (s, v) -> new MockTcpTransport( + final BiFunction supplier = (s, v) -> new MockNioTransport( s, + Version.CURRENT, threadPool, - BigArrays.NON_RECYCLING_INSTANCE, - new NoneCircuitBreakerService(), + networkService, + PageCacheRecycler.NON_RECYCLING_INSTANCE, new NamedWriteableRegistry(Collections.emptyList()), - networkService); + new NoneCircuitBreakerService()); NetworkHandle handleA = startServices(settings, threadPool, "UZP_A", Version.CURRENT, supplier, EnumSet.allOf(Role.class)); closeables.push(handleA.transportService); @@ -632,7 +635,7 @@ public class UnicastZenPingTests extends ESTestCase { } public void testPingingTemporalPings() throws ExecutionException, InterruptedException { - final Settings settings = 
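// NOTE (illustrative sketch; constructor order inferred from the hunks above): these tests swap
// MockTcpTransport for MockNioTransport, whose constructor takes (Settings, Version, ThreadPool,
// NetworkService, PageCacheRecycler, NamedWriteableRegistry, CircuitBreakerService). Hence
// BigArrays.NON_RECYCLING_INSTANCE becomes PageCacheRecycler.NON_RECYCLING_INSTANCE and the
// circuit breaker moves to the last position:
//   Transport transport = new MockNioTransport(Settings.EMPTY, Version.CURRENT, threadPool,
//       new NetworkService(Collections.emptyList()), PageCacheRecycler.NON_RECYCLING_INSTANCE,
//       new NamedWriteableRegistry(Collections.emptyList()), new NoneCircuitBreakerService());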
Settings.builder().put("cluster.name", "test").put(TcpTransport.PORT.getKey(), 0).build(); + final Settings settings = Settings.builder().put("cluster.name", "test").put(TransportSettings.PORT.getKey(), 0).build(); NetworkService networkService = new NetworkService(Collections.emptyList()); @@ -771,7 +774,7 @@ public class UnicastZenPingTests extends ESTestCase { final Set nodeRoles) { final Settings nodeSettings = Settings.builder().put(settings) .put("node.name", nodeId) - .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "internal:discovery/zen/unicast") + .put(TransportSettings.TRACE_LOG_INCLUDE_SETTING.getKey(), "internal:discovery/zen/unicast") .build(); final Transport transport = supplier.apply(nodeSettings, version); final MockTransportService transportService = diff --git a/server/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java b/server/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java index e6fc0c535df..3be4f1c1d80 100644 --- a/server/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java @@ -391,7 +391,8 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { final Snapshot snapshot = new Snapshot("test", new SnapshotId("test", UUIDs.randomBase64UUID())); RoutingTable routingTable = RoutingTable.builder() - .addAsRestore(metaData.index(shardId.getIndex()), new SnapshotRecoverySource(snapshot, Version.CURRENT, shardId.getIndexName())) + .addAsRestore(metaData.index(shardId.getIndex()), + new SnapshotRecoverySource(UUIDs.randomBase64UUID(), snapshot, Version.CURRENT, shardId.getIndexName())) .build(); ClusterState state = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) .metaData(metaData) diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index c0156d54cc8..bf0f704f264 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -3609,8 +3609,8 @@ public class InternalEngineTests extends EngineTestCase { } engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(numDocs, topDocs.totalHits.value); + int count = searcher.searcher().count(new MatchAllDocsQuery()); + assertEquals(numDocs, count); } if (primary) { // primaries rely on lucene dedup and may index the same document twice @@ -3716,8 +3716,8 @@ public class InternalEngineTests extends EngineTestCase { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(docs.size(), topDocs.totalHits.value); + int count = searcher.searcher().count(new MatchAllDocsQuery()); + assertEquals(docs.size(), count); } assertEquals(0, engine.getNumVersionLookups()); assertEquals(0, engine.getNumIndexVersionsLookups()); @@ -5426,41 +5426,17 @@ public class InternalEngineTests extends EngineTestCase { final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetaData); final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); Path translogPath = createTempDir(); - int numOps = 
scaledRandomIntBetween(1, 500); - List operations = new ArrayList<>(); - for (int i = 0; i < numOps; i++) { - long seqNo = i; - final ParsedDocument doc = EngineTestCase.createParsedDoc(Integer.toString(between(1, 100)), null); - if (randomBoolean()) { - operations.add(new Engine.Index(EngineTestCase.newUid(doc), doc, seqNo, primaryTerm.get(), - i, null, Engine.Operation.Origin.REPLICA, threadPool.relativeTimeInMillis(), -1, true, UNASSIGNED_SEQ_NO, 0L)); - } else if (randomBoolean()) { - operations.add(new Engine.Delete(doc.type(), doc.id(), EngineTestCase.newUid(doc), seqNo, primaryTerm.get(), - i, null, Engine.Operation.Origin.REPLICA, threadPool.relativeTimeInMillis(), - UNASSIGNED_SEQ_NO, 0L)); - } else { - operations.add(new Engine.NoOp(seqNo, primaryTerm.get(), Engine.Operation.Origin.REPLICA, - threadPool.relativeTimeInMillis(), "test-" + i)); - } - } - Randomness.shuffle(operations); + List operations = generateHistoryOnReplica(between(1, 500), randomBoolean(), randomBoolean(), randomBoolean()); List> commits = new ArrayList<>(); commits.add(new ArrayList<>()); try (Store store = createStore()) { - EngineConfig config = config(indexSettings, store, translogPath, - newMergePolicy(), null, null, globalCheckpoint::get); + EngineConfig config = config(indexSettings, store, translogPath, NoMergePolicy.INSTANCE, null, null, globalCheckpoint::get); final List docs; try (InternalEngine engine = createEngine(config)) { List flushedOperations = new ArrayList<>(); for (Engine.Operation op : operations) { flushedOperations.add(op); - if (op instanceof Engine.Index) { - engine.index((Engine.Index) op); - } else if (op instanceof Engine.Delete) { - engine.delete((Engine.Delete) op); - } else { - engine.noOp((Engine.NoOp) op); - } + applyOperation(engine, op); if (randomInt(100) < 10) { engine.refresh("test"); } diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesDatesTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesDatesTests.java deleted file mode 100644 index 42f5470ee89..00000000000 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesDatesTests.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.fielddata; - -import org.elasticsearch.index.fielddata.ScriptDocValues.Dates; -import org.elasticsearch.test.ESTestCase; - -import java.security.AccessControlContext; -import java.security.AccessController; -import java.security.PermissionCollection; -import java.security.Permissions; -import java.security.PrivilegedAction; -import java.security.ProtectionDomain; -import java.util.HashSet; -import java.util.Set; -import java.util.function.BiConsumer; - -import static org.hamcrest.Matchers.contains; - -public class ScriptDocValuesDatesTests extends ESTestCase { - - public void testGetValues() { - Set keys = new HashSet<>(); - Set warnings = new HashSet<>(); - - Dates dates = biconsumerWrap((deprecationKey, deprecationMessage) -> { - keys.add(deprecationKey); - warnings.add(deprecationMessage); - - // Create a temporary directory to prove we are running with the server's permissions. - createTempDir(); - }); - - /* - * Invoke getValues() without any permissions to verify it still works. - * This is done using the callback created above, which creates a temp - * directory, which is not possible with "noPermission". - */ - PermissionCollection noPermissions = new Permissions(); - AccessControlContext noPermissionsAcc = new AccessControlContext( - new ProtectionDomain[] { - new ProtectionDomain(null, noPermissions) - } - ); - AccessController.doPrivileged(new PrivilegedAction(){ - public Void run() { - dates.getValues(); - return null; - } - }, noPermissionsAcc); - - assertThat(warnings, contains( - "Deprecated getValues used, the field is a list and should be accessed directly." - + " For example, use doc['foo'] instead of doc['foo'].values.")); - assertThat(keys, contains("ScriptDocValues#getValues")); - - - } - - private Dates biconsumerWrap(BiConsumer deprecationHandler) { - return new Dates(new AbstractSortedNumericDocValues() { - @Override - public boolean advanceExact(int doc) { - return true; - } - @Override - public int docValueCount() { - return 0; - } - @Override - public long nextValue() { - return 0L; - } - }, deprecationHandler); - } -} diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java index 2924694c102..72d890edc79 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java @@ -21,22 +21,10 @@ package org.elasticsearch.index.fielddata; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.index.fielddata.ScriptDocValues.GeoPoints; import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import java.security.AccessControlContext; -import java.security.AccessController; -import java.security.PermissionCollection; -import java.security.Permissions; -import java.security.PrivilegedAction; -import java.security.ProtectionDomain; import java.util.Arrays; -import java.util.HashSet; -import java.util.Set; -import java.util.function.BiConsumer; - -import static org.hamcrest.Matchers.contains; public class ScriptDocValuesGeoPointsTests extends ESTestCase { @@ -83,51 +71,18 @@ public class ScriptDocValuesGeoPointsTests extends ESTestCase { final double lon1 = randomLon(); final double lon2 = randomLon(); - Set warnings = new HashSet<>(); - Set keys = new HashSet<>(); - final MultiGeoPointValues 
values = wrap(new GeoPoint(lat1, lon1), new GeoPoint(lat2, lon2)); - final ScriptDocValues.GeoPoints script = geoPointsWrap(values, (deprecationKey, deprecationMessage) -> { - keys.add(deprecationKey); - warnings.add(deprecationMessage); + final ScriptDocValues.GeoPoints script = new ScriptDocValues.GeoPoints(values); - // Create a temporary directory to prove we are running with the server's permissions. - createTempDir(); - }); script.setNextDocId(1); assertEquals(true, script.isEmpty()); script.setNextDocId(0); assertEquals(false, script.isEmpty()); assertEquals(new GeoPoint(lat1, lon1), script.getValue()); - assertEquals(Arrays.asList(new GeoPoint(lat1, lon1), new GeoPoint(lat2, lon2)), script.getValues()); assertEquals(lat1, script.getLat(), 0); assertEquals(lon1, script.getLon(), 0); assertTrue(Arrays.equals(new double[] {lat1, lat2}, script.getLats())); assertTrue(Arrays.equals(new double[] {lon1, lon2}, script.getLons())); - - /* - * Invoke getValues() without any permissions to verify it still works. - * This is done using the callback created above, which creates a temp - * directory, which is not possible with "noPermission". - */ - PermissionCollection noPermissions = new Permissions(); - AccessControlContext noPermissionsAcc = new AccessControlContext( - new ProtectionDomain[] { - new ProtectionDomain(null, noPermissions) - } - ); - AccessController.doPrivileged(new PrivilegedAction(){ - public Void run() { - script.getValues(); - return null; - } - }, noPermissionsAcc); - - assertThat(warnings, contains( - "Deprecated getValues used, the field is a list and should be accessed directly." - + " For example, use doc['foo'] instead of doc['foo'].values.")); - assertThat(keys, contains("ScriptDocValues#getValues")); - } public void testGeoDistance() throws IOException { @@ -155,9 +110,4 @@ public class ScriptDocValuesGeoPointsTests extends ESTestCase { script.planeDistanceWithDefault(otherLat, otherLon, 42) / 1000d, 0.01); assertEquals(42, emptyScript.planeDistanceWithDefault(otherLat, otherLon, 42), 0); } - - private GeoPoints geoPointsWrap(MultiGeoPointValues in, BiConsumer deprecationHandler) { - return new GeoPoints(in, deprecationHandler); - } - } diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java index fd2a18a1662..a5674e4da7d 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java @@ -23,17 +23,7 @@ import org.elasticsearch.index.fielddata.ScriptDocValues.Longs; import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import java.security.AccessControlContext; -import java.security.AccessController; -import java.security.PermissionCollection; -import java.security.Permissions; -import java.security.PrivilegedAction; -import java.security.ProtectionDomain; -import java.util.HashSet; -import java.util.Set; -import java.util.function.BiConsumer; -import static org.hamcrest.Matchers.contains; public class ScriptDocValuesLongsTests extends ESTestCase { public void testLongs() throws IOException { @@ -45,16 +35,7 @@ public class ScriptDocValuesLongsTests extends ESTestCase { } } - Set warnings = new HashSet<>(); - Set keys = new HashSet<>(); - - Longs longs = wrap(values, (deprecationKey, deprecationMessage) -> { - keys.add(deprecationKey); - warnings.add(deprecationMessage); - - // Create a 
temporary directory to prove we are running with the server's permissions. - createTempDir(); - }); + Longs longs = wrap(values); for (int round = 0; round < 10; round++) { int d = between(0, values.length - 1); @@ -67,42 +48,16 @@ public class ScriptDocValuesLongsTests extends ESTestCase { "Use doc[].size()==0 to check if a document is missing a field!", e.getMessage()); } assertEquals(values[d].length, longs.size()); - assertEquals(values[d].length, longs.getValues().size()); for (int i = 0; i < values[d].length; i++) { assertEquals(values[d][i], longs.get(i).longValue()); - assertEquals(values[d][i], longs.getValues().get(i).longValue()); } - Exception e = expectThrows(UnsupportedOperationException.class, () -> longs.getValues().add(100L)); + Exception e = expectThrows(UnsupportedOperationException.class, () -> longs.add(100L)); assertEquals("doc values are unmodifiable", e.getMessage()); } - - /* - * Invoke getValues() without any permissions to verify it still works. - * This is done using the callback created above, which creates a temp - * directory, which is not possible with "noPermission". - */ - PermissionCollection noPermissions = new Permissions(); - AccessControlContext noPermissionsAcc = new AccessControlContext( - new ProtectionDomain[] { - new ProtectionDomain(null, noPermissions) - } - ); - AccessController.doPrivileged(new PrivilegedAction(){ - public Void run() { - longs.getValues(); - return null; - } - }, noPermissionsAcc); - - assertThat(warnings, contains( - "Deprecated getValues used, the field is a list and should be accessed directly." - + " For example, use doc['foo'] instead of doc['foo'].values.")); - assertThat(keys, contains("ScriptDocValues#getValues")); - } - private Longs wrap(long[][] values, BiConsumer deprecationCallback) { + private Longs wrap(long[][] values) { return new Longs(new AbstractSortedNumericDocValues() { long[] current; int i; @@ -121,6 +76,6 @@ public class ScriptDocValuesLongsTests extends ESTestCase { public long nextValue() { return current[i++]; } - }, deprecationCallback); + }); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index 9d5b44b6726..072170aff09 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -61,13 +61,13 @@ public class DateFieldTypeTests extends FieldTypeTestCase { addModifier(new Modifier("format", false) { @Override public void modify(MappedFieldType ft) { - ((DateFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("basic_week_date", Locale.ROOT)); + ((DateFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("basic_week_date")); } }); addModifier(new Modifier("locale", false) { @Override public void modify(MappedFieldType ft) { - ((DateFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("date_optional_time", Locale.CANADA)); + ((DateFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("date_optional_time").withLocale(Locale.CANADA)); } }); nowInMillis = randomNonNegativeLong(); @@ -144,7 +144,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase { assertEquals("2015-10-12T15:10:55.000+01:00", ft.docValueFormat(null, DateTimeZone.forOffsetHours(1)).format(instant)); assertEquals("2015", - createDefaultFieldType().docValueFormat("YYYY", DateTimeZone.UTC).format(instant)); + createDefaultFieldType().docValueFormat("yyyy", 
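// NOTE (sketch): in java.time date patterns "YYYY" is the week-based year while "yyyy" is the
// year-of-era, so a "YYYY"-based format can print the wrong year for dates near the New Year
// boundary; these tests therefore switch the patterns to "yyyy".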
DateTimeZone.UTC).format(instant)); assertEquals(instant, ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", false, null)); assertEquals(instant + 999, diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java index 460f7527961..f04a193ef96 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java @@ -62,13 +62,13 @@ public class RangeFieldTypeTests extends FieldTypeTestCase { addModifier(new Modifier("format", true) { @Override public void modify(MappedFieldType ft) { - ((RangeFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("basic_week_date", Locale.ROOT)); + ((RangeFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("basic_week_date")); } }); addModifier(new Modifier("locale", true) { @Override public void modify(MappedFieldType ft) { - ((RangeFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("date_optional_time", Locale.CANADA)); + ((RangeFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("date_optional_time").withLocale(Locale.CANADA)); } }); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java index 574d4eee70a..0b805eb7266 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java @@ -95,7 +95,7 @@ public class RootObjectMapperTests extends ESSingleNodeTestCase { String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") - .field("dynamic_date_formats", Arrays.asList("YYYY-MM-dd")) + .field("dynamic_date_formats", Arrays.asList("yyyy-MM-dd")) .endObject() .endObject()); MapperService mapperService = createIndex("test").mapperService(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index f4856d51a2d..56caa94466c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -57,6 +57,7 @@ import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import org.elasticsearch.index.query.MatchPhraseQueryBuilder; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.search.MatchQuery; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -251,7 +252,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { IndexShard shard = indexService.getShard(0); shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, - sourceToParse, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + sourceToParse, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); shard.refresh("test"); try (Engine.Searcher searcher = shard.acquireSearcher("test")) { LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader(); @@ -293,7 +294,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { IndexShard shard = indexService.getShard(0); 
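// NOTE (sketch): the two new arguments "SequenceNumbers.UNASSIGNED_SEQ_NO, 0" fill the
// if-seq-no/if-primary-term parameters that applyIndexOperationOnPrimary now takes before the
// auto-generated timestamp; UNASSIGNED_SEQ_NO with term 0 disables the compare-and-set check,
// preserving the old unconditional-write semantics at these call sites. A conditional write
// (expectedSeqNo and expectedPrimaryTerm are hypothetical values) would instead look like:
//   shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, sourceToParse,
//       expectedSeqNo, expectedPrimaryTerm, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false);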
shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, - sourceToParse, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + sourceToParse, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); shard.refresh("test"); try (Engine.Searcher searcher = shard.acquireSearcher("test")) { LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader(); diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalBuilderTests.java new file mode 100644 index 00000000000..a565db41516 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalBuilderTests.java @@ -0,0 +1,141 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.analysis.CachingTokenFilter; +import org.apache.lucene.analysis.CannedTokenStream; +import org.apache.lucene.analysis.Token; +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.search.intervals.Intervals; +import org.apache.lucene.search.intervals.IntervalsSource; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +public class IntervalBuilderTests extends ESTestCase { + + private static final IntervalBuilder BUILDER = new IntervalBuilder("field1", new StandardAnalyzer()); + + public void testSimpleTerm() throws IOException { + + CannedTokenStream ts = new CannedTokenStream(new Token("term1", 1, 2)); + + IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, true); + IntervalsSource expected = Intervals.term("term1"); + + assertEquals(expected, source); + } + + public void testOrdered() throws IOException { + + CannedTokenStream ts = new CannedTokenStream( + new Token("term1", 1, 2), + new Token("term2", 3, 4), + new Token("term3", 5, 6) + ); + + IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, true); + IntervalsSource expected = Intervals.ordered( + Intervals.term("term1"), Intervals.term("term2"), Intervals.term("term3") + ); + + assertEquals(expected, source); + + } + + public void testUnordered() throws IOException { + + CannedTokenStream ts = new CannedTokenStream( + new Token("term1", 1, 2), + new Token("term2", 3, 4), + new Token("term3", 5, 6) + ); + + IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, false); + IntervalsSource expected = Intervals.unordered( + Intervals.term("term1"), Intervals.term("term2"), Intervals.term("term3") + ); + + assertEquals(expected, source); + + } + + public void testPhrase() throws IOException { + + CannedTokenStream ts = new CannedTokenStream( + new Token("term1", 1, 2), + new Token("term2", 3, 4), + new Token("term3", 
5, 6) + ); + + IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), 0, true); + IntervalsSource expected = Intervals.phrase( + Intervals.term("term1"), Intervals.term("term2"), Intervals.term("term3") + ); + + assertEquals(expected, source); + + } + + public void testSimpleSynonyms() throws IOException { + + CannedTokenStream ts = new CannedTokenStream( + new Token("term1", 1, 2), + new Token("term2", 3, 4), + new Token("term4", 0, 3, 4), + new Token("term3", 5, 6) + ); + + IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, true); + IntervalsSource expected = Intervals.ordered( + Intervals.term("term1"), Intervals.or(Intervals.term("term2"), Intervals.term("term4")), Intervals.term("term3") + ); + + assertEquals(expected, source); + + } + + public void testGraphSynonyms() throws IOException { + + // term1 term2/term3:2 term4 term5 + + Token graphToken = new Token("term2", 3, 4); + graphToken.setPositionLength(2); + + CannedTokenStream ts = new CannedTokenStream( + new Token("term1", 1, 2), + graphToken, + new Token("term3", 0, 3, 4), + new Token("term4", 5, 6), + new Token("term5", 6, 7) + ); + + IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, true); + IntervalsSource expected = Intervals.ordered( + Intervals.term("term1"), + Intervals.or(Intervals.term("term2"), Intervals.phrase("term3", "term4")), + Intervals.term("term5") + ); + + assertEquals(expected, source); + + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java new file mode 100644 index 00000000000..06ab542ebc0 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java @@ -0,0 +1,280 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.intervals.IntervalQuery; +import org.apache.lucene.search.intervals.Intervals; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.test.AbstractQueryTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class IntervalQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected IntervalQueryBuilder doCreateTestQueryBuilder() { + return new IntervalQueryBuilder(STRING_FIELD_NAME, createRandomSource()); + } + + @Override + public void testUnknownField() throws IOException { + super.testUnknownField(); + } + + private static final String[] filters = new String[]{ + "containing", "contained_by", "not_containing", "not_contained_by", "not_overlapping" + }; + + private IntervalsSourceProvider.IntervalFilter createRandomFilter() { + if (randomInt(20) > 18) { + return new IntervalsSourceProvider.IntervalFilter(createRandomSource(), randomFrom(filters)); + } + return null; + } + + private IntervalsSourceProvider createRandomSource() { + switch (randomInt(20)) { + case 0: + case 1: + int orCount = randomInt(4) + 1; + List orSources = new ArrayList<>(); + for (int i = 0; i < orCount; i++) { + orSources.add(createRandomSource()); + } + return new IntervalsSourceProvider.Disjunction(orSources, createRandomFilter()); + case 2: + case 3: + int count = randomInt(5) + 1; + List subSources = new ArrayList<>(); + for (int i = 0; i < count; i++) { + subSources.add(createRandomSource()); + } + boolean ordered = randomBoolean(); + int maxGaps = randomInt(5) - 1; + IntervalsSourceProvider.IntervalFilter filter = createRandomFilter(); + return new IntervalsSourceProvider.Combine(subSources, ordered, maxGaps, filter); + default: + int wordCount = randomInt(4) + 1; + List words = new ArrayList<>(); + for (int i = 0; i < wordCount; i++) { + words.add(randomRealisticUnicodeOfLengthBetween(4, 20)); + } + String text = String.join(" ", words); + boolean mOrdered = randomBoolean(); + int maxMGaps = randomInt(5) - 1; + String analyzer = randomFrom("simple", "keyword", "whitespace"); + return new IntervalsSourceProvider.Match(text, maxMGaps, mOrdered, analyzer, createRandomFilter()); + } + } + + @Override + protected void doAssertLuceneQuery(IntervalQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { + assertThat(query, instanceOf(IntervalQuery.class)); + } + + public void testMatchInterval() throws IOException { + + String json = "{ \"intervals\" : " + + "{ \"" + STRING_FIELD_NAME + "\" : { \"match\" : { \"query\" : \"Hello world\" } } } }"; + + IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); + Query expected = new IntervalQuery(STRING_FIELD_NAME, + Intervals.unordered(Intervals.term("hello"), Intervals.term("world"))); + + assertEquals(expected, builder.toQuery(createShardContext())); + + json = "{ \"intervals\" : " + + "{ \"" + STRING_FIELD_NAME + "\" : { " + + " \"match\" : { " + + " \"query\" : \"Hello world\"," + + " \"max_gaps\" : 40 } } } }"; + + builder = (IntervalQueryBuilder) parseQuery(json); + expected = new IntervalQuery(STRING_FIELD_NAME, + Intervals.maxgaps(40, 
Intervals.unordered(Intervals.term("hello"), Intervals.term("world")))); + assertEquals(expected, builder.toQuery(createShardContext())); + + json = "{ \"intervals\" : " + + "{ \"" + STRING_FIELD_NAME + "\" : { " + + " \"match\" : { " + + " \"query\" : \"Hello world\"," + + " \"ordered\" : true }," + + " \"boost\" : 2 } } }"; + + builder = (IntervalQueryBuilder) parseQuery(json); + expected = new BoostQuery(new IntervalQuery(STRING_FIELD_NAME, + Intervals.ordered(Intervals.term("hello"), Intervals.term("world"))), 2); + assertEquals(expected, builder.toQuery(createShardContext())); + + json = "{ \"intervals\" : " + + "{ \"" + STRING_FIELD_NAME + "\" : { " + + " \"match\" : { " + + " \"query\" : \"Hello world\"," + + " \"max_gaps\" : 10," + + " \"analyzer\" : \"whitespace\"," + + " \"ordered\" : true } } } }"; + + builder = (IntervalQueryBuilder) parseQuery(json); + expected = new IntervalQuery(STRING_FIELD_NAME, + Intervals.maxgaps(10, Intervals.ordered(Intervals.term("Hello"), Intervals.term("world")))); + assertEquals(expected, builder.toQuery(createShardContext())); + + json = "{ \"intervals\" : " + + "{ \"" + STRING_FIELD_NAME + "\" : { " + + " \"match\" : { " + + " \"query\" : \"Hello world\"," + + " \"max_gaps\" : 10," + + " \"analyzer\" : \"whitespace\"," + + " \"ordered\" : true," + + " \"filter\" : {" + + " \"containing\" : {" + + " \"match\" : { \"query\" : \"blah\" } } } } } } }"; + + builder = (IntervalQueryBuilder) parseQuery(json); + expected = new IntervalQuery(STRING_FIELD_NAME, + Intervals.containing(Intervals.maxgaps(10, Intervals.ordered(Intervals.term("Hello"), Intervals.term("world"))), + Intervals.term("blah"))); + assertEquals(expected, builder.toQuery(createShardContext())); + } + + public void testOrInterval() throws IOException { + + String json = "{ \"intervals\" : { \"" + STRING_FIELD_NAME + "\": {" + + " \"any_of\" : { " + + " \"intervals\" : [" + + " { \"match\" : { \"query\" : \"one\" } }," + + " { \"match\" : { \"query\" : \"two\" } } ] } } } }"; + IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); + Query expected = new IntervalQuery(STRING_FIELD_NAME, + Intervals.or(Intervals.term("one"), Intervals.term("two"))); + assertEquals(expected, builder.toQuery(createShardContext())); + + json = "{ \"intervals\" : { \"" + STRING_FIELD_NAME + "\": {" + + " \"any_of\" : { " + + " \"intervals\" : [" + + " { \"match\" : { \"query\" : \"one\" } }," + + " { \"match\" : { \"query\" : \"two\" } } ]," + + " \"filter\" : {" + + " \"not_containing\" : { \"match\" : { \"query\" : \"three\" } } } } } } }"; + builder = (IntervalQueryBuilder) parseQuery(json); + expected = new IntervalQuery(STRING_FIELD_NAME, + Intervals.notContaining( + Intervals.or(Intervals.term("one"), Intervals.term("two")), + Intervals.term("three"))); + assertEquals(expected, builder.toQuery(createShardContext())); + } + + public void testCombineInterval() throws IOException { + + String json = "{ \"intervals\" : { \"" + STRING_FIELD_NAME + "\": {" + + " \"all_of\" : {" + + " \"ordered\" : true," + + " \"intervals\" : [" + + " { \"match\" : { \"query\" : \"one\" } }," + + " { \"all_of\" : { " + + " \"ordered\" : false," + + " \"intervals\" : [" + + " { \"match\" : { \"query\" : \"two\" } }," + + " { \"match\" : { \"query\" : \"three\" } } ] } } ]," + + " \"max_gaps\" : 30," + + " \"filter\" : { " + + " \"contained_by\" : { " + + " \"match\" : { " + + " \"query\" : \"SENTENCE\"," + + " \"analyzer\" : \"keyword\" } } } }," + + " \"boost\" : 1.5 } } }"; + IntervalQueryBuilder builder = 
(IntervalQueryBuilder) parseQuery(json); + Query expected = new BoostQuery(new IntervalQuery(STRING_FIELD_NAME, + Intervals.containedBy( + Intervals.maxgaps(30, Intervals.ordered( + Intervals.term("one"), + Intervals.unordered(Intervals.term("two"), Intervals.term("three")))), + Intervals.term("SENTENCE"))), 1.5f); + assertEquals(expected, builder.toQuery(createShardContext())); + + } + + public void testCombineDisjunctionInterval() throws IOException { + String json = "{ \"intervals\" : " + + "{ \"" + STRING_FIELD_NAME + "\": { " + + " \"all_of\" : {" + + " \"ordered\" : true," + + " \"intervals\" : [" + + " { \"match\" : { \"query\" : \"atmosphere\" } }," + + " { \"any_of\" : {" + + " \"intervals\" : [" + + " { \"match\" : { \"query\" : \"cold\" } }," + + " { \"match\" : { \"query\" : \"outside\" } } ] } } ]," + + " \"max_gaps\" : 30," + + " \"filter\" : { " + + " \"not_contained_by\" : { " + + " \"match\" : { \"query\" : \"freeze\" } } } } } } }"; + + IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); + Query expected = new IntervalQuery(STRING_FIELD_NAME, + Intervals.notContainedBy( + Intervals.maxgaps(30, Intervals.ordered( + Intervals.term("atmosphere"), + Intervals.or(Intervals.term("cold"), Intervals.term("outside")) + )), + Intervals.term("freeze"))); + assertEquals(expected, builder.toQuery(createShardContext())); + } + + public void testNonIndexedFields() throws IOException { + IntervalsSourceProvider provider = createRandomSource(); + IntervalQueryBuilder b = new IntervalQueryBuilder("no_such_field", provider); + assertThat(b.toQuery(createShardContext()), equalTo(new MatchNoDocsQuery())); + + Exception e = expectThrows(IllegalArgumentException.class, () -> { + IntervalQueryBuilder builder = new IntervalQueryBuilder(INT_FIELD_NAME, provider); + builder.doToQuery(createShardContext()); + }); + assertThat(e.getMessage(), equalTo("Cannot create IntervalQuery over field [" + INT_FIELD_NAME + "] with no indexed positions")); + + e = expectThrows(IllegalArgumentException.class, () -> { + IntervalQueryBuilder builder = new IntervalQueryBuilder(STRING_FIELD_NAME_2, provider); + builder.doToQuery(createShardContext()); + }); + assertThat(e.getMessage(), equalTo("Cannot create IntervalQuery over field [" + + STRING_FIELD_NAME_2 + "] with no indexed positions")); + } + + public void testMultipleProviders() { + String json = "{ \"intervals\" : { \"" + STRING_FIELD_NAME + "\": { " + + "\"boost\" : 1," + + "\"match\" : { \"query\" : \"term1\" }," + + "\"all_of\" : { \"intervals\" : [ { \"query\" : \"term2\" } ] } }"; + + ParsingException e = expectThrows(ParsingException.class, () -> { + parseQuery(json); + }); + assertThat(e.getMessage(), equalTo("Only one interval rule can be specified, found [match] and [all_of]")); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java index 58604f7e83c..43c76f028e2 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java @@ -118,9 +118,6 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase= 3 && queryText.substring(0,3).equalsIgnoreCase("now")) { fields.put(STRING_FIELD_NAME_2, 2.0f / randomIntBetween(1, 20)); } diff --git a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java 
b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index f4c29800cdb..747d951d5a8 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -46,6 +46,7 @@ import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.engine.InternalEngineFactory; import org.elasticsearch.index.engine.InternalEngineTests; import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardTestCase; import org.elasticsearch.index.shard.PrimaryReplicaSyncer; @@ -210,7 +211,7 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC Versions.MATCH_ANY, VersionType.INTERNAL, SourceToParse.source("index", "type", "primary", new BytesArray("{}"), XContentType.JSON), - randomNonNegativeLong(), + SequenceNumbers.UNASSIGNED_SEQ_NO, 0, randomNonNegativeLong(), false); } final IndexShard recoveredReplica = diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java index dd14f5f8544..6035a81a1b9 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -63,6 +63,7 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.SegmentsStats; import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -356,7 +357,7 @@ public class IndexShardIT extends ESSingleNodeTestCase { assertFalse(shard.shouldPeriodicallyFlush()); shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, SourceToParse.source("test", "test", "1", new BytesArray("{}"), XContentType.JSON), - IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); assertTrue(shard.shouldPeriodicallyFlush()); final Translog translog = getTranslog(shard); assertEquals(2, translog.stats().getUncommittedOperations()); @@ -406,7 +407,7 @@ public class IndexShardIT extends ESSingleNodeTestCase { assertThat(translog.currentFileGeneration(), equalTo(generation + rolls)); final Engine.IndexResult result = shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, SourceToParse.source("test", "test", "1", new BytesArray("{}"), XContentType.JSON), - IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); final Translog.Location location = result.getTranslogLocation(); shard.afterWriteOperation(); if (location.translogLocation + location.size > generationThreshold) { diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index fb77f1def5a..7679595a7fa 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -172,6 
+172,7 @@ import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting import static org.elasticsearch.common.lucene.Lucene.cleanLuceneIndex; import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.elasticsearch.repositories.RepositoryData.EMPTY_REPO_GEN; import static org.elasticsearch.test.hamcrest.RegexMatcher.matches; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -323,10 +324,10 @@ public class IndexShardTests extends IndexShardTestCase { expectThrows(IndexShardClosedException.class, () -> indexShard.acquireAllPrimaryOperationsPermits(null, TimeValue.timeValueSeconds(30L))); expectThrows(IndexShardClosedException.class, - () -> indexShard.acquireReplicaOperationPermit(indexShard.getPendingPrimaryTerm(), SequenceNumbers.UNASSIGNED_SEQ_NO, + () -> indexShard.acquireReplicaOperationPermit(indexShard.getPendingPrimaryTerm(), UNASSIGNED_SEQ_NO, randomNonNegativeLong(), null, ThreadPool.Names.WRITE, "")); expectThrows(IndexShardClosedException.class, - () -> indexShard.acquireAllReplicaOperationsPermits(indexShard.getPendingPrimaryTerm(), SequenceNumbers.UNASSIGNED_SEQ_NO, + () -> indexShard.acquireAllReplicaOperationsPermits(indexShard.getPendingPrimaryTerm(), UNASSIGNED_SEQ_NO, randomNonNegativeLong(), null, TimeValue.timeValueSeconds(30L))); } @@ -334,7 +335,7 @@ public class IndexShardTests extends IndexShardTestCase { IndexShard indexShard = newShard(false); expectThrows(IndexShardNotStartedException.class, () -> randomReplicaOperationPermitAcquisition(indexShard, indexShard.getPendingPrimaryTerm() + randomIntBetween(1, 100), - SequenceNumbers.UNASSIGNED_SEQ_NO, randomNonNegativeLong(), null, "")); + UNASSIGNED_SEQ_NO, randomNonNegativeLong(), null, "")); closeShards(indexShard); } @@ -828,7 +829,7 @@ public class IndexShardTests extends IndexShardTestCase { newGlobalCheckPoint = randomIntBetween((int) indexShard.getGlobalCheckpoint(), (int) localCheckPoint); } final long expectedLocalCheckpoint; - if (newGlobalCheckPoint == SequenceNumbers.UNASSIGNED_SEQ_NO) { + if (newGlobalCheckPoint == UNASSIGNED_SEQ_NO) { expectedLocalCheckpoint = SequenceNumbers.NO_OPS_PERFORMED; } else { expectedLocalCheckpoint = newGlobalCheckPoint; @@ -1039,10 +1040,10 @@ public class IndexShardTests extends IndexShardTestCase { indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED)); final long maxSeqNo = indexShard.seqNoStats().getMaxSeqNo(); - final long globalCheckpointOnReplica = randomLongBetween(SequenceNumbers.UNASSIGNED_SEQ_NO, indexShard.getLocalCheckpoint()); + final long globalCheckpointOnReplica = randomLongBetween(UNASSIGNED_SEQ_NO, indexShard.getLocalCheckpoint()); indexShard.updateGlobalCheckpointOnReplica(globalCheckpointOnReplica, "test"); - final long globalCheckpoint = randomLongBetween(SequenceNumbers.UNASSIGNED_SEQ_NO, indexShard.getLocalCheckpoint()); + final long globalCheckpoint = randomLongBetween(UNASSIGNED_SEQ_NO, indexShard.getLocalCheckpoint()); final long currentMaxSeqNoOfUpdates = indexShard.getMaxSeqNoOfUpdatesOrDeletes(); final long maxSeqNoOfUpdatesOrDeletes = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, maxSeqNo); final Set docsBeforeRollback = getShardDocUIDs(indexShard); @@ -1104,9 +1105,9 @@ public class IndexShardTests extends IndexShardTestCase { final int operations = 1024 - scaledRandomIntBetween(0, 
1024); indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED)); - final long globalCheckpointOnReplica = randomLongBetween(SequenceNumbers.UNASSIGNED_SEQ_NO, indexShard.getLocalCheckpoint()); + final long globalCheckpointOnReplica = randomLongBetween(UNASSIGNED_SEQ_NO, indexShard.getLocalCheckpoint()); indexShard.updateGlobalCheckpointOnReplica(globalCheckpointOnReplica, "test"); - final long globalCheckpoint = randomLongBetween(SequenceNumbers.UNASSIGNED_SEQ_NO, indexShard.getLocalCheckpoint()); + final long globalCheckpoint = randomLongBetween(UNASSIGNED_SEQ_NO, indexShard.getLocalCheckpoint()); Set docsBelowGlobalCheckpoint = getShardDocUIDs(indexShard).stream() .filter(id -> Long.parseLong(id) <= Math.max(globalCheckpointOnReplica, globalCheckpoint)).collect(Collectors.toSet()); final CountDownLatch latch = new CountDownLatch(1); @@ -1132,7 +1133,7 @@ public class IndexShardTests extends IndexShardTestCase { }, ""); latch.await(); - if (globalCheckpointOnReplica == SequenceNumbers.UNASSIGNED_SEQ_NO && globalCheckpoint == SequenceNumbers.UNASSIGNED_SEQ_NO) { + if (globalCheckpointOnReplica == UNASSIGNED_SEQ_NO && globalCheckpoint == UNASSIGNED_SEQ_NO) { assertThat(indexShard.getLocalCheckpoint(), equalTo(SequenceNumbers.NO_OPS_PERFORMED)); } else { assertThat(indexShard.getLocalCheckpoint(), equalTo(Math.max(globalCheckpoint, globalCheckpointOnReplica))); @@ -2097,7 +2098,7 @@ public class IndexShardTests extends IndexShardTestCase { RecoverySource.ExistingStoreRecoverySource.INSTANCE); final Snapshot snapshot = new Snapshot("foo", new SnapshotId("bar", UUIDs.randomBase64UUID())); routing = ShardRoutingHelper.newWithRestoreSource(routing, - new RecoverySource.SnapshotRecoverySource(snapshot, Version.CURRENT, "test")); + new RecoverySource.SnapshotRecoverySource(UUIDs.randomBase64UUID(), snapshot, Version.CURRENT, "test")); target = reinitShard(target, routing); Store sourceStore = source.store(); Store targetStore = target.store(); @@ -3711,10 +3712,11 @@ public class IndexShardTests extends IndexShardTestCase { Engine.IndexResult indexResult = indexDoc(shard, "some_type", "id", "{}"); assertTrue(indexResult.isCreated()); - DeleteResult deleteResult = shard.applyDeleteOperationOnPrimary(Versions.MATCH_ANY, "some_other_type", "id", VersionType.INTERNAL); + DeleteResult deleteResult = shard.applyDeleteOperationOnPrimary(Versions.MATCH_ANY, "some_other_type", "id", VersionType.INTERNAL, + UNASSIGNED_SEQ_NO, 0); assertFalse(deleteResult.isFound()); - deleteResult = shard.applyDeleteOperationOnPrimary(Versions.MATCH_ANY, "_doc", "id", VersionType.INTERNAL); + deleteResult = shard.applyDeleteOperationOnPrimary(Versions.MATCH_ANY, "_doc", "id", VersionType.INTERNAL, UNASSIGNED_SEQ_NO, 0); assertTrue(deleteResult.isFound()); closeShards(shard); diff --git a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java index 62f6a7b2345..282404de9a4 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java @@ -28,7 +28,11 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.ByteBufferStreamInput; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import 
org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.ToXContent; @@ -38,6 +42,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; import java.io.IOException; @@ -76,6 +81,7 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase { // Index doc but not advance local checkpoint. shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, SourceToParse.source(shard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON), + SequenceNumbers.UNASSIGNED_SEQ_NO, 0, randomBoolean() ? IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP : randomNonNegativeLong(), true); } @@ -145,7 +151,7 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase { // Index doc but not advance local checkpoint. shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, SourceToParse.source(shard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON), - IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); } String allocationId = shard.routingEntry().allocationId().getId(); @@ -196,6 +202,19 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase { assertEquals(status, serializedStatus); } + public void testStatusSerializationAsNamedWriteable() throws IOException { + PrimaryReplicaSyncer.ResyncTask.Status status = new PrimaryReplicaSyncer.ResyncTask.Status(randomAlphaOfLength(10), + randomIntBetween(0, 1000), randomIntBetween(0, 1000), randomIntBetween(0, 1000)); + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeNamedWriteable(status); + try (StreamInput in = new NamedWriteableAwareStreamInput( + new ByteBufferStreamInput(ByteBuffer.wrap(out.bytes().toBytesRef().bytes)), + new NamedWriteableRegistry(NetworkModule.getNamedWriteables()))) { + assertThat(in.readNamedWriteable(Task.Status.class), equalTo(status)); + } + } + } + public void testStatusEquals() throws IOException { PrimaryReplicaSyncer.ResyncTask task = new PrimaryReplicaSyncer.ResyncTask(0, "type", "action", "desc", null, Collections.emptyMap()); diff --git a/server/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java b/server/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java index f4db4925da4..fd96179a4c3 100644 --- a/server/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java +++ b/server/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java @@ -67,7 +67,8 @@ public class IndexLifecycleActionIT extends ESIntegTestCase { final String node1 = getLocalNodeId(server_1); logger.info("Creating index [test]"); - CreateIndexResponse createIndexResponse = client().admin().indices().create(createIndexRequest("test").settings(settings)).actionGet(); + CreateIndexResponse createIndexResponse = client().admin().indices().create( + 
createIndexRequest("test").settings(settings)).actionGet(); assertAcked(createIndexResponse); ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); @@ -90,7 +91,8 @@ public class IndexLifecycleActionIT extends ESIntegTestCase { // explicitly call reroute, so shards will get relocated to the new node (we delay it in ES in case other nodes join) client().admin().cluster().prepareReroute().execute().actionGet(); - clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForGreenStatus().waitForNodes("2").waitForNoRelocatingShards(true)).actionGet(); + clusterHealth = client().admin().cluster().health( + clusterHealthRequest().waitForGreenStatus().waitForNodes("2").waitForNoRelocatingShards(true)).actionGet(); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); assertThat(clusterHealth.getNumberOfDataNodes(), equalTo(2)); @@ -127,7 +129,8 @@ public class IndexLifecycleActionIT extends ESIntegTestCase { // explicitly call reroute, so shards will get relocated to the new node (we delay it in ES in case other nodes join) client().admin().cluster().prepareReroute().execute().actionGet(); - clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForGreenStatus().waitForNodes("3").waitForNoRelocatingShards(true)).actionGet(); + clusterHealth = client().admin().cluster().health( + clusterHealthRequest().waitForGreenStatus().waitForNodes("3").waitForNoRelocatingShards(true)).actionGet(); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); assertThat(clusterHealth.getNumberOfDataNodes(), equalTo(3)); @@ -145,7 +148,9 @@ public class IndexLifecycleActionIT extends ESIntegTestCase { routingNodeEntry2 = clusterState.getRoutingNodes().node(node2); RoutingNode routingNodeEntry3 = clusterState.getRoutingNodes().node(node3); - assertThat(routingNodeEntry1.numberOfShardsWithState(STARTED) + routingNodeEntry2.numberOfShardsWithState(STARTED) + routingNodeEntry3.numberOfShardsWithState(STARTED), equalTo(22)); + assertThat(routingNodeEntry1.numberOfShardsWithState(STARTED) + + routingNodeEntry2.numberOfShardsWithState(STARTED) + + routingNodeEntry3.numberOfShardsWithState(STARTED), equalTo(22)); assertThat(routingNodeEntry1.numberOfShardsWithState(RELOCATING), equalTo(0)); assertThat(routingNodeEntry1.numberOfShardsWithState(STARTED), anyOf(equalTo(7), equalTo(8))); @@ -168,7 +173,8 @@ public class IndexLifecycleActionIT extends ESIntegTestCase { client().admin().cluster().prepareReroute().get(); - clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForGreenStatus().waitForNoRelocatingShards(true).waitForNodes("2")).actionGet(); + clusterHealth = client().admin().cluster().health( + clusterHealthRequest().waitForGreenStatus().waitForNoRelocatingShards(true).waitForNodes("2")).actionGet(); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); assertThat(clusterHealth.getRelocatingShards(), equalTo(0)); @@ -211,7 +217,9 @@ public class IndexLifecycleActionIT extends ESIntegTestCase { } private void assertNodesPresent(RoutingNodes routingNodes, String... 
nodes) {
-        final Set<String> keySet = StreamSupport.stream(routingNodes.spliterator(), false).map((p) -> (p.nodeId())).collect(Collectors.toSet());
+        final Set<String> keySet = StreamSupport.stream(routingNodes.spliterator(), false)
+            .map(RoutingNode::nodeId)
+            .collect(Collectors.toSet());
         assertThat(keySet, containsInAnyOrder(nodes));
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java
index 97207f28df4..ea15eceb8be 100644
--- a/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java
+++ b/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java
@@ -499,6 +499,7 @@ public class IndexRecoveryIT extends ESIntegTestCase {
         for (RecoveryState recoveryState : recoveryStates) {
             SnapshotRecoverySource recoverySource = new SnapshotRecoverySource(
+                ((SnapshotRecoverySource)recoveryState.getRecoverySource()).restoreUUID(),
                 new Snapshot(REPO_NAME, createSnapshotResponse.getSnapshotInfo().snapshotId()),
                 Version.CURRENT, INDEX_NAME);
             assertRecoveryState(recoveryState, 0, recoverySource, true, Stage.DONE, null, nodeA);
diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java
index 2a53c79448d..694032bd988 100644
--- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java
@@ -314,7 +314,7 @@ public class RecoveryTests extends ESIndexLevelReplicationTestCase {
             Engine.IndexResult result = primaryShard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL,
                 SourceToParse.source(primaryShard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON),
-                IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false);
+                SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false);
             assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
             if (randomBoolean()) {
                 globalCheckpoint = randomLongBetween(globalCheckpoint, i);
diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java
new file mode 100644
index 00000000000..cea3e9727e2
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ + +package org.elasticsearch.rest.action.document; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestRequest.Method; +import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.test.rest.RestActionTestCase; +import org.junit.Before; + +public class RestUpdateActionTests extends RestActionTestCase { + + @Before + public void setUpAction() { + new RestUpdateAction(Settings.EMPTY, controller()); + } + + public void testTypeInPath() { + RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()) + .withMethod(Method.POST) + .withPath("/some_index/some_type/some_id/_update") + .build(); + dispatchRequest(deprecatedRequest); + assertWarnings(RestUpdateAction.TYPES_DEPRECATION_MESSAGE); + + RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()) + .withMethod(Method.POST) + .withPath("/some_index/_update/some_id") + .build(); + dispatchRequest(validRequest); + } +} diff --git a/server/src/test/java/org/elasticsearch/script/JodaCompatibleZonedDateTimeTests.java b/server/src/test/java/org/elasticsearch/script/JodaCompatibleZonedDateTimeTests.java index f5c7edea5b4..f01079d092f 100644 --- a/server/src/test/java/org/elasticsearch/script/JodaCompatibleZonedDateTimeTests.java +++ b/server/src/test/java/org/elasticsearch/script/JodaCompatibleZonedDateTimeTests.java @@ -255,4 +255,23 @@ public class JodaCompatibleZonedDateTimeTests extends ESTestCase { JodaCompatibleZonedDateTime dt = new JodaCompatibleZonedDateTime(Instant.EPOCH, ZoneOffset.ofTotalSeconds(0)); assertMethodDeprecation(() -> dt.toString("yyyy-MM-dd hh:mm"), "toString(String)", "a DateTimeFormatter"); } + + public void testIsEqual() { + assertTrue(javaTime.isEqual(javaTime)); + } + + public void testIsAfter() { + long millis = randomLongBetween(0, Integer.MAX_VALUE / 2); + JodaCompatibleZonedDateTime beforeTime = new JodaCompatibleZonedDateTime(Instant.ofEpochMilli(millis), ZoneOffset.ofHours(-7)); + millis = randomLongBetween(millis + 1, Integer.MAX_VALUE); + JodaCompatibleZonedDateTime afterTime = new JodaCompatibleZonedDateTime(Instant.ofEpochMilli(millis), ZoneOffset.ofHours(-7)); + assertTrue(afterTime.isAfter(beforeTime)); + } + public void testIsBefore() { + long millis = randomLongBetween(0, Integer.MAX_VALUE / 2); + JodaCompatibleZonedDateTime beforeTime = new JodaCompatibleZonedDateTime(Instant.ofEpochMilli(millis), ZoneOffset.ofHours(-7)); + millis = randomLongBetween(millis + 1, Integer.MAX_VALUE); + JodaCompatibleZonedDateTime afterTime = new JodaCompatibleZonedDateTime(Instant.ofEpochMilli(millis), ZoneOffset.ofHours(-7)); + assertTrue(beforeTime.isBefore(afterTime)); + } } diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java index 8ea7af82b90..3ad39404afe 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -19,15 +19,6 @@ package org.elasticsearch.search; -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.function.Predicate; - import org.apache.lucene.search.Explanation; import org.apache.lucene.search.TotalHits; import org.elasticsearch.action.OriginalIndices; @@ -52,6 +43,15 @@ import 
org.elasticsearch.search.fetch.subphase.highlight.HighlightFieldTests;
 import org.elasticsearch.test.AbstractStreamableTestCase;
 import org.elasticsearch.test.RandomObjects;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Predicate;
+
 import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
 import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
@@ -65,7 +65,7 @@ public class SearchHitTests extends AbstractStreamableTestCase<SearchHit> {
         return createTestItem(randomFrom(XContentType.values()), withOptionalInnerHits, withShardTarget);
     }

-    public static SearchHit createTestItem(XContentType xContentType, boolean withOptionalInnerHits, boolean withShardTarget) {
+    public static SearchHit createTestItem(XContentType xContentType, boolean withOptionalInnerHits, boolean transportSerialization) {
         int internalId = randomInt();
         String uid = randomAlphaOfLength(10);
         Text type = new Text(randomAlphaOfLengthBetween(5, 10));
@@ -92,7 +92,7 @@ public class SearchHitTests extends AbstractStreamableTestCase<SearchHit> {
             hit.version(randomLong());
         }
         if (randomBoolean()) {
-            hit.sortValues(SearchSortValuesTests.createTestItem());
+            hit.sortValues(SearchSortValuesTests.createTestItem(xContentType, transportSerialization));
         }
         if (randomBoolean()) {
             int size = randomIntBetween(0, 5);
@@ -120,12 +120,12 @@ public class SearchHitTests extends AbstractStreamableTestCase<SearchHit> {
                 Map<String, SearchHits> innerHits = new HashMap<>(innerHitsSize);
                 for (int i = 0; i < innerHitsSize; i++) {
                     innerHits.put(randomAlphaOfLength(5),
-                        SearchHitsTests.createTestItem(xContentType, false, withShardTarget));
+                        SearchHitsTests.createTestItem(xContentType, false, transportSerialization));
                 }
                 hit.setInnerHits(innerHits);
             }
         }
-        if (withShardTarget && randomBoolean()) {
+        if (transportSerialization && randomBoolean()) {
            String index = randomAlphaOfLengthBetween(5, 10);
            String clusterAlias = randomBoolean() ?
null : randomAlphaOfLengthBetween(5, 10); hit.shard(new SearchShardTarget(randomAlphaOfLengthBetween(5, 10), diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java index 05ad84a4cc2..0bc9a72af78 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java @@ -19,11 +19,15 @@ package org.elasticsearch.search; +import org.apache.lucene.search.SortField; import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.TestUtil; +import org.elasticsearch.Version; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.lucene.LuceneTests; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.ToXContent; @@ -34,41 +38,75 @@ import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.test.VersionUtils; import java.io.IOException; +import java.util.Base64; import java.util.Collections; import java.util.function.Predicate; public class SearchHitsTests extends AbstractStreamableXContentTestCase { + public static SearchHits createTestItem(boolean withOptionalInnerHits, boolean withShardTarget) { return createTestItem(randomFrom(XContentType.values()), withOptionalInnerHits, withShardTarget); } private static SearchHit[] createSearchHitArray(int size, XContentType xContentType, boolean withOptionalInnerHits, - boolean withShardTarget) { + boolean transportSerialization) { SearchHit[] hits = new SearchHit[size]; for (int i = 0; i < hits.length; i++) { - hits[i] = SearchHitTests.createTestItem(xContentType, withOptionalInnerHits, withShardTarget); + hits[i] = SearchHitTests.createTestItem(xContentType, withOptionalInnerHits, transportSerialization); } return hits; } - private static TotalHits randomTotalHits() { + private static TotalHits randomTotalHits(TotalHits.Relation relation) { long totalHits = TestUtil.nextLong(random(), 0, Long.MAX_VALUE); - TotalHits.Relation relation = randomFrom(TotalHits.Relation.values()); return new TotalHits(totalHits, relation); } - public static SearchHits createTestItem(XContentType xContentType, boolean withOptionalInnerHits, boolean withShardTarget) { + public static SearchHits createTestItem(XContentType xContentType, boolean withOptionalInnerHits, boolean transportSerialization) { + return createTestItem(xContentType, withOptionalInnerHits, transportSerialization, randomFrom(TotalHits.Relation.values())); + } + + private static SearchHits createTestItem(XContentType xContentType, boolean withOptionalInnerHits, boolean transportSerialization, + TotalHits.Relation totalHitsRelation) { int searchHits = randomIntBetween(0, 5); - SearchHit[] hits = createSearchHitArray(searchHits, xContentType, withOptionalInnerHits, withShardTarget); + SearchHit[] hits = createSearchHitArray(searchHits, xContentType, withOptionalInnerHits, transportSerialization); + TotalHits totalHits = frequently() ? randomTotalHits(totalHitsRelation) : null; float maxScore = frequently() ? randomFloat() : Float.NaN; - return new SearchHits(hits, frequently() ? 
randomTotalHits() : null, maxScore); + SortField[] sortFields = null; + String collapseField = null; + Object[] collapseValues = null; + if (transportSerialization) { + sortFields = randomBoolean() ? createSortFields(randomIntBetween(1, 5)) : null; + collapseField = randomAlphaOfLengthBetween(5, 10); + collapseValues = randomBoolean() ? createCollapseValues(randomIntBetween(1, 10)) : null; + } + return new SearchHits(hits, totalHits, maxScore, sortFields, collapseField, collapseValues); + } + + private static SortField[] createSortFields(int size) { + SortField[] sortFields = new SortField[size]; + for (int i = 0; i < sortFields.length; i++) { + //sort fields are simplified before serialization, we write directly the simplified version + //otherwise equality comparisons become complicated + sortFields[i] = LuceneTests.randomSortField().v2(); + } + return sortFields; + } + + private static Object[] createCollapseValues(int size) { + Object[] collapseValues = new Object[size]; + for (int i = 0; i < collapseValues.length; i++) { + collapseValues[i] = LuceneTests.randomSortValue(); + } + return collapseValues; } @Override protected SearchHits mutateInstance(SearchHits instance) { - switch (randomIntBetween(0, 2)) { + switch (randomIntBetween(0, 5)) { case 0: return new SearchHits(createSearchHitArray(instance.getHits().length + 1, randomFrom(XContentType.values()), false, randomBoolean()), @@ -76,7 +114,7 @@ public class SearchHitsTests extends AbstractStreamableXContentTestCase mutators = new ArrayList<>(); mutators.add(() -> mutation.indices(ArrayUtils.concat(searchRequest.indices(), new String[] { randomAlphaOfLength(10) }))); @@ -151,7 +152,7 @@ public class SearchRequestTests extends AbstractSearchTestCase { mutators.add(() -> mutation.types(ArrayUtils.concat(searchRequest.types(), new String[] { randomAlphaOfLength(10) }))); mutators.add(() -> mutation.preference(randomValueOtherThan(searchRequest.preference(), () -> randomAlphaOfLengthBetween(3, 10)))); mutators.add(() -> mutation.routing(randomValueOtherThan(searchRequest.routing(), () -> randomAlphaOfLengthBetween(3, 10)))); - mutators.add(() -> mutation.requestCache((randomValueOtherThan(searchRequest.requestCache(), () -> randomBoolean())))); + mutators.add(() -> mutation.requestCache((randomValueOtherThan(searchRequest.requestCache(), ESTestCase::randomBoolean)))); mutators.add(() -> mutation .scroll(randomValueOtherThan(searchRequest.scroll(), () -> new Scroll(new TimeValue(randomNonNegativeLong() % 100000))))); mutators.add(() -> mutation.searchType(randomValueOtherThan(searchRequest.searchType(), @@ -161,20 +162,7 @@ public class SearchRequestTests extends AbstractSearchTestCase { return mutation; } - private static SearchRequest copyRequest(SearchRequest searchRequest) throws IOException { - SearchRequest result = new SearchRequest(); - result.indices(searchRequest.indices()); - result.indicesOptions(searchRequest.indicesOptions()); - result.types(searchRequest.types()); - result.searchType(searchRequest.searchType()); - result.preference(searchRequest.preference()); - result.routing(searchRequest.routing()); - result.requestCache(searchRequest.requestCache()); - result.allowPartialSearchResults(searchRequest.allowPartialSearchResults()); - result.scroll(searchRequest.scroll()); - if (searchRequest.source() != null) { - result.source(searchRequest.source()); - } - return result; + private static SearchRequest copyRequest(SearchRequest searchRequest) { + return new SearchRequest(searchRequest); } } diff --git 
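
The hand-rolled copyRequest helper above is replaced by the SearchRequest copy constructor this change introduces; keeping the copy logic on the class means newly added fields cannot be silently missed by an out-of-date helper. A sketch of the usage (index and routing values are illustrative):

    SearchRequest original = new SearchRequest("test-index").routing("user-1");
    SearchRequest copy = new SearchRequest(original);   // copies indices, options, source, scroll, etc.
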
a/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java b/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java index d69039b72f5..f6b8dc828f4 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java @@ -19,108 +19,114 @@ package org.elasticsearch.search; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.lucene.LuceneTests; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.test.RandomObjects; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; -import java.util.List; -import java.util.function.Supplier; -import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; -import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; - -public class SearchSortValuesTests extends ESTestCase { - - public static SearchSortValues createTestItem() { - List> valueSuppliers = new ArrayList<>(); - // this should reflect all values that are allowed to go through the transport layer - valueSuppliers.add(() -> null); - valueSuppliers.add(ESTestCase::randomInt); - valueSuppliers.add(ESTestCase::randomLong); - valueSuppliers.add(ESTestCase::randomDouble); - valueSuppliers.add(ESTestCase::randomFloat); - valueSuppliers.add(ESTestCase::randomByte); - valueSuppliers.add(ESTestCase::randomShort); - valueSuppliers.add(ESTestCase::randomBoolean); - valueSuppliers.add(() -> frequently() ? randomAlphaOfLengthBetween(1, 30) : randomRealisticUnicodeOfCodepointLength(30)); +public class SearchSortValuesTests extends AbstractSerializingTestCase { + public static SearchSortValues createTestItem(XContentType xContentType, boolean transportSerialization) { int size = randomIntBetween(1, 20); Object[] values = new Object[size]; + DocValueFormat[] sortValueFormats = new DocValueFormat[size]; for (int i = 0; i < size; i++) { - Supplier supplier = randomFrom(valueSuppliers); - values[i] = supplier.get(); + Object sortValue = randomSortValue(xContentType, transportSerialization); + values[i] = sortValue; + //make sure that for BytesRef, we provide a specific doc value format that overrides format(BytesRef) + sortValueFormats[i] = sortValue instanceof BytesRef ? 
DocValueFormat.RAW : randomDocValueFormat(); } - return new SearchSortValues(values); + return new SearchSortValues(values, sortValueFormats); } - public void testFromXContent() throws IOException { - SearchSortValues sortValues = createTestItem(); - XContentType xcontentType = randomFrom(XContentType.values()); - boolean humanReadable = randomBoolean(); - BytesReference originalBytes = toShuffledXContent(sortValues, xcontentType, ToXContent.EMPTY_PARAMS, humanReadable); + private static Object randomSortValue(XContentType xContentType, boolean transportSerialization) { + Object randomSortValue = LuceneTests.randomSortValue(); + //to simplify things, we directly serialize what we expect we would parse back when testing xcontent serialization + return transportSerialization ? randomSortValue : RandomObjects.getExpectedParsedValue(xContentType, randomSortValue); + } - SearchSortValues parsed; - try (XContentParser parser = createParser(xcontentType.xContent(), originalBytes)) { - parser.nextToken(); // skip to the elements start array token, fromXContent advances from there if called - parser.nextToken(); - parser.nextToken(); - parsed = SearchSortValues.fromXContent(parser); - parser.nextToken(); - assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); - assertNull(parser.nextToken()); - } - assertToXContentEquivalent(originalBytes, toXContent(parsed, xcontentType, humanReadable), xcontentType); + private static DocValueFormat randomDocValueFormat() { + return randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.RAW, DocValueFormat.IP, DocValueFormat.BINARY, DocValueFormat.GEOHASH); + } + + @Override + protected SearchSortValues doParseInstance(XContentParser parser) throws IOException { + parser.nextToken(); // skip to the elements start array token, fromXContent advances from there if called + parser.nextToken(); + parser.nextToken(); + SearchSortValues searchSortValues = SearchSortValues.fromXContent(parser); + parser.nextToken(); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + assertNull(parser.nextToken()); + return searchSortValues; + } + + @Override + protected SearchSortValues createXContextTestInstance(XContentType xContentType) { + return createTestItem(xContentType, false); + } + + @Override + protected SearchSortValues createTestInstance() { + return createTestItem(randomFrom(XContentType.values()), true); + } + + @Override + protected Writeable.Reader instanceReader() { + return SearchSortValues::new; + } + + @Override + protected String[] getShuffleFieldsExceptions() { + return new String[]{"sort"}; } public void testToXContent() throws IOException { - SearchSortValues sortValues = new SearchSortValues(new Object[]{ 1, "foo", 3.0}); - XContentBuilder builder = JsonXContent.contentBuilder(); - builder.startObject(); - sortValues.toXContent(builder, ToXContent.EMPTY_PARAMS); - builder.endObject(); - assertEquals("{\"sort\":[1,\"foo\",3.0]}", Strings.toString(builder)); - } - - /** - * Test equality and hashCode properties - */ - public void testEqualsAndHashcode() { - checkEqualsAndHashCode(createTestItem(), SearchSortValuesTests::copy, SearchSortValuesTests::mutate); - } - - public void testSerialization() throws IOException { - SearchSortValues sortValues = createTestItem(); - try (BytesStreamOutput output = new BytesStreamOutput()) { - sortValues.writeTo(output); - try (StreamInput in = output.bytes().streamInput()) { - SearchSortValues deserializedCopy = new SearchSortValues(in); - assertEquals(sortValues, deserializedCopy); - 
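
A hypothetical, self-contained illustration of the SearchSortValues shape the rewritten tests build: each raw sort value is paired with a DocValueFormat entry that is applied when the values are rendered to XContent (the literal values here are made up):

    Object[] rawValues = new Object[] { new BytesRef("abc"), 42L, 3.14d };
    // one format per value; RAW also covers BytesRef, matching the test's special-casing above
    DocValueFormat[] formats = new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW, DocValueFormat.RAW };
    SearchSortValues sortValues = new SearchSortValues(rawValues, formats);
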
assertEquals(sortValues.hashCode(), deserializedCopy.hashCode()); - assertNotSame(sortValues, deserializedCopy); - } + { + SearchSortValues sortValues = new SearchSortValues(new Object[]{ 1, "foo", 3.0}); + XContentBuilder builder = JsonXContent.contentBuilder(); + builder.startObject(); + sortValues.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + assertEquals("{\"sort\":[1,\"foo\",3.0]}", Strings.toString(builder)); + } + { + SearchSortValues sortValues = new SearchSortValues(new Object[0]); + XContentBuilder builder = JsonXContent.contentBuilder(); + builder.startObject(); + sortValues.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + assertEquals("{}", Strings.toString(builder)); } } - private static SearchSortValues mutate(SearchSortValues original) { - Object[] sortValues = original.sortValues(); + @Override + protected SearchSortValues mutateInstance(SearchSortValues instance) { + Object[] sortValues = instance.sortValues(); if (sortValues.length == 0) { - return new SearchSortValues(new Object[] { 1 }); + return createTestInstance(); } - return new SearchSortValues(Arrays.copyOf(sortValues, sortValues.length + 1)); + if (randomBoolean()) { + return new SearchSortValues(new Object[0]); + } + Object[] values = Arrays.copyOf(sortValues, sortValues.length + 1); + values[sortValues.length] = randomSortValue(randomFrom(XContentType.values()), true); + return new SearchSortValues(values); } - private static SearchSortValues copy(SearchSortValues original) { - return new SearchSortValues(Arrays.copyOf(original.sortValues(), original.sortValues().length)); + @Override + protected SearchSortValues copyInstance(SearchSortValues instance, Version version) { + return new SearchSortValues(Arrays.copyOf(instance.sortValues(), instance.sortValues().length)); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java index d0e8dcf4d0b..e7bf0fe4cf7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java @@ -84,19 +84,19 @@ public class MinDocCountIT extends AbstractTermsTestCase { scripts.put("doc['d']", vars -> { Map doc = (Map) vars.get("doc"); ScriptDocValues.Doubles value = (ScriptDocValues.Doubles) doc.get("d"); - return value.getValues(); + return value; }); scripts.put("doc['l']", vars -> { Map doc = (Map) vars.get("doc"); ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get("l"); - return value.getValues(); + return value; }); scripts.put("doc['s']", vars -> { Map doc = (Map) vars.get("doc"); ScriptDocValues.Strings value = (ScriptDocValues.Strings) doc.get("s"); - return value.getValues(); + return value; }); return scripts; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java index c6b05e3c4f7..f1275172d76 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java @@ -87,7 +87,7 @@ public class RangeIT extends ESIntegTestCase { scripts.put("doc['" + MULTI_VALUED_FIELD_NAME + "']", vars -> { Map doc = (Map) vars.get("doc"); ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get(MULTI_VALUED_FIELD_NAME); - return value.getValues(); + return 
value; }); return scripts; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java index c607295ab49..759adddd9e8 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java @@ -75,7 +75,7 @@ public class CardinalityIT extends ESIntegTestCase { scripts.put("doc['str_values']", vars -> { Map doc = (Map) vars.get("doc"); ScriptDocValues.Strings strValue = (ScriptDocValues.Strings) doc.get("str_values"); - return strValue.getValues(); + return strValue; }); scripts.put("doc[' + singleNumericField() + '].value", vars -> { @@ -85,7 +85,7 @@ public class CardinalityIT extends ESIntegTestCase { scripts.put("doc[' + multiNumericField(false) + ']", vars -> { Map doc =(Map) vars.get("doc"); - return ((ScriptDocValues) doc.get(multiNumericField(false))).getValues(); + return (ScriptDocValues) doc.get(multiNumericField(false)); }); return scripts; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucketTests.java index 176966174c4..78ef2e1df39 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucketTests.java @@ -19,7 +19,11 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregation.CommonFields; import org.elasticsearch.search.aggregations.ParsedAggregation; @@ -40,23 +44,29 @@ import java.util.Map; import java.util.function.Predicate; import static org.elasticsearch.search.aggregations.metrics.InternalPercentilesTestCase.randomPercents; +import static org.hamcrest.Matchers.equalTo; public class InternalPercentilesBucketTests extends InternalAggregationTestCase { @Override protected InternalPercentilesBucket createTestInstance(String name, List pipelineAggregators, Map metaData) { - return createTestInstance(name, pipelineAggregators, metaData, randomPercents()); + return createTestInstance(name, pipelineAggregators, metaData, randomPercents(), true); } private static InternalPercentilesBucket createTestInstance(String name, List pipelineAggregators, - Map metaData, double[] percents) { - DocValueFormat format = randomNumericDocValueFormat(); + Map metaData, double[] percents, boolean keyed) { final double[] percentiles = new double[percents.length]; for (int i = 0; i < percents.length; ++i) { percentiles[i] = frequently() ? 
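
Several of the script fixtures in these files stop calling ScriptDocValues#getValues(): since ScriptDocValues extends AbstractList, the wrapper itself can be returned, indexed, and iterated. A minimal sketch of the pattern (the field name "l" is illustrative):

    @SuppressWarnings("unchecked")
    Map<String, Object> doc = (Map<String, Object>) vars.get("doc");   // the script's doc binding
    ScriptDocValues.Longs values = (ScriptDocValues.Longs) doc.get("l");
    Object first = values.get(0);        // instead of values.getValues().get(0)
    for (Object v : values) {            // instead of iterating values.getValues()
        // ...
    }
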
randomDouble() : Double.NaN; } - return new InternalPercentilesBucket(name, percents, percentiles, format, pipelineAggregators, metaData); + return createTestInstance(name, pipelineAggregators, metaData, percents, percentiles, keyed); + } + + private static InternalPercentilesBucket createTestInstance(String name, List pipelineAggregators, + Map metaData, double[] percents, double[] percentiles, boolean keyed) { + DocValueFormat format = randomNumericDocValueFormat(); + return new InternalPercentilesBucket(name, percents, percentiles, keyed, format, pipelineAggregators, metaData); } @Override @@ -96,7 +106,8 @@ public class InternalPercentilesBucketTests extends InternalAggregationTestCase< */ public void testPercentOrder() { final double[] percents = new double[]{ 0.50, 0.25, 0.01, 0.99, 0.60 }; - InternalPercentilesBucket aggregation = createTestInstance("test", Collections.emptyList(), Collections.emptyMap(), percents); + InternalPercentilesBucket aggregation = createTestInstance("test", Collections.emptyList(), + Collections.emptyMap(), percents, randomBoolean()); Iterator iterator = aggregation.iterator(); for (double percent : percents) { assertTrue(iterator.hasNext()); @@ -110,7 +121,7 @@ public class InternalPercentilesBucketTests extends InternalAggregationTestCase< final double[] percents = new double[]{ 0.1, 0.2, 0.3}; final double[] percentiles = new double[]{ 0.10, 0.2}; IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new InternalPercentilesBucket("test", percents, - percentiles, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap())); + percentiles, randomBoolean(), DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap())); assertEquals("The number of provided percents and percentiles didn't match. percents: [0.1, 0.2, 0.3], percentiles: [0.1, 0.2]", e.getMessage()); } @@ -125,6 +136,52 @@ public class InternalPercentilesBucketTests extends InternalAggregationTestCase< } } + public void testEmptyRanksXContent() throws IOException { + double[] percents = new double[]{1,2,3}; + double[] percentiles = new double[3]; + for (int i = 0; i < 3; ++i) { + percentiles[i] = randomBoolean() ? 
Double.NaN : Double.POSITIVE_INFINITY; + } + boolean keyed = randomBoolean(); + + InternalPercentilesBucket agg = createTestInstance("test", Collections.emptyList(), Collections.emptyMap(), + percents, percentiles, keyed); + + XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint(); + builder.startObject(); + agg.doXContentBody(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + String expected; + if (keyed) { + expected = "{\n" + + " \"values\" : {\n" + + " \"1.0\" : null,\n" + + " \"2.0\" : null,\n" + + " \"3.0\" : null\n" + + " }\n" + + "}"; + } else { + expected = "{\n" + + " \"values\" : [\n" + + " {\n" + + " \"key\" : 1.0,\n" + + " \"value\" : null\n" + + " },\n" + + " {\n" + + " \"key\" : 2.0,\n" + + " \"value\" : null\n" + + " },\n" + + " {\n" + + " \"key\" : 3.0,\n" + + " \"value\" : null\n" + + " }\n" + + " ]\n" + + "}"; + } + + assertThat(Strings.toString(builder), equalTo(expected)); + } + @Override protected Predicate excludePathsFromXContentInsertion() { return path -> path.endsWith(CommonFields.VALUES.getPreferredName()); @@ -162,7 +219,7 @@ public class InternalPercentilesBucketTests extends InternalAggregationTestCase< default: throw new AssertionError("Illegal randomisation branch"); } - return new InternalPercentilesBucket(name, percents, percentiles, formatter, pipelineAggregators, metaData); + return new InternalPercentilesBucket(name, percents, percentiles, randomBoolean(), formatter, pipelineAggregators, metaData); } private double[] extractPercentiles(InternalPercentilesBucket instance) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java index 4c67fb533e4..546104b0064 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -100,7 +100,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) .extendedBounds(minRandomValue, maxRandomValue)) .addAggregation(percentilesBucket("percentiles_bucket", "histo>_count") - .percents(PERCENTS)).get(); + .setPercents(PERCENTS)).get(); assertSearchResponse(response); @@ -138,7 +138,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) .extendedBounds(minRandomValue, maxRandomValue)) .subAggregation(percentilesBucket("percentiles_bucket", "histo>_count") - .percents(PERCENTS))).get(); + .setPercents(PERCENTS))).get(); assertSearchResponse(response); @@ -180,7 +180,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { .prepareSearch("idx") .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum") - .percents(PERCENTS)).get(); + .setPercents(PERCENTS)).get(); assertSearchResponse(response); @@ -254,7 +254,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { .extendedBounds(minRandomValue, maxRandomValue) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .subAggregation(percentilesBucket("percentiles_bucket", "histo>sum") - .percents(PERCENTS))).get(); + .setPercents(PERCENTS))).get(); assertSearchResponse(response); @@ -308,7 +308,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { 
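
The new boolean threaded through these tests is the keyed flag added to InternalPercentilesBucket; as the expected strings above show, keyed=true renders the values as an object keyed by percent, while keyed=false renders a key/value array. The IT changes that follow also track the percents(...) -> setPercents(...) rename on the pipeline builder. A condensed sketch of the widened constructor, with the argument order as in this diff:

    new InternalPercentilesBucket("test", percents, percentiles, /* keyed */ true,
            DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap());
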
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .subAggregation(percentilesBucket("percentiles_bucket", "histo>sum") .gapPolicy(BucketHelpers.GapPolicy.INSERT_ZEROS) - .percents(PERCENTS))) + .setPercents(PERCENTS))) .get(); assertSearchResponse(response); @@ -354,7 +354,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum") - .percents(PERCENTS)).get(); + .setPercents(PERCENTS)).get(); assertSearchResponse(response); @@ -377,7 +377,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum") - .percents(PERCENTS)).get(); + .setPercents(PERCENTS)).get(); assertSearchResponse(response); @@ -406,7 +406,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { client().prepareSearch("idx") .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum") - .percents(badPercents)).get(); + .setPercents(badPercents)).get(); fail("Illegal percent's were provided but no exception was thrown."); } catch (Exception e) { @@ -440,7 +440,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) .extendedBounds(minRandomValue, maxRandomValue)) .subAggregation(percentilesBucket("percentiles_bucket", "histo>_count") - .percents(badPercents))).get(); + .setPercents(badPercents))).get(); fail("Illegal percent's were provided but no exception was thrown."); } catch (Exception e) { @@ -470,9 +470,9 @@ public class PercentilesBucketIT extends ESIntegTestCase { .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(percentilesBucket("percentile_histo_bucket", "histo>_count").percents(PERCENTS))) + .subAggregation(percentilesBucket("percentile_histo_bucket", "histo>_count").setPercents(PERCENTS))) .addAggregation(percentilesBucket("percentile_terms_bucket", "terms>percentile_histo_bucket.50") - .percents(PERCENTS)).get(); + .setPercents(PERCENTS)).get(); assertSearchResponse(response); @@ -530,9 +530,9 @@ public class PercentilesBucketIT extends ESIntegTestCase { histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) .extendedBounds(minRandomValue, maxRandomValue)) .subAggregation(percentilesBucket("percentile_histo_bucket", "histo>_count") - .percents(percent))) + .setPercents(percent))) .addAggregation(percentilesBucket("percentile_terms_bucket", "terms>percentile_histo_bucket[99.9]") - .percents(percent)).get(); + .setPercents(percent)).get(); assertSearchResponse(response); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketTests.java index 7ad05059a73..165312a5bde 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketTests.java @@ -43,7 +43,7 @@ public class 
PercentilesBucketTests extends AbstractBucketMetricsTestCase vars, String fieldName) { Map doc = (Map) vars.get("doc"); ScriptDocValues values = (ScriptDocValues) doc.get(fieldName); - return values.getValues(); + return values; } } diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java index 8e7686a4d0d..3100b5401f1 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java +++ b/server/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java @@ -108,7 +108,7 @@ public class ExplainableScriptIT extends ESIntegTestCase { @Override public double execute() { - return ((Number) ((ScriptDocValues) getDoc().get("number_field")).getValues().get(0)).doubleValue(); + return ((Number) ((ScriptDocValues) getDoc().get("number_field")).get(0)).doubleValue(); } } diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java index c46f9f011a4..94fdac25efa 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java +++ b/server/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java @@ -85,7 +85,7 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { static Double scoringScript(Map vars, Function scoring) { Map doc = (Map) vars.get("doc"); - Double index = ((Number) ((ScriptDocValues) doc.get("index")).getValues().get(0)).doubleValue(); + Double index = ((Number) ((ScriptDocValues) doc.get("index")).get(0)).doubleValue(); Double score = scoring.apply((ScoreAccessor) vars.get("_score")).doubleValue(); Integer factor = (Integer) vars.get("factor"); return Math.log(index + (factor * score)); diff --git a/server/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/server/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index f5688e0a421..76850a197d9 100644 --- a/server/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -203,7 +203,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") - .operator(Operator.OR).useDisMax(false).type(type))).get(); + .operator(Operator.OR).type(type))).get(); assertFirstHit(searchResponse, anyOf(hasId("theone"), hasId("theother"))); assertThat(searchResponse.getHits().getHits()[0].getScore(), greaterThan(searchResponse.getHits().getHits()[1].getScore())); @@ -323,14 +323,14 @@ public class MultiMatchQueryIT extends ESIntegTestCase { cutoffFrequency = randomBoolean() ? 
Math.min(1, numDocs * 1.f / between(10, 20)) : 1.f / between(10, 20); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") - .operator(Operator.OR).useDisMax(false).cutoffFrequency(cutoffFrequency).type(type))).get(); + .operator(Operator.OR).cutoffFrequency(cutoffFrequency).type(type))).get(); assertFirstHit(searchResponse, anyOf(hasId("theone"), hasId("theother"))); assertThat(searchResponse.getHits().getHits()[0].getScore(), greaterThan(searchResponse.getHits().getHits()[1].getScore())); long size = searchResponse.getHits().getTotalHits().value; searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") - .operator(Operator.OR).useDisMax(false).type(type))).get(); + .operator(Operator.OR).type(type))).get(); assertFirstHit(searchResponse, anyOf(hasId("theone"), hasId("theother"))); assertThat("common terms expected to be a way smaller result set", size, lessThan(searchResponse.getHits().getTotalHits().value)); @@ -399,7 +399,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { SearchResponse left = client().prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_id")) .setQuery(randomizeType(multiMatchQueryBuilder - .operator(op).useDisMax(false).minimumShouldMatch(minShouldMatch).type(type))).get(); + .operator(op).tieBreaker(1.0f).minimumShouldMatch(minShouldMatch).type(type))).get(); SearchResponse right = client().prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_id")) @@ -418,7 +418,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { SearchResponse left = client().prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_id")) .setQuery(randomizeType(multiMatchQuery("capta", "full_name", "first_name", "last_name", "category") - .type(MatchQuery.Type.PHRASE_PREFIX).useDisMax(false).minimumShouldMatch(minShouldMatch))).get(); + .type(MatchQuery.Type.PHRASE_PREFIX).tieBreaker(1.0f).minimumShouldMatch(minShouldMatch))).get(); SearchResponse right = client().prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_id")) @@ -437,7 +437,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { left = client().prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_id")) .setQuery(randomizeType(multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category") - .type(MatchQuery.Type.PHRASE).useDisMax(false).minimumShouldMatch(minShouldMatch))).get(); + .type(MatchQuery.Type.PHRASE).minimumShouldMatch(minShouldMatch))).get(); } else { left = client().prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_id")) diff --git a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index ebfd4dbbebf..0c2b0829c5f 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -681,7 +681,6 @@ public class SearchQueryIT extends ESIntegTestCase { // this uses dismax so scores are equal and the order can be arbitrary 
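
These MultiMatchQueryIT edits drop the removed useDisMax(boolean) setter; where the summed-score behavior of useDisMax(false) mattered, the tests substitute tieBreaker(1.0f), since a dis-max query with tie-breaker 1.0 scores as the sum of all matching field scores. A minimal sketch (query string and fields are illustrative):

    // before (setter removed): multiMatchQuery("marvel hero", "first_name", "last_name").useDisMax(false)
    MultiMatchQueryBuilder query = multiMatchQuery("marvel hero", "first_name", "last_name")
            .tieBreaker(1.0f);   // max + 1.0 * (sum - max) == sum, i.e. the old non-dis-max scoring
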
assertSearchHits(searchResponse, "1", "2"); - builder.useDisMax(false); searchResponse = client().prepareSearch() .setQuery(builder) .get(); @@ -786,7 +785,6 @@ public class SearchQueryIT extends ESIntegTestCase { MultiMatchQueryBuilder multiMatchQuery = multiMatchQuery("value1 value2 foo", "field1", "field2"); - multiMatchQuery.useDisMax(true); multiMatchQuery.minimumShouldMatch("70%"); SearchResponse searchResponse = client().prepareSearch() .setQuery(multiMatchQuery) @@ -800,7 +798,6 @@ public class SearchQueryIT extends ESIntegTestCase { assertFirstHit(searchResponse, hasId("1")); assertSecondHit(searchResponse, hasId("2")); - multiMatchQuery.useDisMax(false); multiMatchQuery.minimumShouldMatch("70%"); searchResponse = client().prepareSearch().setQuery(multiMatchQuery).get(); assertHitCount(searchResponse, 1L); @@ -1314,6 +1311,28 @@ public class SearchQueryIT extends ESIntegTestCase { assertHitCount(searchResponse, 2L); } + public void testIntervals() throws InterruptedException { + createIndex("test"); + + indexRandom(true, + client().prepareIndex("test", "test", "1") + .setSource("description", "it's cold outside, there's no kind of atmosphere")); + + String json = "{ \"intervals\" : " + + "{ \"description\": { " + + " \"all_of\" : {" + + " \"ordered\" : \"true\"," + + " \"intervals\" : [" + + " { \"any_of\" : {" + + " \"intervals\" : [" + + " { \"match\" : { \"query\" : \"cold\" } }," + + " { \"match\" : { \"query\" : \"outside\" } } ] } }," + + " { \"match\" : { \"query\" : \"atmosphere\" } } ]," + + " \"max_gaps\" : 30 } } } }"; + SearchResponse response = client().prepareSearch("test").setQuery(wrapperQuery(json)).get(); + assertHitCount(response, 1L); + } + // see #2994 public void testSimpleSpan() throws IOException, ExecutionException, InterruptedException { createIndex("test"); @@ -1475,11 +1494,11 @@ public class SearchQueryIT extends ESIntegTestCase { refresh(); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(multiMatchQuery("value2", "field2").field("field1", 2).lenient(true).useDisMax(false)).get(); + .setQuery(multiMatchQuery("value2", "field2").field("field1", 2).lenient(true)).get(); assertHitCount(searchResponse, 1L); searchResponse = client().prepareSearch("test") - .setQuery(multiMatchQuery("value2", "field2").field("field1", 2).lenient(true).useDisMax(true)).get(); + .setQuery(multiMatchQuery("value2", "field2").field("field1", 2).lenient(true)).get(); assertHitCount(searchResponse, 1L); searchResponse = client().prepareSearch("test") diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java index 36e22626383..ad3e9fc52e0 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -95,13 +95,13 @@ public class FieldSortIT extends ESIntegTestCase { static Double sortDoubleScript(Map vars) { Map doc = (Map) vars.get("doc"); - Double index = ((Number) ((ScriptDocValues) doc.get("number")).getValues().get(0)).doubleValue(); + Double index = ((Number) ((ScriptDocValues) doc.get("number")).get(0)).doubleValue(); return index; } static String sortStringScript(Map vars) { Map doc = (Map) vars.get("doc"); - String value = ((String) ((ScriptDocValues) doc.get("keyword")).getValues().get(0)); + String value = ((String) ((ScriptDocValues) doc.get("keyword")).get(0)); return value; } } diff --git 
a/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java b/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java index 87aaab501f4..e59f33b5a20 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java @@ -87,7 +87,7 @@ public class SimpleSortIT extends ESIntegTestCase { scripts.put("doc['id'][0]", vars -> { Map doc = (Map) vars.get("doc"); - return ((ScriptDocValues.Strings) doc.get("id")).getValues().get(0); + return ((ScriptDocValues.Strings) doc.get("id")).get(0); }); scripts.put("get min long", vars -> getMinValueScript(vars, Long.MAX_VALUE, "lvalue", l -> (Long) l)); @@ -108,7 +108,7 @@ public class SimpleSortIT extends ESIntegTestCase { T retval = initialValue; Map doc = (Map) vars.get("doc"); ScriptDocValues values = (ScriptDocValues) doc.get(fieldName); - for (Object v : values.getValues()) { + for (Object v : values) { T value = converter.apply(v); retval = (value.compareTo(retval) < 0) ? value : retval; } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index a65b1c4a77a..35e813756bc 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -89,6 +89,7 @@ import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.repositories.RepositoryException; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.StoredScriptsIT; import org.elasticsearch.snapshots.mockstore.MockRepository; @@ -1113,7 +1114,8 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas // check that there is no restore in progress RestoreInProgress restoreInProgress = clusterStateResponse.getState().custom(RestoreInProgress.TYPE); assertNotNull("RestoreInProgress must be not null", restoreInProgress); - assertThat("RestoreInProgress must be empty", restoreInProgress.entries(), hasSize(0)); + assertTrue( + "RestoreInProgress must be empty but found entries in " + restoreInProgress, restoreInProgress.isEmpty()); // check that the shards have been created but are not assigned assertThat(clusterStateResponse.getState().getRoutingTable().allShards(indexName), hasSize(numShards.totalNumShards)); @@ -3510,6 +3512,127 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas assertThat(shardStats.getSeqNoStats().getMaxSeqNo(), equalTo(15L)); } + public void testParallelRestoreOperations() { + String indexName1 = "testindex1"; + String indexName2 = "testindex2"; + String repoName = "test-restore-snapshot-repo"; + String snapshotName1 = "test-restore-snapshot1"; + String snapshotName2 = "test-restore-snapshot2"; + String absolutePath = randomRepoPath().toAbsolutePath().toString(); + logger.info("Path [{}]", absolutePath); + String restoredIndexName1 = indexName1 + "-restored"; + String restoredIndexName2 = indexName2 + "-restored"; + String typeName = "actions"; + String expectedValue = "expected"; + + Client client = client(); + // Write a document + String docId = Integer.toString(randomInt()); + index(indexName1, typeName, docId, "value", expectedValue); + + String docId2 = 
Integer.toString(randomInt()); + index(indexName2, typeName, docId2, "value", expectedValue); + + logger.info("--> creating repository"); + assertAcked(client.admin().cluster().preparePutRepository(repoName) + .setType("fs").setSettings(Settings.builder() + .put("location", absolutePath) + )); + + logger.info("--> snapshot"); + CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot(repoName, snapshotName1) + .setWaitForCompletion(true) + .setIndices(indexName1) + .get(); + assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); + assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), + equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())); + assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); + + CreateSnapshotResponse createSnapshotResponse2 = client.admin().cluster().prepareCreateSnapshot(repoName, snapshotName2) + .setWaitForCompletion(true) + .setIndices(indexName2) + .get(); + assertThat(createSnapshotResponse2.getSnapshotInfo().successfulShards(), greaterThan(0)); + assertThat(createSnapshotResponse2.getSnapshotInfo().successfulShards(), + equalTo(createSnapshotResponse2.getSnapshotInfo().totalShards())); + assertThat(createSnapshotResponse2.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); + + RestoreSnapshotResponse restoreSnapshotResponse1 = client.admin().cluster().prepareRestoreSnapshot(repoName, snapshotName1) + .setWaitForCompletion(false) + .setRenamePattern(indexName1) + .setRenameReplacement(restoredIndexName1) + .get(); + RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin().cluster().prepareRestoreSnapshot(repoName, snapshotName2) + .setWaitForCompletion(false) + .setRenamePattern(indexName2) + .setRenameReplacement(restoredIndexName2) + .get(); + assertThat(restoreSnapshotResponse1.status(), equalTo(RestStatus.ACCEPTED)); + assertThat(restoreSnapshotResponse2.status(), equalTo(RestStatus.ACCEPTED)); + ensureGreen(restoredIndexName1, restoredIndexName2); + assertThat(client.prepareGet(restoredIndexName1, typeName, docId).get().isExists(), equalTo(true)); + assertThat(client.prepareGet(restoredIndexName2, typeName, docId2).get().isExists(), equalTo(true)); + } + + public void testParallelRestoreOperationsFromSingleSnapshot() throws Exception { + String indexName1 = "testindex1"; + String indexName2 = "testindex2"; + String repoName = "test-restore-snapshot-repo"; + String snapshotName = "test-restore-snapshot"; + String absolutePath = randomRepoPath().toAbsolutePath().toString(); + logger.info("Path [{}]", absolutePath); + String restoredIndexName1 = indexName1 + "-restored"; + String restoredIndexName2 = indexName2 + "-restored"; + String typeName = "actions"; + String expectedValue = "expected"; + + Client client = client(); + // Write a document + String docId = Integer.toString(randomInt()); + index(indexName1, typeName, docId, "value", expectedValue); + + String docId2 = Integer.toString(randomInt()); + index(indexName2, typeName, docId2, "value", expectedValue); + + logger.info("--> creating repository"); + assertAcked(client.admin().cluster().preparePutRepository(repoName) + .setType("fs").setSettings(Settings.builder() + .put("location", absolutePath) + )); + + logger.info("--> snapshot"); + CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot(repoName, snapshotName) + .setWaitForCompletion(true) + .setIndices(indexName1, indexName2) + .get(); + 
assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); + assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), + equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())); + assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); + + ActionFuture restoreSnapshotResponse1 = client.admin().cluster() + .prepareRestoreSnapshot(repoName, snapshotName) + .setIndices(indexName1) + .setRenamePattern(indexName1) + .setRenameReplacement(restoredIndexName1) + .execute(); + + boolean sameSourceIndex = randomBoolean(); + + ActionFuture restoreSnapshotResponse2 = client.admin().cluster() + .prepareRestoreSnapshot(repoName, snapshotName) + .setIndices(sameSourceIndex ? indexName1 : indexName2) + .setRenamePattern(sameSourceIndex ? indexName1 : indexName2) + .setRenameReplacement(restoredIndexName2) + .execute(); + assertThat(restoreSnapshotResponse1.get().status(), equalTo(RestStatus.ACCEPTED)); + assertThat(restoreSnapshotResponse2.get().status(), equalTo(RestStatus.ACCEPTED)); + ensureGreen(restoredIndexName1, restoredIndexName2); + assertThat(client.prepareGet(restoredIndexName1, typeName, docId).get().isExists(), equalTo(true)); + assertThat(client.prepareGet(restoredIndexName2, typeName, sameSourceIndex ? docId : docId2).get().isExists(), equalTo(true)); + } + @TestLogging("org.elasticsearch.snapshots:TRACE") public void testAbortedSnapshotDuringInitDoesNotStart() throws Exception { final Client client = client(); diff --git a/server/src/test/java/org/elasticsearch/transport/ConnectionProfileTests.java b/server/src/test/java/org/elasticsearch/transport/ConnectionProfileTests.java index 4f380de08ed..e26af6719e8 100644 --- a/server/src/test/java/org/elasticsearch/transport/ConnectionProfileTests.java +++ b/server/src/test/java/org/elasticsearch/transport/ConnectionProfileTests.java @@ -204,10 +204,10 @@ public class ConnectionProfileTests extends ESTestCase { assertEquals(1, profile.getNumConnectionsPerType(TransportRequestOptions.Type.STATE)); assertEquals(2, profile.getNumConnectionsPerType(TransportRequestOptions.Type.RECOVERY)); assertEquals(3, profile.getNumConnectionsPerType(TransportRequestOptions.Type.BULK)); - assertEquals(TransportService.TCP_CONNECT_TIMEOUT.get(Settings.EMPTY), profile.getConnectTimeout()); - assertEquals(TransportService.TCP_CONNECT_TIMEOUT.get(Settings.EMPTY), profile.getHandshakeTimeout()); - assertEquals(Transport.TRANSPORT_TCP_COMPRESS.get(Settings.EMPTY), profile.getCompressionEnabled()); - assertEquals(TcpTransport.PING_SCHEDULE.get(Settings.EMPTY), profile.getPingInterval()); + assertEquals(TransportSettings.CONNECT_TIMEOUT.get(Settings.EMPTY), profile.getConnectTimeout()); + assertEquals(TransportSettings.CONNECT_TIMEOUT.get(Settings.EMPTY), profile.getHandshakeTimeout()); + assertEquals(TransportSettings.TRANSPORT_COMPRESS.get(Settings.EMPTY), profile.getCompressionEnabled()); + assertEquals(TransportSettings.PING_SCHEDULE.get(Settings.EMPTY), profile.getPingInterval()); profile = ConnectionProfile.buildDefaultConnectionProfile(Settings.builder().put("node.master", false).build()); assertEquals(12, profile.getNumConnections()); diff --git a/server/src/test/java/org/elasticsearch/transport/PublishPortTests.java b/server/src/test/java/org/elasticsearch/transport/PublishPortTests.java index 0f121f0c401..b246d11b097 100644 --- a/server/src/test/java/org/elasticsearch/transport/PublishPortTests.java +++ 
b/server/src/test/java/org/elasticsearch/transport/PublishPortTests.java @@ -46,13 +46,13 @@ public class PublishPortTests extends ESTestCase { Settings settings; if (useProfile) { baseSettings = Settings.builder().put("transport.profiles.some_profile.port", 0).build(); - settings = randomBoolean() ? Settings.EMPTY : Settings.builder().put(TcpTransport.PUBLISH_PORT.getKey(), 9081).build(); + settings = randomBoolean() ? Settings.EMPTY : Settings.builder().put(TransportSettings.PUBLISH_PORT.getKey(), 9081).build(); settings = Settings.builder().put(settings).put(baseSettings).put("transport.profiles.some_profile.publish_port", 9080).build(); profile = "some_profile"; } else { baseSettings = Settings.EMPTY; - settings = Settings.builder().put(TcpTransport.PUBLISH_PORT.getKey(), 9081).build(); + settings = Settings.builder().put(TransportSettings.PUBLISH_PORT.getKey(), 9081).build(); settings = randomBoolean() ? settings : Settings.builder().put(settings).put("transport.profiles.default.publish_port", 9080).build(); profile = "default"; diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index 9a185163436..dfc5d4367b4 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -351,7 +351,7 @@ public class RemoteClusterServiceTests extends ESTestCase { settingsBuilder.putList("cluster.remote.cluster_1.seeds", seedNode.getAddress().toString()); if (randomBoolean()) { pingSchedule = TimeValue.timeValueSeconds(randomIntBetween(1, 10)); - settingsBuilder.put(TcpTransport.PING_SCHEDULE.getKey(), pingSchedule).build(); + settingsBuilder.put(TransportSettings.PING_SCHEDULE.getKey(), pingSchedule).build(); } else { pingSchedule = TimeValue.MINUS_ONE; } @@ -385,7 +385,7 @@ public class RemoteClusterServiceTests extends ESTestCase { Collections.shuffle(knownNodes, random()); Settings.Builder settingsBuilder = Settings.builder(); if (randomBoolean()) { - settingsBuilder.put(TcpTransport.PING_SCHEDULE.getKey(), TimeValue.timeValueSeconds(randomIntBetween(1, 10))); + settingsBuilder.put(TransportSettings.PING_SCHEDULE.getKey(), TimeValue.timeValueSeconds(randomIntBetween(1, 10))); } Settings transportSettings = settingsBuilder.build(); diff --git a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java index b52ae8759f2..7cf8ed67d63 100644 --- a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java @@ -223,7 +223,7 @@ public class TcpTransportTests extends ESTestCase { }; DiscoveryNode node = new DiscoveryNode("foo", buildNewFakeTransportAddress(), Version.CURRENT); - ConnectionProfile.Builder profileBuilder = new ConnectionProfile.Builder(MockTcpTransport.LIGHT_PROFILE); + ConnectionProfile.Builder profileBuilder = new ConnectionProfile.Builder(TestProfiles.LIGHT_PROFILE); if (compressed) { profileBuilder.setCompressionEnabled(true); } else { diff --git a/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java b/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java index ed1dfded078..fa6b293adf7 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java +++ 
b/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java @@ -26,13 +26,14 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.nio.MockNioTransport; import org.junit.After; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -60,14 +61,10 @@ public class TransportServiceHandshakeTests extends ESTestCase { private List transportServices = new ArrayList<>(); private NetworkHandle startServices(String nodeNameAndId, Settings settings, Version version) { - MockTcpTransport transport = - new MockTcpTransport( - settings, - threadPool, - BigArrays.NON_RECYCLING_INSTANCE, - new NoneCircuitBreakerService(), - new NamedWriteableRegistry(Collections.emptyList()), - new NetworkService(Collections.emptyList())); + MockNioTransport transport = + new MockNioTransport(settings, Version.CURRENT, threadPool, new NetworkService(Collections.emptyList()), + PageCacheRecycler.NON_RECYCLING_INSTANCE, new NamedWriteableRegistry(Collections.emptyList()), + new NoneCircuitBreakerService()); TransportService transportService = new MockTransportService(settings, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, (boundAddress) -> new DiscoveryNode( nodeNameAndId, @@ -112,7 +109,7 @@ public class TransportServiceHandshakeTests extends ESTestCase { emptyMap(), emptySet(), Version.CURRENT.minimumCompatibilityVersion()); - try (Transport.Connection connection = handleA.transportService.openConnection(discoveryNode, MockTcpTransport.LIGHT_PROFILE)){ + try (Transport.Connection connection = handleA.transportService.openConnection(discoveryNode, TestProfiles.LIGHT_PROFILE)){ DiscoveryNode connectedNode = handleA.transportService.handshake(connection, timeout); assertNotNull(connectedNode); // the name and version should be updated @@ -134,7 +131,7 @@ public class TransportServiceHandshakeTests extends ESTestCase { Version.CURRENT.minimumCompatibilityVersion()); IllegalStateException ex = expectThrows(IllegalStateException.class, () -> { try (Transport.Connection connection = handleA.transportService.openConnection(discoveryNode, - MockTcpTransport.LIGHT_PROFILE)) { + TestProfiles.LIGHT_PROFILE)) { handleA.transportService.handshake(connection, timeout); } }); @@ -155,7 +152,7 @@ public class TransportServiceHandshakeTests extends ESTestCase { Version.CURRENT.minimumCompatibilityVersion()); IllegalStateException ex = expectThrows(IllegalStateException.class, () -> { try (Transport.Connection connection = handleA.transportService.openConnection(discoveryNode, - MockTcpTransport.LIGHT_PROFILE)) { + TestProfiles.LIGHT_PROFILE)) { handleA.transportService.handshake(connection, timeout); } }); @@ -174,7 +171,7 @@ public class TransportServiceHandshakeTests extends ESTestCase { emptySet(), handleB.discoveryNode.getVersion()); ConnectTransportException ex = expectThrows(ConnectTransportException.class, () -> { - 
handleA.transportService.connectToNode(discoveryNode, MockTcpTransport.LIGHT_PROFILE); + handleA.transportService.connectToNode(discoveryNode, TestProfiles.LIGHT_PROFILE); }); assertThat(ex.getMessage(), containsString("unexpected remote node")); assertFalse(handleA.transportService.nodeConnected(discoveryNode)); @@ -192,7 +189,7 @@ public class TransportServiceHandshakeTests extends ESTestCase { emptySet(), handleB.discoveryNode.getVersion()); - handleA.transportService.connectToNode(discoveryNode, MockTcpTransport.LIGHT_PROFILE); + handleA.transportService.connectToNode(discoveryNode, TestProfiles.LIGHT_PROFILE); assertTrue(handleA.transportService.nodeConnected(discoveryNode)); } @@ -210,7 +207,7 @@ public class TransportServiceHandshakeTests extends ESTestCase { emptySet(), handleB.discoveryNode.getVersion()); ConnectTransportException ex = expectThrows(ConnectTransportException.class, () -> { - handleA.transportService.connectToNode(discoveryNode, MockTcpTransport.LIGHT_PROFILE); + handleA.transportService.connectToNode(discoveryNode, TestProfiles.LIGHT_PROFILE); }); assertThat(ex.getMessage(), containsString("unexpected remote node")); assertFalse(handleA.transportService.nodeConnected(discoveryNode)); diff --git a/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java b/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java index c0ee228a456..9de70f4339f 100644 --- a/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java +++ b/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java @@ -281,6 +281,74 @@ public class SimpleVersioningIT extends ESIntegTestCase { assertThat(deleteResponse.getVersion(), equalTo(4L)); } + public void testCompareAndSet() { + createIndex("test"); + ensureGreen(); + + IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet(); + assertThat(indexResponse.getSeqNo(), equalTo(0L)); + assertThat(indexResponse.getPrimaryTerm(), equalTo(1L)); + + indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setVersion(1).execute().actionGet(); + assertThat(indexResponse.getSeqNo(), equalTo(1L)); + assertThat(indexResponse.getPrimaryTerm(), equalTo(1L)); + + assertThrows( + client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfMatch(10, 1).execute(), + VersionConflictEngineException.class); + + assertThrows( + client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfMatch(10, 2).execute(), + VersionConflictEngineException.class); + + assertThrows( + client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfMatch(1, 2).execute(), + VersionConflictEngineException.class); + + + assertThrows(client().prepareDelete("test", "type", "1").setIfMatch(10, 1).execute(), VersionConflictEngineException.class); + assertThrows(client().prepareDelete("test", "type", "1").setIfMatch(10, 2).execute(), VersionConflictEngineException.class); + assertThrows(client().prepareDelete("test", "type", "1").setIfMatch(1, 2).execute(), VersionConflictEngineException.class); + + client().admin().indices().prepareRefresh().execute().actionGet(); + // TODO: Enable once get response returns seqNo +// for (int i = 0; i < 10; i++) { +// final GetResponse response = client().prepareGet("test", "type", "1").get(); +// assertThat(response.getSeqNo(), equalTo(1L)); +// assertThat(response.getPrimaryTerm(), equalTo(1L)); +// } + + // search with versioning + for 
(int i = 0; i < 10; i++) {
+            // TODO: ADD SEQ NO!
+            SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setVersion(true).execute().actionGet();
+            assertThat(searchResponse.getHits().getAt(0).getVersion(), equalTo(2L));
+        }
+
+        // search without versioning
+        for (int i = 0; i < 10; i++) {
+            SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).execute().actionGet();
+            assertThat(searchResponse.getHits().getAt(0).getVersion(), equalTo(Versions.NOT_FOUND));
+        }
+
+        DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setIfMatch(1, 1).execute().actionGet();
+        assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
+        assertThat(deleteResponse.getSeqNo(), equalTo(2L));
+        assertThat(deleteResponse.getPrimaryTerm(), equalTo(1L));
+
+        assertThrows(client().prepareDelete("test", "type", "1").setIfMatch(1, 1).execute(), VersionConflictEngineException.class);
+        assertThrows(client().prepareDelete("test", "type", "1").setIfMatch(3, 2).execute(), VersionConflictEngineException.class);
+        assertThrows(client().prepareDelete("test", "type", "1").setIfMatch(1, 2).execute(), VersionConflictEngineException.class);
+
+
+        // This is intricate - the document was already deleted, but this delete carries a matching seq no and
+        // primary term, so it is still processed as a (not found) delete and advances the sequence number.
+        deleteResponse = client().prepareDelete("test", "type", "1").setIfMatch(2, 1).execute().actionGet();
+        assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult());
+        assertThat(deleteResponse.getSeqNo(), equalTo(3L));
+        assertThat(deleteResponse.getPrimaryTerm(), equalTo(1L));
+    }
+
     public void testSimpleVersioningWithFlush() throws Exception {
         createIndex("test");
         ensureGreen();
diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java
index 995900c1e65..9d892d192a2 100644
--- a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java
+++ b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java
@@ -144,6 +144,7 @@ public class TestShardRouting {
             RecoverySource.PeerRecoverySource.INSTANCE,
             RecoverySource.LocalShardsRecoverySource.INSTANCE,
             new RecoverySource.SnapshotRecoverySource(
+                UUIDs.randomBase64UUID(),
                 new Snapshot("repo", new SnapshotId(randomAlphaOfLength(8), UUIDs.randomBase64UUID())),
                 Version.CURRENT,
                 "some_index"));
diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java
index d2b9a259298..830fcec3726 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java
@@ -125,7 +125,7 @@ public class TranslogHandler implements Engine.TranslogRecoveryRunner {
                         source(indexName, index.type(), index.id(), index.source(), XContentHelper.xContentType(index.source()))
                             .routing(index.routing()), index.seqNo(), index.primaryTerm(),
-                        index.version(), null, origin, index.getAutoGeneratedIdTimestamp(), true);
+                        index.version(), null, origin, index.getAutoGeneratedIdTimestamp(), true, SequenceNumbers.UNASSIGNED_SEQ_NO, 0);
                 return engineIndex;
             case DELETE:
                 final Translog.Delete delete = (Translog.Delete) operation;
diff --git
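
The setIfMatch(seqNo, primaryTerm) calls above are the primitive for compare-and-set writes: the operation only applies if the document's current sequence number and primary term match the expected pair, otherwise it fails with VersionConflictEngineException. A minimal sketch of the resulting read-modify-write idiom, reusing the seqNo/primaryTerm reported by the previous write (a sketch only, not a full retry loop):

    // Remember the seqNo/primaryTerm assigned to our last successful write.
    IndexResponse written = client().prepareIndex("test", "type", "1")
        .setSource("field1", "v1")
        .get();

    try {
        // Only apply the update if nobody else has written the document since.
        client().prepareIndex("test", "type", "1")
            .setSource("field1", "v2")
            .setIfMatch(written.getSeqNo(), written.getPrimaryTerm())
            .get();
    } catch (VersionConflictEngineException e) {
        // A concurrent writer advanced the sequence number first: re-read and retry.
    }
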
a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 4199d414bf3..0a007f2a18e 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -34,6 +34,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.ClusterSettings; @@ -701,12 +702,12 @@ public abstract class IndexShardTestCase extends ESTestCase { Engine.IndexResult result; if (shard.routingEntry().primary()) { result = shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, sourceToParse, - IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); if (result.getResultType() == Engine.Result.Type.MAPPING_UPDATE_REQUIRED) { updateMappings(shard, IndexMetaData.builder(shard.indexSettings().getIndexMetaData()) .putMapping(type, result.getRequiredMappingUpdate().toString()).build()); result = shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, sourceToParse, - IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); } shard.updateLocalCheckpointForShard(shard.routingEntry().allocationId().getId(), shard.getLocalCheckpoint()); @@ -730,7 +731,8 @@ public abstract class IndexShardTestCase extends ESTestCase { protected Engine.DeleteResult deleteDoc(IndexShard shard, String type, String id) throws IOException { final Engine.DeleteResult result; if (shard.routingEntry().primary()) { - result = shard.applyDeleteOperationOnPrimary(Versions.MATCH_ANY, type, id, VersionType.INTERNAL); + result = shard.applyDeleteOperationOnPrimary( + Versions.MATCH_ANY, type, id, VersionType.INTERNAL, SequenceNumbers.UNASSIGNED_SEQ_NO, 0); shard.updateLocalCheckpointForShard(shard.routingEntry().allocationId().getId(), shard.getEngine().getLocalCheckpoint()); } else { final long seqNo = shard.seqNoStats().getMaxSeqNo() + 1; @@ -757,7 +759,8 @@ public abstract class IndexShardTestCase extends ESTestCase { final String index = shardId.getIndexName(); final IndexId indexId = new IndexId(shardId.getIndex().getName(), shardId.getIndex().getUUID()); final DiscoveryNode node = getFakeDiscoNode(shard.routingEntry().currentNodeId()); - final RecoverySource.SnapshotRecoverySource recoverySource = new RecoverySource.SnapshotRecoverySource(snapshot, version, index); + final RecoverySource.SnapshotRecoverySource recoverySource = + new RecoverySource.SnapshotRecoverySource(UUIDs.randomBase64UUID(), snapshot, version, index); final ShardRouting shardRouting = newShardRouting(shardId, node.getId(), true, ShardRoutingState.INITIALIZING, recoverySource); shard.markAsRecovering("from snapshot", new RecoveryState(shardRouting, node, null)); diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializingTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializingTestCase.java index 5aeb30bfdbd..22ed586043d 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializingTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializingTestCase.java @@ -24,10 +24,13 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.util.function.Predicate; +import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; + public abstract class AbstractSerializingTestCase extends AbstractWireSerializingTestCase { /** @@ -35,17 +38,14 @@ public abstract class AbstractSerializingTestCase T copyInstance(T original, NamedWriteableRegistry namedWriteableRegistry, Writeable.Writer writer, + protected static T copyInstance(T original, NamedWriteableRegistry namedWriteableRegistry, Writeable.Writer writer, Writeable.Reader reader, Version version) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { output.setVersion(version); diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 6ebcdf6358c..a625de41505 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -109,9 +109,8 @@ import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.MockTransportClient; -import org.elasticsearch.transport.TcpTransport; -import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.TransportSettings; import org.junit.Assert; import java.io.Closeable; @@ -361,7 +360,7 @@ public final class InternalTestCluster extends TestCluster { builder.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), baseDir.resolve("custom")); builder.put(Environment.PATH_HOME_SETTING.getKey(), baseDir); builder.put(Environment.PATH_REPO_SETTING.getKey(), baseDir.resolve("repos")); - builder.put(TcpTransport.PORT.getKey(), 0); + builder.put(TransportSettings.PORT.getKey(), 0); builder.put("http.port", 0); if (Strings.hasLength(System.getProperty("tests.es.logger.level"))) { builder.put("logger.level", System.getProperty("tests.es.logger.level")); @@ -438,7 +437,7 @@ public final class InternalTestCluster extends TestCluster { private Settings getRandomNodeSettings(long seed) { Random random = new Random(seed); Builder builder = Settings.builder(); - builder.put(Transport.TRANSPORT_TCP_COMPRESS.getKey(), rarely(random)); + builder.put(TransportSettings.TRANSPORT_COMPRESS.getKey(), rarely(random)); if (random.nextBoolean()) { builder.put("cache.recycler.page.type", RandomPicks.randomFrom(random, PageCacheRecycler.Type.values())); } @@ -460,9 +459,9 @@ public final class InternalTestCluster extends TestCluster { // randomize tcp settings if (random.nextBoolean()) { - builder.put(TransportService.CONNECTIONS_PER_NODE_RECOVERY.getKey(), random.nextInt(2) + 1); - builder.put(TransportService.CONNECTIONS_PER_NODE_BULK.getKey(), random.nextInt(3) + 1); - builder.put(TransportService.CONNECTIONS_PER_NODE_REG.getKey(), random.nextInt(6) + 1); + 
builder.put(TransportSettings.CONNECTIONS_PER_NODE_RECOVERY.getKey(), random.nextInt(2) + 1);
+            builder.put(TransportSettings.CONNECTIONS_PER_NODE_BULK.getKey(), random.nextInt(3) + 1);
+            builder.put(TransportSettings.CONNECTIONS_PER_NODE_REG.getKey(), random.nextInt(6) + 1);
         }

         if (random.nextBoolean()) {
@@ -490,7 +489,7 @@ public final class InternalTestCluster extends TestCluster {
         }

         if (random.nextBoolean()) {
-            builder.put(TcpTransport.PING_SCHEDULE.getKey(), RandomNumbers.randomIntBetween(random, 100, 2000) + "ms");
+            builder.put(TransportSettings.PING_SCHEDULE.getKey(), RandomNumbers.randomIntBetween(random, 100, 2000) + "ms");
         }

         if (random.nextBoolean()) {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java
index 68434f0f29e..c81d0810f08 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java
@@ -21,7 +21,6 @@ package org.elasticsearch.test;

 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
-
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.support.replication.ReplicationResponse.ShardInfo;
 import org.elasticsearch.action.support.replication.ReplicationResponse.ShardInfo.Failure;
@@ -72,78 +71,85 @@ public final class RandomObjects {
      */
     public static Tuple<List<Object>, List<Object>> randomStoredFieldValues(Random random, XContentType xContentType) {
         int numValues = randomIntBetween(random, 1, 5);
-        List<Object> originalValues = new ArrayList<>();
-        List<Object> expectedParsedValues = new ArrayList<>();
+        List<Object> originalValues = randomStoredFieldValues(random, numValues);
+        List<Object> expectedParsedValues = new ArrayList<>(numValues);
+        for (Object originalValue : originalValues) {
+            expectedParsedValues.add(getExpectedParsedValue(xContentType, originalValue));
+        }
+        return Tuple.tuple(originalValues, expectedParsedValues);
+    }
+
+    private static List<Object> randomStoredFieldValues(Random random, int numValues) {
+        List<Object> values = new ArrayList<>(numValues);
         int dataType = randomIntBetween(random, 0, 8);
         for (int i = 0; i < numValues; i++) {
             switch(dataType) {
                 case 0:
-                    long randomLong = random.nextLong();
-                    originalValues.add(randomLong);
-                    expectedParsedValues.add(randomLong);
+                    values.add(random.nextLong());
                     break;
                 case 1:
-                    int randomInt = random.nextInt();
-                    originalValues.add(randomInt);
-                    expectedParsedValues.add(randomInt);
+                    values.add(random.nextInt());
                     break;
                 case 2:
-                    Short randomShort = (short) random.nextInt();
-                    originalValues.add(randomShort);
-                    expectedParsedValues.add(randomShort.intValue());
+                    values.add((short) random.nextInt());
                     break;
                 case 3:
-                    Byte randomByte = (byte)random.nextInt();
-                    originalValues.add(randomByte);
-                    expectedParsedValues.add(randomByte.intValue());
+                    values.add((byte) random.nextInt());
                     break;
                 case 4:
-                    double randomDouble = random.nextDouble();
-                    originalValues.add(randomDouble);
-                    expectedParsedValues.add(randomDouble);
+                    values.add(random.nextDouble());
                     break;
                 case 5:
-                    Float randomFloat = random.nextFloat();
-                    originalValues.add(randomFloat);
-                    if (xContentType == XContentType.CBOR) {
-                        //with CBOR we get back a float
-                        expectedParsedValues.add(randomFloat);
-                    } else if (xContentType == XContentType.SMILE) {
-                        //with SMILE we get back a double (this will change in Jackson 2.9 where it will return a Float)
-                        expectedParsedValues.add(randomFloat.doubleValue());
-                    } else {
-                        //with JSON AND YAML we get back a double, but with float precision.
-                        expectedParsedValues.add(Double.parseDouble(randomFloat.toString()));
-                    }
+                    values.add(random.nextFloat());
                     break;
                 case 6:
-                    boolean randomBoolean = random.nextBoolean();
-                    originalValues.add(randomBoolean);
-                    expectedParsedValues.add(randomBoolean);
+                    values.add(random.nextBoolean());
                     break;
                 case 7:
-                    String randomString = random.nextBoolean() ? RandomStrings.randomAsciiLettersOfLengthBetween(random, 3, 10) :
-                        randomUnicodeOfLengthBetween(random, 3, 10);
-                    originalValues.add(randomString);
-                    expectedParsedValues.add(randomString);
+                    values.add(random.nextBoolean() ? RandomStrings.randomAsciiLettersOfLengthBetween(random, 3, 10) :
+                        randomUnicodeOfLengthBetween(random, 3, 10));
                     break;
                 case 8:
                     byte[] randomBytes = RandomStrings.randomUnicodeOfLengthBetween(random, 10, 50).getBytes(StandardCharsets.UTF_8);
-                    BytesArray randomBytesArray = new BytesArray(randomBytes);
-                    originalValues.add(randomBytesArray);
-                    if (xContentType == XContentType.JSON || xContentType == XContentType.YAML) {
-                        //JSON and YAML write the base64 format
-                        expectedParsedValues.add(Base64.getEncoder().encodeToString(randomBytes));
-                    } else {
-                        //SMILE and CBOR write the original bytes as they support binary format
-                        expectedParsedValues.add(randomBytesArray);
-                    }
+                    values.add(new BytesArray(randomBytes));
                     break;
                 default:
                     throw new UnsupportedOperationException();
             }
         }
-        return Tuple.tuple(originalValues, expectedParsedValues);
+        return values;
+    }
+
+    /**
+     * Converts the provided stored field value to the value that is expected once it has been printed out
+     * via {@link org.elasticsearch.common.xcontent.ToXContent#toXContent(XContentBuilder, ToXContent.Params)} and parsed back via
+     * {@link org.elasticsearch.common.xcontent.XContentParser#objectText()}.
+     * The conversion only needs to cover the shapes a stored field value can have: values are retrieved from lucene
+     * and converted via {@link org.elasticsearch.index.mapper.MappedFieldType#valueForDisplay(Object)} to either
+     * strings, numbers or booleans.
+     */
+    public static Object getExpectedParsedValue(XContentType xContentType, Object value) {
+        if (value instanceof BytesArray) {
+            if (xContentType == XContentType.JSON || xContentType == XContentType.YAML) {
+                //JSON and YAML write the base64 format
+                return Base64.getEncoder().encodeToString(((BytesArray)value).toBytesRef().bytes);
+            }
+        }
+        if (value instanceof Float) {
+            if (xContentType == XContentType.SMILE) {
+                //with SMILE we get back a double (this will change in Jackson 2.9 where it will return a Float)
+                return ((Float)value).doubleValue();
+            } else {
+                //with JSON and YAML we get back a double, but with float precision.
+ return Double.parseDouble(value.toString()); + } + } + if (value instanceof Byte) { + return ((Byte)value).intValue(); + } + if (value instanceof Short) { + return ((Short)value).intValue(); + } + return value; } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index c0879af2dfa..403ac96104a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -50,12 +50,12 @@ import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectionManager; import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.RequestHandlerRegistry; -import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportInterceptor; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.transport.nio.MockNioTransport; import java.io.IOException; @@ -109,7 +109,7 @@ public final class MockTransportService extends TransportService { // be smart enough to re-connect depending on what is tested. To reduce the risk, since this is very hard to debug we use // a different default port range per JVM unless the incoming settings override it int basePort = 10300 + (JVM_ORDINAL * 100); // use a non-default port otherwise some cluster in this JVM might reuse a port - settings = Settings.builder().put(TcpTransport.PORT.getKey(), basePort + "-" + (basePort + 100)).put(settings).build(); + settings = Settings.builder().put(TransportSettings.PORT.getKey(), basePort + "-" + (basePort + 100)).put(settings).build(); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(ClusterModule.getNamedWriteables()); return new MockNioTransport(settings, version, threadPool, new NetworkService(Collections.emptyList()), new MockPageCacheRecycler(settings), namedWriteableRegistry, new NoneCircuitBreakerService()); @@ -130,7 +130,8 @@ public final class MockTransportService extends TransportService { * Build the service. * * @param clusterSettings if non null the {@linkplain TransportService} will register with the {@link ClusterSettings} for settings - * updates for {@link #TRACE_LOG_EXCLUDE_SETTING} and {@link #TRACE_LOG_INCLUDE_SETTING}. + * updates for {@link TransportSettings#TRACE_LOG_EXCLUDE_SETTING} and + * {@link TransportSettings#TRACE_LOG_INCLUDE_SETTING}. */ public MockTransportService(Settings settings, Transport transport, ThreadPool threadPool, TransportInterceptor interceptor, @Nullable ClusterSettings clusterSettings) { @@ -143,7 +144,8 @@ public final class MockTransportService extends TransportService { * Build the service. * * @param clusterSettings if non null the {@linkplain TransportService} will register with the {@link ClusterSettings} for settings - * updates for {@link #TRACE_LOG_EXCLUDE_SETTING} and {@link #TRACE_LOG_INCLUDE_SETTING}. + * updates for {@link TransportSettings#TRACE_LOG_EXCLUDE_SETTING} and + * {@link TransportSettings#TRACE_LOG_INCLUDE_SETTING}. 
*/ public MockTransportService(Settings settings, Transport transport, ThreadPool threadPool, TransportInterceptor interceptor, Function localNodeFactory, @@ -319,7 +321,7 @@ public final class MockTransportService extends TransportService { } // TODO: Replace with proper setting - TimeValue connectingTimeout = TransportService.TCP_CONNECT_TIMEOUT.getDefault(Settings.EMPTY); + TimeValue connectingTimeout = TransportSettings.CONNECT_TIMEOUT.getDefault(Settings.EMPTY); try { if (delay.millis() < connectingTimeout.millis()) { Thread.sleep(delay.millis()); diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index d108b088a04..8c173625cce 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -42,7 +42,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.internal.io.IOUtils; @@ -56,6 +56,7 @@ import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.test.transport.StubbableTransport; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.nio.MockNioTransport; import org.junit.After; import org.junit.Before; @@ -127,11 +128,11 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { threadPool = new TestThreadPool(getClass().getName()); clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); Settings connectionSettings = Settings.builder() - .put(TransportService.CONNECTIONS_PER_NODE_RECOVERY.getKey(), 1) - .put(TransportService.CONNECTIONS_PER_NODE_BULK.getKey(), 1) - .put(TransportService.CONNECTIONS_PER_NODE_REG.getKey(), 2) - .put(TransportService.CONNECTIONS_PER_NODE_STATE.getKey(), 1) - .put(TransportService.CONNECTIONS_PER_NODE_PING.getKey(), 1) + .put(TransportSettings.CONNECTIONS_PER_NODE_RECOVERY.getKey(), 1) + .put(TransportSettings.CONNECTIONS_PER_NODE_BULK.getKey(), 1) + .put(TransportSettings.CONNECTIONS_PER_NODE_REG.getKey(), 2) + .put(TransportSettings.CONNECTIONS_PER_NODE_STATE.getKey(), 1) + .put(TransportSettings.CONNECTIONS_PER_NODE_PING.getKey(), 1) .build(); serviceA = buildService("TS_A", version0, clusterSettings, connectionSettings); // this one supports dynamic tracer updates @@ -171,8 +172,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { Settings.builder() .put(settings) .put(Node.NODE_NAME_SETTING.getKey(), name) - .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") - .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") + .put(TransportSettings.TRACE_LOG_INCLUDE_SETTING.getKey(), "") + .put(TransportSettings.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") .build(), version, clusterSettings, doHandshake); @@ -513,7 +514,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } }); 
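
Most of the churn in these test files is mechanical: the transport settings constants moved from TransportService, Transport and TcpTransport onto the new TransportSettings class, keeping their types and semantics. A minimal sketch of building node settings against the new constants (the values are illustrative):

    Settings transportSettings = Settings.builder()
        .put(TransportSettings.PORT.getKey(), "9300-9400")         // formerly TcpTransport.PORT
        .put(TransportSettings.CONNECT_TIMEOUT.getKey(), "30s")    // formerly TransportService.TCP_CONNECT_TIMEOUT
        .put(TransportSettings.PING_SCHEDULE.getKey(), "10s")      // formerly TcpTransport.PING_SCHEDULE
        .put(TransportSettings.TRANSPORT_COMPRESS.getKey(), true)  // formerly Transport.TRANSPORT_TCP_COMPRESS
        .build();
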
- Settings settingsWithCompress = Settings.builder().put(Transport.TRANSPORT_TCP_COMPRESS.getKey(), true).build(); + Settings settingsWithCompress = Settings.builder().put(TransportSettings.TRANSPORT_COMPRESS.getKey(), true).build(); ConnectionProfile connectionProfile = ConnectionProfile.buildDefaultConnectionProfile(settingsWithCompress); serviceC.connectToNode(serviceA.getLocalDiscoNode(), connectionProfile); @@ -567,7 +568,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } }); - Settings settingsWithCompress = Settings.builder().put(Transport.TRANSPORT_TCP_COMPRESS.getKey(), true).build(); + Settings settingsWithCompress = Settings.builder().put(TransportSettings.TRANSPORT_COMPRESS.getKey(), true).build(); ConnectionProfile connectionProfile = ConnectionProfile.buildDefaultConnectionProfile(settingsWithCompress); serviceC.connectToNode(serviceA.getLocalDiscoNode(), connectionProfile); @@ -1050,8 +1051,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { excludeSettings = "DOESN'T_MATCH"; } clusterSettings.applySettings(Settings.builder() - .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), includeSettings) - .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), excludeSettings) + .put(TransportSettings.TRACE_LOG_INCLUDE_SETTING.getKey(), includeSettings) + .put(TransportSettings.TRACE_LOG_EXCLUDE_SETTING.getKey(), excludeSettings) .build()); tracer.reset(4); @@ -1521,7 +1522,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { // all is well } - expectThrows(ConnectTransportException.class, () -> serviceB.openConnection(nodeA, MockTcpTransport.LIGHT_PROFILE)); + expectThrows(ConnectTransportException.class, () -> serviceB.openConnection(nodeA, TestProfiles.LIGHT_PROFILE)); } public void testMockUnresponsiveRule() throws IOException { @@ -1572,7 +1573,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { // all is well } - expectThrows(ConnectTransportException.class, () -> serviceB.openConnection(nodeA, MockTcpTransport.LIGHT_PROFILE)); + expectThrows(ConnectTransportException.class, () -> serviceB.openConnection(nodeA, TestProfiles.LIGHT_PROFILE)); } @@ -1734,8 +1735,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { TransportService serviceC = build( Settings.builder() .put("name", "TS_TEST") - .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") - .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") + .put(TransportSettings.TRACE_LOG_INCLUDE_SETTING.getKey(), "") + .put(TransportSettings.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") .build(), version0, null, true); @@ -2033,8 +2034,9 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { TcpTransport originalTransport = (TcpTransport) serviceA.getOriginalTransport(); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.emptyList()); - MockTcpTransport transport = new MockTcpTransport(Settings.EMPTY, threadPool, BigArrays.NON_RECYCLING_INSTANCE, - new NoneCircuitBreakerService(), namedWriteableRegistry, new NetworkService(Collections.emptyList())) { + MockNioTransport transport = new MockNioTransport(Settings.EMPTY, Version.CURRENT, threadPool, + new NetworkService(Collections.emptyList()), PageCacheRecycler.NON_RECYCLING_INSTANCE, namedWriteableRegistry, + new NoneCircuitBreakerService()) { @Override protected String handleRequest(TcpChannel mockChannel, String profileName, StreamInput stream, long 
requestId, int messageLengthBytes, Version version, InetSocketAddress remoteAddress, byte status) @@ -2682,7 +2684,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { public void testProfilesIncludesDefault() { Set profileSettings = TcpTransport.getProfileSettings(Settings.EMPTY); assertEquals(1, profileSettings.size()); - assertEquals(TcpTransport.DEFAULT_PROFILE, profileSettings.stream().findAny().get().profileName); + assertEquals(TransportSettings.DEFAULT_PROFILE, profileSettings.stream().findAny().get().profileName); profileSettings = TcpTransport.getProfileSettings(Settings.builder() .put("transport.profiles.test.port", "0") diff --git a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java deleted file mode 100644 index 6b328f18625..00000000000 --- a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java +++ /dev/null @@ -1,477 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
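
With MockTcpTransport deleted below, tests that need a real wire transport construct the NIO-based mock instead, using the same constructor shape the handshake tests above switched to. A standalone sketch, assuming the usual test scaffolding is in scope:

    ThreadPool threadPool = new TestThreadPool("mock-nio-transport");
    try {
        MockNioTransport transport = new MockNioTransport(
            Settings.EMPTY,
            Version.CURRENT,
            threadPool,
            new NetworkService(Collections.emptyList()),
            PageCacheRecycler.NON_RECYCLING_INSTANCE,
            new NamedWriteableRegistry(Collections.emptyList()),
            new NoneCircuitBreakerService());
        // Hand the transport to a MockTransportService, as startServices(...) does above.
    } finally {
        ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS);
    }
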
- */ -package org.elasticsearch.transport; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.Version; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.cli.SuppressForbidden; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.concurrent.CompletableContext; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.InputStreamStreamInput; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.network.NetworkService; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.CancellableThreads; -import org.elasticsearch.common.util.PageCacheRecycler; -import org.elasticsearch.common.util.concurrent.AbstractRunnable; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.core.internal.io.IOUtils; -import org.elasticsearch.indices.breaker.CircuitBreakerService; -import org.elasticsearch.mocksocket.MockServerSocket; -import org.elasticsearch.mocksocket.MockSocket; -import org.elasticsearch.threadpool.ThreadPool; - -import java.io.BufferedInputStream; -import java.io.BufferedOutputStream; -import java.io.Closeable; -import java.io.EOFException; -import java.io.IOException; -import java.io.OutputStream; -import java.net.InetSocketAddress; -import java.net.ServerSocket; -import java.net.Socket; -import java.net.SocketException; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executor; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; - -/** - * This is a socket based blocking TcpTransport implementation that is used for tests - * that need real networking. This implementation is a test only implementation that implements - * the networking layer in the worst possible way since it blocks and uses a thread per request model. - */ -public class MockTcpTransport extends TcpTransport { - private static final Logger logger = LogManager.getLogger(MockTcpTransport.class); - - /** - * A pre-built light connection profile that shares a single connection across all - * types. 
- */ - static final ConnectionProfile LIGHT_PROFILE; - - private final Set openChannels = new HashSet<>(); - - static { - ConnectionProfile.Builder builder = new ConnectionProfile.Builder(); - builder.addConnections(1, - TransportRequestOptions.Type.BULK, - TransportRequestOptions.Type.PING, - TransportRequestOptions.Type.RECOVERY, - TransportRequestOptions.Type.REG, - TransportRequestOptions.Type.STATE); - LIGHT_PROFILE = builder.build(); - } - - private final ExecutorService executor; - - public MockTcpTransport(Settings settings, ThreadPool threadPool, BigArrays bigArrays, - CircuitBreakerService circuitBreakerService, NamedWriteableRegistry namedWriteableRegistry, - NetworkService networkService) { - this(settings, threadPool, bigArrays, circuitBreakerService, namedWriteableRegistry, networkService, Version.CURRENT); - } - - public MockTcpTransport(Settings settings, ThreadPool threadPool, BigArrays bigArrays, - CircuitBreakerService circuitBreakerService, NamedWriteableRegistry namedWriteableRegistry, - NetworkService networkService, Version mockVersion) { - super("mock-tcp-transport", settings, mockVersion, threadPool, PageCacheRecycler.NON_RECYCLING_INSTANCE, circuitBreakerService, - namedWriteableRegistry, networkService); - // we have our own crazy cached threadpool this one is not bounded at all... - // using the ES thread factory here is crucial for tests otherwise disruption tests won't block that thread - executor = Executors.newCachedThreadPool(EsExecutors.daemonThreadFactory(settings, Transports.TEST_MOCK_TRANSPORT_THREAD_PREFIX)); - } - - @Override - protected MockChannel bind(final String name, InetSocketAddress address) throws IOException { - MockServerSocket socket = new MockServerSocket(); - socket.setReuseAddress(TCP_REUSE_ADDRESS.get(settings)); - ByteSizeValue tcpReceiveBufferSize = TCP_RECEIVE_BUFFER_SIZE.get(settings); - if (tcpReceiveBufferSize.getBytes() > 0) { - socket.setReceiveBufferSize(tcpReceiveBufferSize.bytesAsInt()); - } - socket.bind(address); - MockChannel serverMockChannel = new MockChannel(socket, name); - CountDownLatch started = new CountDownLatch(1); - executor.execute(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - onException(serverMockChannel, e); - } - - @Override - protected void doRun() throws Exception { - started.countDown(); - serverMockChannel.accept(executor); - } - }); - try { - started.await(); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } - return serverMockChannel; - } - - private void readMessage(MockChannel mockChannel, StreamInput input) throws IOException { - Socket socket = mockChannel.activeChannel; - byte[] minimalHeader = new byte[TcpHeader.MARKER_BYTES_SIZE + TcpHeader.MESSAGE_LENGTH_SIZE]; - try { - input.readFully(minimalHeader); - } catch (EOFException eof) { - throw new IOException("Connection reset by peer"); - } - - // Read message length will throw stream corrupted exception if the marker bytes incorrect - int msgSize = TcpTransport.readMessageLength(new BytesArray(minimalHeader)); - if (msgSize == -1) { - socket.getOutputStream().flush(); - } else { - final byte[] buffer = new byte[msgSize]; - input.readFully(buffer); - int expectedSize = TcpHeader.MARKER_BYTES_SIZE + TcpHeader.MESSAGE_LENGTH_SIZE + msgSize; - try (BytesStreamOutput output = new ReleasableBytesStreamOutput(expectedSize, bigArrays)) { - output.write(minimalHeader); - output.write(buffer); - consumeNetworkReads(mockChannel, output.bytes()); - } - } - } - - @Override - 
@SuppressForbidden(reason = "real socket for mocking remote connections") - protected MockChannel initiateChannel(DiscoveryNode node) throws IOException { - InetSocketAddress address = node.getAddress().address(); - final MockSocket socket = new MockSocket(); - final MockChannel channel = new MockChannel(socket, address, false, "none"); - - boolean success = false; - try { - configureSocket(socket); - success = true; - } finally { - if (success == false) { - IOUtils.close(socket); - } - } - - executor.submit(() -> { - try { - socket.connect(address); - socket.setSoLinger(false, 0); - channel.connectFuture.complete(null); - channel.loopRead(executor); - } catch (Exception ex) { - channel.connectFuture.completeExceptionally(ex); - } - }); - - return channel; - } - - @Override - protected ConnectionProfile maybeOverrideConnectionProfile(ConnectionProfile connectionProfile) { - ConnectionProfile.Builder builder = new ConnectionProfile.Builder(); - Set allTypesWithConnection = new HashSet<>(); - Set allTypesWithoutConnection = new HashSet<>(); - for (ConnectionProfile.ConnectionTypeHandle handle : connectionProfile.getHandles()) { - Set types = handle.getTypes(); - if (handle.length > 0) { - allTypesWithConnection.addAll(types); - } else { - allTypesWithoutConnection.addAll(types); - } - } - // make sure we maintain at least the types that are supported by this profile even if we only use a single channel for them. - builder.addConnections(1, allTypesWithConnection.toArray(new TransportRequestOptions.Type[0])); - if (allTypesWithoutConnection.isEmpty() == false) { - builder.addConnections(0, allTypesWithoutConnection.toArray(new TransportRequestOptions.Type[0])); - } - builder.setHandshakeTimeout(connectionProfile.getHandshakeTimeout()); - builder.setConnectTimeout(connectionProfile.getConnectTimeout()); - builder.setPingInterval(connectionProfile.getPingInterval()); - builder.setCompressionEnabled(connectionProfile.getCompressionEnabled()); - return builder.build(); - } - - private void configureSocket(Socket socket) throws SocketException { - socket.setTcpNoDelay(TCP_NO_DELAY.get(settings)); - ByteSizeValue tcpSendBufferSize = TCP_SEND_BUFFER_SIZE.get(settings); - if (tcpSendBufferSize.getBytes() > 0) { - socket.setSendBufferSize(tcpSendBufferSize.bytesAsInt()); - } - ByteSizeValue tcpReceiveBufferSize = TCP_RECEIVE_BUFFER_SIZE.get(settings); - if (tcpReceiveBufferSize.getBytes() > 0) { - socket.setReceiveBufferSize(tcpReceiveBufferSize.bytesAsInt()); - } - socket.setReuseAddress(TCP_REUSE_ADDRESS.get(settings)); - } - - public final class MockChannel implements Closeable, TcpChannel, TcpServerChannel { - private final AtomicBoolean isOpen = new AtomicBoolean(true); - private final InetSocketAddress localAddress; - private final ServerSocket serverSocket; - private final Set workerChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); - private final Socket activeChannel; - private final boolean isServer; - private final String profile; - private final CancellableThreads cancellableThreads = new CancellableThreads(); - private final CompletableContext closeFuture = new CompletableContext<>(); - private final CompletableContext connectFuture = new CompletableContext<>(); - private final ChannelStats stats = new ChannelStats(); - - /** - * Constructs a new MockChannel instance intended for handling the actual incoming / outgoing traffic. - * - * @param socket The client socket. Mut not be null. - * @param localAddress Address associated with the corresponding local server socket. 
Must not be null. - * @param profile The associated profile name. - */ - MockChannel(Socket socket, InetSocketAddress localAddress, boolean isServer, String profile) { - this.localAddress = localAddress; - this.activeChannel = socket; - this.isServer = isServer; - this.serverSocket = null; - this.profile = profile; - synchronized (openChannels) { - openChannels.add(this); - } - } - - /** - * Constructs a new MockChannel instance intended for accepting requests. - * - * @param serverSocket The associated server socket. Must not be null. - * @param profile The associated profile name. - */ - MockChannel(ServerSocket serverSocket, String profile) { - this.localAddress = (InetSocketAddress) serverSocket.getLocalSocketAddress(); - this.serverSocket = serverSocket; - this.profile = profile; - this.isServer = false; - this.activeChannel = null; - synchronized (openChannels) { - openChannels.add(this); - } - } - - public void accept(Executor executor) throws IOException { - while (isOpen.get()) { - Socket incomingSocket = serverSocket.accept(); - MockChannel incomingChannel = null; - try { - configureSocket(incomingSocket); - synchronized (this) { - if (isOpen.get()) { - InetSocketAddress localAddress = new InetSocketAddress(incomingSocket.getLocalAddress(), - incomingSocket.getPort()); - incomingChannel = new MockChannel(incomingSocket, localAddress, true, profile); - MockChannel finalIncomingChannel = incomingChannel; - incomingChannel.addCloseListener(new ActionListener() { - @Override - public void onResponse(Void aVoid) { - workerChannels.remove(finalIncomingChannel); - } - - @Override - public void onFailure(Exception e) { - workerChannels.remove(finalIncomingChannel); - } - }); - serverAcceptedChannel(incomingChannel); - //establish a happens-before edge between closing and accepting a new connection - workerChannels.add(incomingChannel); - - // this spawns a new thread immediately, so OK under lock - incomingChannel.loopRead(executor); - // the channel is properly registered and will be cleared by the close code. - incomingSocket = null; - incomingChannel = null; - } - } - } finally { - // ensure we don't leak sockets and channels in the failure case. Note that we null both - // if there are no exceptions so this becomes a no op. - IOUtils.closeWhileHandlingException(incomingSocket, incomingChannel); - } - } - } - - void loopRead(Executor executor) { - executor.execute(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - if (isOpen.get()) { - try { - onException(MockChannel.this, e); - } catch (Exception ex) { - logger.warn("failed on handling exception", ex); - IOUtils.closeWhileHandlingException(MockChannel.this); // pure paranoia - } - } - } - - @Override - protected void doRun() throws Exception { - StreamInput input = new InputStreamStreamInput(new BufferedInputStream(activeChannel.getInputStream())); - // There is a (slim) chance that we get interrupted right after a loop iteration, so check explicitly - while (isOpen.get() && !Thread.currentThread().isInterrupted()) { - cancellableThreads.executeIO(() -> readMessage(MockChannel.this, input)); - } - } - }); - } - - synchronized void close0() throws IOException { - // establish a happens-before edge between closing and accepting a new connection - // we have to sync this entire block to ensure that our openChannels checks work correctly. 
- // The close block below will close all worker channels but if one of the worker channels runs into an exception - // for instance due to a disconnect the handling of this exception might be executed concurrently. - // now if we are in-turn concurrently call close we might not wait for the actual close to happen and that will, down the road - // make the assertion trip that not all channels are closed. - if (isOpen.compareAndSet(true, false)) { - final boolean removedChannel; - synchronized (openChannels) { - removedChannel = openChannels.remove(this); - } - IOUtils.close(serverSocket, activeChannel, () -> IOUtils.close(workerChannels), - () -> cancellableThreads.cancel("channel closed")); - assert removedChannel: "Channel was not removed or removed twice?"; - } - } - - @Override - public String toString() { - return "MockChannel{" + - "profile='" + profile + '\'' + - ", isOpen=" + isOpen + - ", localAddress=" + localAddress + - ", isServerSocket=" + (serverSocket != null) + - '}'; - } - - @Override - public void close() { - try { - close0(); - closeFuture.complete(null); - } catch (IOException e) { - closeFuture.completeExceptionally(e); - } - } - - @Override - public String getProfile() { - return profile; - } - - @Override - public boolean isServerChannel() { - return isServer; - } - - @Override - public void addCloseListener(ActionListener listener) { - closeFuture.addListener(ActionListener.toBiConsumer(listener)); - } - - @Override - public void addConnectListener(ActionListener listener) { - connectFuture.addListener(ActionListener.toBiConsumer(listener)); - } - - @Override - public ChannelStats getChannelStats() { - return stats; - } - - @Override - public boolean isOpen() { - return isOpen.get(); - } - - @Override - public InetSocketAddress getLocalAddress() { - return localAddress; - } - - @Override - public InetSocketAddress getRemoteAddress() { - return (InetSocketAddress) activeChannel.getRemoteSocketAddress(); - } - - @Override - public void sendMessage(BytesReference reference, ActionListener listener) { - try { - synchronized (this) { - OutputStream outputStream = new BufferedOutputStream(activeChannel.getOutputStream()); - reference.writeTo(outputStream); - outputStream.flush(); - } - listener.onResponse(null); - } catch (IOException e) { - listener.onFailure(e); - onException(this, e); - } - } - } - - - @Override - protected void doStart() { - boolean success = false; - try { - if (NetworkService.NETWORK_SERVER.get(settings)) { - // loop through all profiles and start them up, special handling for default one - for (ProfileSettings profileSettings : profileSettings) { - bindServer(profileSettings); - } - } - super.doStart(); - success = true; - } finally { - if (success == false) { - doStop(); - } - } - } - - @Override - protected void stopInternal() { - ThreadPool.terminate(executor, 10, TimeUnit.SECONDS); - synchronized (openChannels) { - assert openChannels.isEmpty() : "there are still open channels: " + openChannels; - } - } -} - diff --git a/test/framework/src/main/java/org/elasticsearch/transport/TestProfiles.java b/test/framework/src/main/java/org/elasticsearch/transport/TestProfiles.java new file mode 100644 index 00000000000..e69bc2fe369 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/transport/TestProfiles.java @@ -0,0 +1,49 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.transport; + +import org.elasticsearch.common.settings.Settings; + +final class TestProfiles { + + private TestProfiles() {} + + /** + * A pre-built light connection profile that shares a single connection across all + * types. + */ + static final ConnectionProfile LIGHT_PROFILE; + + static { + ConnectionProfile source = ConnectionProfile.buildDefaultConnectionProfile(Settings.EMPTY); + ConnectionProfile.Builder builder = new ConnectionProfile.Builder(); + builder.setConnectTimeout(source.getConnectTimeout()); + builder.setHandshakeTimeout(source.getHandshakeTimeout()); + builder.setCompressionEnabled(source.getCompressionEnabled()); + builder.setPingInterval(source.getPingInterval()); + builder.addConnections(1, + TransportRequestOptions.Type.BULK, + TransportRequestOptions.Type.PING, + TransportRequestOptions.Type.RECOVERY, + TransportRequestOptions.Type.REG, + TransportRequestOptions.Type.STATE); + LIGHT_PROFILE = builder.build(); + } +} diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index 05c99ef83d4..52c599c89ba 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -38,7 +38,7 @@ import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.MockHttpTransport; import org.elasticsearch.test.NodeConfigurationSource; import org.elasticsearch.test.discovery.TestZenDiscovery; -import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.TransportSettings; import java.io.IOException; import java.nio.file.Files; @@ -109,7 +109,7 @@ public class InternalTestClusterTests extends ESTestCase { static { clusterUniqueSettings.add(ClusterName.CLUSTER_NAME_SETTING.getKey()); - clusterUniqueSettings.add(TcpTransport.PORT.getKey()); + clusterUniqueSettings.add(TransportSettings.PORT.getKey()); clusterUniqueSettings.add("http.port"); } diff --git a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java deleted file mode 100644 index 5c7fdc10649..00000000000 --- a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
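
TestProfiles.LIGHT_PROFILE takes over the role of the constant that used to live on MockTcpTransport: a single shared channel serves every request type, which is all a handshake-style check needs (bulk or recovery traffic would normally get dedicated channels). A minimal usage sketch, assuming a started transport service, a target discoveryNode and a timeout, mirroring the handshake tests above:

    try (Transport.Connection connection =
             transportService.openConnection(discoveryNode, TestProfiles.LIGHT_PROFILE)) {
        // The returned node carries the name and version reported by the remote side.
        DiscoveryNode remote = transportService.handshake(connection, timeout);
    }
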
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.transport; - -import org.elasticsearch.Version; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.network.NetworkService; -import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.test.transport.MockTransportService; - -import java.util.Collections; - -public class MockTcpTransportTests extends AbstractSimpleTransportTestCase { - - @Override - protected MockTransportService build(Settings settings, Version version, ClusterSettings clusterSettings, boolean doHandshake) { - NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.emptyList()); - Transport transport = new MockTcpTransport(settings, threadPool, BigArrays.NON_RECYCLING_INSTANCE, - new NoneCircuitBreakerService(), namedWriteableRegistry, new NetworkService(Collections.emptyList()), version) { - - @Override - public void executeHandshake(DiscoveryNode node, TcpChannel channel, ConnectionProfile profile, - ActionListener listener) { - if (doHandshake) { - super.executeHandshake(node, channel, profile, listener); - } else { - listener.onResponse(version.minimumCompatibilityVersion()); - } - } - }; - MockTransportService mockTransportService = - MockTransportService.createNewService(settings, transport, version, threadPool, clusterSettings, Collections.emptySet()); - mockTransportService.start(); - return mockTransportService; - } - - @Override - public int channelsPerNodeConnection() { - return 1; - } -} diff --git a/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java index 84b921cf88f..344701b7b43 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java @@ -37,8 +37,8 @@ import org.elasticsearch.transport.BindTransportException; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.TcpChannel; -import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportSettings; import java.io.IOException; import java.net.InetAddress; @@ -78,7 +78,7 @@ public class SimpleMockNioTransportTests extends AbstractSimpleTransportTestCase @Override protected MockTransportService build(Settings settings, Version version, ClusterSettings clusterSettings, boolean doHandshake) { settings = Settings.builder().put(settings) - .put(TcpTransport.PORT.getKey(), "0") + .put(TransportSettings.PORT.getKey(), "0") .build(); MockTransportService transportService = 
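The recurring rename in these test hunks is TcpTransport.PORT becoming TransportSettings.PORT. A small sketch of the relocated setting, mirroring the usage in the surrounding hunks ("0" asks the transport to bind any free port):

    // Settings built against the relocated constant, as the tests here do.
    Settings settings = Settings.builder()
        .put(TransportSettings.PORT.getKey(), "0")
        .build();
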
nioFromThreadPool(settings, threadPool, version, clusterSettings, doHandshake); transportService.start(); @@ -108,7 +108,7 @@ public class SimpleMockNioTransportTests extends AbstractSimpleTransportTestCase int port = serviceA.boundAddress().publishAddress().getPort(); Settings settings = Settings.builder() .put(Node.NODE_NAME_SETTING.getKey(), "foobar") - .put("transport.tcp.port", port) + .put(TransportSettings.PORT.getKey(), port) .build(); ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); BindTransportException bindTransportException = expectThrows(BindTransportException.class, () -> { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java index b586c8f96f5..01117d2575b 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java @@ -328,7 +328,7 @@ public final class CcrLicenseChecker { message.append(indices.length == 1 ? " index " : " indices "); message.append(Arrays.toString(indices)); - HasPrivilegesResponse.ResourcePrivileges resourcePrivileges = response.getIndexPrivileges().get(0); + HasPrivilegesResponse.ResourcePrivileges resourcePrivileges = response.getIndexPrivileges().iterator().next(); for (Map.Entry entry : resourcePrivileges.getPrivileges().entrySet()) { if (entry.getValue() == false) { message.append(", privilege for action ["); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowStatsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowStatsAction.java index aa7a7afeae0..8ab66aec8e8 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowStatsAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowStatsAction.java @@ -22,6 +22,9 @@ import org.elasticsearch.xpack.ccr.Ccr; import org.elasticsearch.xpack.ccr.CcrLicenseChecker; import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Set; @@ -82,12 +85,15 @@ public class TransportFollowStatsAction extends TransportTasksAction< return; } + final Set requestedFollowerIndices = request.indices() != null ? 
+ new HashSet<>(Arrays.asList(request.indices())) : Collections.emptySet(); final Set followerIndices = persistentTasksMetaData.tasks().stream() .filter(persistentTask -> persistentTask.getTaskName().equals(ShardFollowTask.NAME)) .map(persistentTask -> { ShardFollowTask shardFollowTask = (ShardFollowTask) persistentTask.getParams(); return shardFollowTask.getFollowShardId().getIndexName(); }) + .filter(followerIndex -> requestedFollowerIndices.isEmpty() || requestedFollowerIndices.contains(followerIndex)) .collect(Collectors.toSet()); for (final Task task : taskManager.getTasks().values()) { diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrSingleNodeTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrSingleNodeTestCase.java index 439950019a6..417de7cd985 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrSingleNodeTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrSingleNodeTestCase.java @@ -67,19 +67,19 @@ public abstract class CcrSingleNodeTestCase extends ESSingleNodeTestCase { assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); } - protected ResumeFollowAction.Request getResumeFollowRequest() { + protected ResumeFollowAction.Request getResumeFollowRequest(String followerIndex) { ResumeFollowAction.Request request = new ResumeFollowAction.Request(); - request.setFollowerIndex("follower"); + request.setFollowerIndex(followerIndex); request.setMaxRetryDelay(TimeValue.timeValueMillis(10)); request.setReadPollTimeout(TimeValue.timeValueMillis(10)); return request; } - protected PutFollowAction.Request getPutFollowRequest() { + protected PutFollowAction.Request getPutFollowRequest(String leaderIndex, String followerIndex) { PutFollowAction.Request request = new PutFollowAction.Request(); request.setRemoteCluster("local"); - request.setLeaderIndex("leader"); - request.setFollowRequest(getResumeFollowRequest()); + request.setLeaderIndex(leaderIndex); + request.setFollowRequest(getResumeFollowRequest(followerIndex)); return request; } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java index 928584316c5..f8e7eab1c86 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java @@ -49,7 +49,7 @@ public class CcrLicenseIT extends CcrSingleNodeTestCase { } public void testThatFollowingIndexIsUnavailableWithNonCompliantLicense() throws InterruptedException { - final ResumeFollowAction.Request followRequest = getResumeFollowRequest(); + final ResumeFollowAction.Request followRequest = getResumeFollowRequest("follower"); final CountDownLatch latch = new CountDownLatch(1); client().execute( ResumeFollowAction.INSTANCE, @@ -71,7 +71,7 @@ public class CcrLicenseIT extends CcrSingleNodeTestCase { } public void testThatCreateAndFollowingIndexIsUnavailableWithNonCompliantLicense() throws InterruptedException { - final PutFollowAction.Request createAndFollowRequest = getPutFollowRequest(); + final PutFollowAction.Request createAndFollowRequest = getPutFollowRequest("leader", "follower"); final CountDownLatch latch = new CountDownLatch(1); client().execute( PutFollowAction.INSTANCE, diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java 
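The TransportFollowStatsAction change above makes follow-stats filterable by follower index, with an empty indices array meaning "no filter". A self-contained restatement of the new stream predicate, plus the client-side usage exercised by the new FollowStatsIT test further down (client() is the test-base helper):

    // Restates the new filter: an empty request matches every follower index.
    static boolean selected(Set<String> requestedFollowerIndices, String followerIndex) {
        return requestedFollowerIndices.isEmpty() || requestedFollowerIndices.contains(followerIndex);
    }

    // Client side: request stats for a single follower index only.
    FollowStatsAction.StatsRequest statsRequest = new FollowStatsAction.StatsRequest();
    statsRequest.setIndices(new String[] {"follower1"});
    FollowStatsAction.StatsResponses responses =
        client().execute(FollowStatsAction.INSTANCE, statsRequest).actionGet();
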
b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java index aff5a2862e1..0057df49b7c 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java @@ -158,12 +158,13 @@ public class CcrRepositoryIT extends CcrIntegTestCase { public void onResponse(RestoreService.RestoreCompletionResponse restoreCompletionResponse) { if (restoreCompletionResponse.getRestoreInfo() == null) { final Snapshot snapshot = restoreCompletionResponse.getSnapshot(); + final String uuid = restoreCompletionResponse.getUuid(); ClusterStateListener clusterStateListener = new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent changedEvent) { - final RestoreInProgress.Entry prevEntry = restoreInProgress(changedEvent.previousState(), snapshot); - final RestoreInProgress.Entry newEntry = restoreInProgress(changedEvent.state(), snapshot); + final RestoreInProgress.Entry prevEntry = restoreInProgress(changedEvent.previousState(), uuid); + final RestoreInProgress.Entry newEntry = restoreInProgress(changedEvent.state(), uuid); if (prevEntry == null) { // When there is a master failure after a restore has been started, this listener might not be registered // on the current master and as such it might miss some intermediary cluster states due to batching. diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java index 8d096fe1f59..f8de29c616b 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java @@ -31,7 +31,7 @@ public class LocalIndexFollowingIT extends CcrSingleNodeTestCase { assertAcked(client().admin().indices().prepareCreate("leader").setSource(leaderIndexSettings, XContentType.JSON)); ensureGreen("leader"); - final PutFollowAction.Request followRequest = getPutFollowRequest(); + final PutFollowAction.Request followRequest = getPutFollowRequest("leader", "follower"); client().execute(PutFollowAction.INSTANCE, followRequest).get(); final long firstBatchNumDocs = randomIntBetween(2, 64); @@ -61,7 +61,7 @@ public class LocalIndexFollowingIT extends CcrSingleNodeTestCase { client().prepareIndex("leader", "doc").setSource("{}", XContentType.JSON).get(); } - client().execute(ResumeFollowAction.INSTANCE, getResumeFollowRequest()).get(); + client().execute(ResumeFollowAction.INSTANCE, getResumeFollowRequest("follower")).get(); assertBusy(() -> { assertThat(client().prepareSearch("follower").get().getHits().getTotalHits().value, equalTo(firstBatchNumDocs + secondBatchNumDocs + thirdBatchNumDocs)); @@ -73,9 +73,9 @@ public class LocalIndexFollowingIT extends CcrSingleNodeTestCase { final String leaderIndexSettings = getIndexSettings(2, 0, singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "false")); assertAcked(client().admin().indices().prepareCreate("leader-index").setSource(leaderIndexSettings, XContentType.JSON)); - ResumeFollowAction.Request followRequest = getResumeFollowRequest(); + ResumeFollowAction.Request followRequest = getResumeFollowRequest("follower"); followRequest.setFollowerIndex("follower-index"); - PutFollowAction.Request putFollowRequest = getPutFollowRequest(); + PutFollowAction.Request putFollowRequest = getPutFollowRequest("leader", "follower"); 
putFollowRequest.setLeaderIndex("leader-index"); putFollowRequest.setFollowRequest(followRequest); IllegalArgumentException error = expectThrows(IllegalArgumentException.class, @@ -84,8 +84,9 @@ public class LocalIndexFollowingIT extends CcrSingleNodeTestCase { assertThat(client().admin().indices().prepareExists("follower-index").get().isExists(), equalTo(false)); } - private String getIndexSettings(final int numberOfShards, final int numberOfReplicas, - final Map additionalIndexSettings) throws IOException { + public static String getIndexSettings(final int numberOfShards, + final int numberOfReplicas, + final Map additionalIndexSettings) throws IOException { final String settings; try (XContentBuilder builder = jsonBuilder()) { builder.startObject(); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowStatsIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowStatsIT.java index 1c9521d99d4..42901e26971 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowStatsIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowStatsIT.java @@ -9,21 +9,28 @@ package org.elasticsearch.xpack.ccr.action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; -import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xpack.CcrSingleNodeTestCase; import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; +import org.elasticsearch.xpack.core.ccr.action.PauseFollowAction; +import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; +import java.util.Comparator; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; +import static java.util.Collections.singletonMap; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.xpack.ccr.LocalIndexFollowingIT.getIndexSettings; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.collection.IsEmptyCollection.empty; /* * Test scope is important to ensure that other tests added to this suite do not interfere with the expectation in * testStatsWhenNoPersistentTasksMetaDataExists that the cluster state does not contain any persistent tasks metadata. 
*/ -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) public class FollowStatsIT extends CcrSingleNodeTestCase { /** @@ -65,4 +72,40 @@ public class FollowStatsIT extends CcrSingleNodeTestCase { assertTrue(onResponse.get()); } + public void testFollowStatsApiFollowerIndexFiltering() throws Exception { + final String leaderIndexSettings = getIndexSettings(1, 0, + singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true")); + assertAcked(client().admin().indices().prepareCreate("leader1").setSource(leaderIndexSettings, XContentType.JSON)); + ensureGreen("leader1"); + assertAcked(client().admin().indices().prepareCreate("leader2").setSource(leaderIndexSettings, XContentType.JSON)); + ensureGreen("leader2"); + + PutFollowAction.Request followRequest = getPutFollowRequest("leader1", "follower1"); + client().execute(PutFollowAction.INSTANCE, followRequest).get(); + + followRequest = getPutFollowRequest("leader2", "follower2"); + client().execute(PutFollowAction.INSTANCE, followRequest).get(); + + FollowStatsAction.StatsRequest statsRequest = new FollowStatsAction.StatsRequest(); + statsRequest.setIndices(new String[] {"follower1"}); + FollowStatsAction.StatsResponses response = client().execute(FollowStatsAction.INSTANCE, statsRequest).actionGet(); + assertThat(response.getStatsResponses().size(), equalTo(1)); + assertThat(response.getStatsResponses().get(0).status().followerIndex(), equalTo("follower1")); + + statsRequest = new FollowStatsAction.StatsRequest(); + statsRequest.setIndices(new String[] {"follower2"}); + response = client().execute(FollowStatsAction.INSTANCE, statsRequest).actionGet(); + assertThat(response.getStatsResponses().size(), equalTo(1)); + assertThat(response.getStatsResponses().get(0).status().followerIndex(), equalTo("follower2")); + + response = client().execute(FollowStatsAction.INSTANCE, new FollowStatsAction.StatsRequest()).actionGet(); + assertThat(response.getStatsResponses().size(), equalTo(2)); + response.getStatsResponses().sort(Comparator.comparing(o -> o.status().followerIndex())); + assertThat(response.getStatsResponses().get(0).status().followerIndex(), equalTo("follower1")); + assertThat(response.getStatsResponses().get(1).status().followerIndex(), equalTo("follower2")); + + assertAcked(client().execute(PauseFollowAction.INSTANCE, new PauseFollowAction.Request("follower1")).actionGet()); + assertAcked(client().execute(PauseFollowAction.INSTANCE, new PauseFollowAction.Request("follower2")).actionGet()); + } + } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java index 17ce05ccff1..9dc7c6648ee 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java @@ -102,11 +102,11 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest followerGroup.assertAllEqual(indexedDocIds.size() - deleteDocIds.size()); }); shardFollowTask.markAsCompleted(); - assertConsistentHistoryBetweenLeaderAndFollower(leaderGroup, followerGroup); + assertConsistentHistoryBetweenLeaderAndFollower(leaderGroup, followerGroup, true); } } - public void testFailLeaderReplicaShard() throws Exception { + public void testAddRemoveShardOnLeader() throws Exception { try (ReplicationGroup leaderGroup = 
createGroup(1 + randomInt(1)); ReplicationGroup followerGroup = createFollowGroup(randomInt(2))) { leaderGroup.startAll(); @@ -120,33 +120,32 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest leaderSeqNoStats.getMaxSeqNo(), followerSeqNoStats.getGlobalCheckpoint(), followerSeqNoStats.getMaxSeqNo()); - int docCount = 256; - leaderGroup.appendDocs(1); - Runnable task = () -> { - try { - leaderGroup.appendDocs(docCount - 1); - leaderGroup.syncGlobalCheckpoint(); - } catch (Exception e) { - throw new AssertionError(e); + int batches = between(0, 10); + int docCount = 0; + boolean hasPromotion = false; + for (int i = 0; i < batches; i++) { + docCount += leaderGroup.indexDocs(between(1, 5)); + if (leaderGroup.getReplicas().isEmpty() == false && randomInt(100) < 5) { + IndexShard closingReplica = randomFrom(leaderGroup.getReplicas()); + leaderGroup.removeReplica(closingReplica); + closingReplica.close("test", false); + closingReplica.store().close(); + } else if (leaderGroup.getReplicas().isEmpty() == false && rarely()) { + IndexShard newPrimary = randomFrom(leaderGroup.getReplicas()); + leaderGroup.promoteReplicaToPrimary(newPrimary).get(); + hasPromotion = true; + } else if (randomInt(100) < 5) { + leaderGroup.addReplica(); + leaderGroup.startReplicas(1); } - }; - Thread thread = new Thread(task); - thread.start(); - - // Remove and add a new replica - IndexShard luckyReplica = randomFrom(leaderGroup.getReplicas()); - leaderGroup.removeReplica(luckyReplica); - luckyReplica.close("stop replica", false); - luckyReplica.store().close(); - leaderGroup.addReplica(); - leaderGroup.startReplicas(1); - thread.join(); - + leaderGroup.syncGlobalCheckpoint(); + } leaderGroup.assertAllEqual(docCount); assertThat(shardFollowTask.getFailure(), nullValue()); - assertBusy(() -> followerGroup.assertAllEqual(docCount)); + int expectedDoc = docCount; + assertBusy(() -> followerGroup.assertAllEqual(expectedDoc)); shardFollowTask.markAsCompleted(); - assertConsistentHistoryBetweenLeaderAndFollower(leaderGroup, followerGroup); + assertConsistentHistoryBetweenLeaderAndFollower(leaderGroup, followerGroup, hasPromotion == false); } } @@ -288,7 +287,7 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest try { assertBusy(() -> { assertThat(followerGroup.getPrimary().getGlobalCheckpoint(), equalTo(leadingPrimary.getGlobalCheckpoint())); - assertConsistentHistoryBetweenLeaderAndFollower(leaderGroup, followerGroup); + assertConsistentHistoryBetweenLeaderAndFollower(leaderGroup, followerGroup, true); }); } finally { shardFollowTask.markAsCompleted(); @@ -479,7 +478,8 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest }; } - private void assertConsistentHistoryBetweenLeaderAndFollower(ReplicationGroup leader, ReplicationGroup follower) throws Exception { + private void assertConsistentHistoryBetweenLeaderAndFollower(ReplicationGroup leader, ReplicationGroup follower, + boolean assertMaxSeqNoOfUpdatesOrDeletes) throws Exception { final List> docAndSeqNosOnLeader = getDocIdAndSeqNos(leader.getPrimary()).stream() .map(d -> Tuple.tuple(d.getId(), d.getSeqNo())).collect(Collectors.toList()); final Set> operationsOnLeader = new HashSet<>(); @@ -490,7 +490,9 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest } } for (IndexShard followingShard : follower) { - assertThat(followingShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(leader.getPrimary().getMaxSeqNoOfUpdatesOrDeletes())); + if 
(assertMaxSeqNoOfUpdatesOrDeletes) { + assertThat(followingShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(leader.getPrimary().getMaxSeqNoOfUpdatesOrDeletes())); + } List> docAndSeqNosOnFollower = getDocIdAndSeqNos(followingShard).stream() .map(d -> Tuple.tuple(d.getId(), d.getSeqNo())).collect(Collectors.toList()); assertThat(docAndSeqNosOnFollower, equalTo(docAndSeqNosOnLeader)); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java index fbfd0fda228..1b4856487af 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java @@ -318,12 +318,11 @@ public class FollowingEngineTests extends ESTestCase { Engine.Index index = (Engine.Index) op; result = engine.index(new Engine.Index(index.uid(), index.parsedDoc(), index.seqNo(), primaryTerm, index.version(), versionType, origin, index.startTime(), index.getAutoGeneratedIdTimestamp(), index.isRetry(), - SequenceNumbers.UNASSIGNED_SEQ_NO, 0)); + index.getIfSeqNoMatch(), index.getIfPrimaryTermMatch())); } else if (op instanceof Engine.Delete) { Engine.Delete delete = (Engine.Delete) op; result = engine.delete(new Engine.Delete(delete.type(), delete.id(), delete.uid(), delete.seqNo(), primaryTerm, - delete.version(), versionType, origin, delete.startTime(), - SequenceNumbers.UNASSIGNED_SEQ_NO, 0)); + delete.version(), versionType, origin, delete.startTime(), delete.getIfSeqNoMatch(), delete.getIfPrimaryTermMatch())); } else { Engine.NoOp noOp = (Engine.NoOp) op; result = engine.noOp(new Engine.NoOp(noOp.seqNo(), primaryTerm, origin, noOp.startTime(), noOp.reason())); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java index 34016b32666..dbf11026f47 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java @@ -114,7 +114,7 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste public static final String LICENSE_JOB = "licenseJob"; - private static final DateFormatter DATE_FORMATTER = DateFormatter.forPattern("EEEE, MMMMM dd, yyyy", Locale.ROOT); + private static final DateFormatter DATE_FORMATTER = DateFormatter.forPattern("EEEE, MMMMM dd, yyyy"); private static final String ACKNOWLEDGEMENT_HEADER = "This license update requires acknowledgement. 
To acknowledge the license, " + "please read the following messages and update the license again, this time with the \"acknowledge=true\" parameter:"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java index f330789d834..22153ad0b10 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java @@ -13,14 +13,14 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; -import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; +import java.util.TreeSet; /** * Response for a {@link HasPrivilegesRequest} @@ -29,8 +29,8 @@ public class HasPrivilegesResponse extends ActionResponse implements ToXContentO private String username; private boolean completeMatch; private Map cluster; - private List index; - private Map> application; + private Set index; + private Map> application; public HasPrivilegesResponse() { this("", true, Collections.emptyMap(), Collections.emptyList(), Collections.emptyMap()); @@ -41,15 +41,17 @@ public class HasPrivilegesResponse extends ActionResponse implements ToXContentO super(); this.username = username; this.completeMatch = completeMatch; - this.cluster = new HashMap<>(cluster); - this.index = sorted(new ArrayList<>(index)); - this.application = new HashMap<>(); - application.forEach((key, val) -> this.application.put(key, Collections.unmodifiableList(sorted(new ArrayList<>(val))))); + this.cluster = Collections.unmodifiableMap(cluster); + this.index = Collections.unmodifiableSet(sorted(index)); + final Map> applicationPrivileges = new HashMap<>(); + application.forEach((key, val) -> applicationPrivileges.put(key, Collections.unmodifiableSet(sorted(val)))); + this.application = Collections.unmodifiableMap(applicationPrivileges); } - private static List sorted(List resources) { - Collections.sort(resources, Comparator.comparing(o -> o.resource)); - return resources; + private static Set sorted(Collection resources) { + final Set set = new TreeSet<>(Comparator.comparing(o -> o.resource)); + set.addAll(resources); + return set; } public String getUsername() { @@ -61,19 +63,19 @@ public class HasPrivilegesResponse extends ActionResponse implements ToXContentO } public Map getClusterPrivileges() { - return Collections.unmodifiableMap(cluster); + return cluster; } - public List getIndexPrivileges() { - return Collections.unmodifiableList(index); + public Set getIndexPrivileges() { + return index; } /** * Retrieves the results from checking application privileges, * @return A {@code Map} keyed by application-name */ - public Map> getApplicationPrivileges() { - return Collections.unmodifiableMap(application); + public Map> getApplicationPrivileges() { + return application; } @Override @@ -112,15 +114,15 @@ public class HasPrivilegesResponse extends ActionResponse implements ToXContentO } } - private static List readResourcePrivileges(StreamInput in) throws IOException { + private static Set readResourcePrivileges(StreamInput in) throws IOException { final int 
count = in.readVInt(); - final List list = new ArrayList<>(count); + final Set set = new TreeSet<>(Comparator.comparing(o -> o.resource)); for (int i = 0; i < count; i++) { final String index = in.readString(); final Map privileges = in.readMap(StreamInput::readString, StreamInput::readBoolean); - list.add(new ResourcePrivileges(index, privileges)); + set.add(new ResourcePrivileges(index, privileges)); } - return list; + return set; } @Override @@ -139,7 +141,7 @@ public class HasPrivilegesResponse extends ActionResponse implements ToXContentO } } - private static void writeResourcePrivileges(StreamOutput out, List privileges) throws IOException { + private static void writeResourcePrivileges(StreamOutput out, Set privileges) throws IOException { out.writeVInt(privileges.size()); for (ResourcePrivileges priv : privileges) { out.writeString(priv.resource); @@ -179,7 +181,7 @@ public class HasPrivilegesResponse extends ActionResponse implements ToXContentO return builder; } - private void appendResources(XContentBuilder builder, String field, List privileges) + private void appendResources(XContentBuilder builder, String field, Set privileges) throws IOException { builder.startObject(field); for (HasPrivilegesResponse.ResourcePrivileges privilege : privileges) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java index 3712c27c435..4ed4246597b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java @@ -25,7 +25,7 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TcpChannel; -import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.transport.netty4.Netty4Transport; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper; @@ -87,8 +87,8 @@ public class SecurityNetty4Transport extends Netty4Transport { profileConfiguration.put(profileName, configuration); } - if (profileConfiguration.containsKey(TcpTransport.DEFAULT_PROFILE) == false) { - profileConfiguration.put(TcpTransport.DEFAULT_PROFILE, defaultConfiguration); + if (profileConfiguration.containsKey(TransportSettings.DEFAULT_PROFILE) == false) { + profileConfiguration.put(TransportSettings.DEFAULT_PROFILE, defaultConfiguration); } return profileConfiguration; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotShardTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotShardTests.java index 7058724ecf0..f1de51c95b6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotShardTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotShardTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; +import 
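The List-to-Set switch in HasPrivilegesResponse is what allows the earlier CcrLicenseChecker hunk to replace get(0) with iterator().next(): a TreeSet keyed on the resource name iterates in sorted order, so the first element stays deterministic, and duplicate resources collapse. A standalone illustration with plain Strings standing in for ResourcePrivileges (java.util types only):

    // Sorted iteration plus duplicate collapse are the properties relied on.
    Set<String> resources = new TreeSet<>(Comparator.naturalOrder());
    resources.add("index-b");
    resources.add("index-a");
    resources.add("index-a"); // collapses into the existing entry
    assert "index-a".equals(resources.iterator().next());
    assert resources.size() == 2;
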
org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; @@ -47,6 +48,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.seqno.SeqNoStats; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.IndexShardTestCase; @@ -204,7 +206,8 @@ public class SourceOnlySnapshotShardTests extends IndexShardTestCase { shard.refresh("test"); ShardRouting shardRouting = TestShardRouting.newShardRouting(new ShardId("index", "_na_", 0), randomAlphaOfLength(10), true, ShardRoutingState.INITIALIZING, - new RecoverySource.SnapshotRecoverySource(new Snapshot("src_only", snapshotId), Version.CURRENT, indexId.getId())); + new RecoverySource.SnapshotRecoverySource( + UUIDs.randomBase64UUID(), new Snapshot("src_only", snapshotId), Version.CURRENT, indexId.getId())); IndexMetaData metaData = runAsSnapshot(threadPool, () -> repository.getSnapshotIndexMetaData(snapshotId, indexId)); IndexShard restoredShard = newShard(shardRouting, metaData, null, SourceOnlySnapshotRepository.getEngineFactory(), () -> {}); restoredShard.mapperService().merge(shard.indexSettings().getIndexMetaData(), MapperService.MergeReason.MAPPING_RECOVERY); @@ -292,7 +295,7 @@ public class SourceOnlySnapshotShardTests extends IndexShardTestCase { assert source != null : "_source is null but should have been filtered out at snapshot time"; Engine.Result result = targetShard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, source (index, uid.type(), uid.id(), source, XContentHelper.xContentType(source)) - .routing(rootFieldsVisitor.routing()), 1, false); + .routing(rootFieldsVisitor.routing()), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, 1, false); if (result.getResultType() != Engine.Result.Type.SUCCESS) { throw new IllegalStateException("failed applying post restore operation result: " + result .getResultType(), result.getFailure()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForRolloverReadyStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForRolloverReadyStepTests.java index f7378278ffb..46acda7fdeb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForRolloverReadyStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForRolloverReadyStepTests.java @@ -81,10 +81,10 @@ public class WaitForRolloverReadyStepTests extends AbstractStepTestCase TimeValue.parseTimeValue(randomPositiveTimeValue(), "rollover_action_test")); break; case 4: - maxDocs = randomNonNegativeLong(); + maxDocs = randomValueOtherThan(maxDocs, () -> randomNonNegativeLong()); break; default: throw new AssertionError("Illegal randomisation branch"); diff --git a/x-pack/plugin/ilm/qa/rest/src/test/resources/rest-api-spec/test/ilm/40_explain_lifecycle.yml b/x-pack/plugin/ilm/qa/rest/src/test/resources/rest-api-spec/test/ilm/40_explain_lifecycle.yml index 8c8206d8611..dff5ed955ff 100644 --- a/x-pack/plugin/ilm/qa/rest/src/test/resources/rest-api-spec/test/ilm/40_explain_lifecycle.yml +++ b/x-pack/plugin/ilm/qa/rest/src/test/resources/rest-api-spec/test/ilm/40_explain_lifecycle.yml @@ 
-213,51 +213,3 @@ teardown: - is_false: indices.my_index - is_false: indices.my_index2 - is_false: indices.another_index - ---- -"Test new phase still has phase_time": - - - do: - ilm.put_lifecycle: - policy: "mypolicy" - body: | - { - "policy": { - "phases": { - "hot": { - "min_age": "1000s", - "actions": {} - }, - "warm": { - "min_age": "2000s", - "actions": { - "forcemerge": { - "max_num_segments": 10000 - } - } - } - } - } - } - - - do: - indices.create: - index: foo - body: - settings: - index.lifecycle.name: "mypolicy" - - - do: - ilm.explain_lifecycle: - index: "foo" - - - is_true: indices.foo.managed - - match: { indices.foo.index: "foo" } - - match: { indices.foo.policy: "mypolicy" } - - match: { indices.foo.phase: "new" } - - match: { indices.foo.action: "complete" } - - match: { indices.foo.step: "complete" } - - is_true: indices.foo.phase_time_millis - - is_false: indices.foo.failed_step - - is_false: indices.foo.step_info - - is_false: indices.foo.phase_execution diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtils.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtils.java index aacdc3b2cc6..9172de9deda 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtils.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtils.java @@ -34,8 +34,9 @@ public final class FileStructureUtils { private static final int NUM_TOP_HITS = 10; // NUMBER Grok pattern doesn't support scientific notation, so we extend it - private static final Grok NUMBER_GROK = new Grok(Grok.getBuiltinPatterns(), "^%{NUMBER}(?:[eE][+-]?[0-3]?[0-9]{1,2})?$"); - private static final Grok IP_GROK = new Grok(Grok.getBuiltinPatterns(), "^%{IP}$"); + private static final Grok NUMBER_GROK = new Grok(Grok.getBuiltinPatterns(), "^%{NUMBER}(?:[eE][+-]?[0-3]?[0-9]{1,2})?$", + TimeoutChecker.watchdog); + private static final Grok IP_GROK = new Grok(Grok.getBuiltinPatterns(), "^%{IP}$", TimeoutChecker.watchdog); private static final int KEYWORD_MAX_LEN = 256; private static final int KEYWORD_MAX_SPACES = 5; @@ -69,7 +70,7 @@ public final class FileStructureUtils { } // Accept the first match from the first sample that is compatible with all the other samples - for (Tuple candidate : findCandidates(explanation, sampleRecords, overrides)) { + for (Tuple candidate : findCandidates(explanation, sampleRecords, overrides, timeoutChecker)) { boolean allGood = true; for (Map sampleRecord : sampleRecords.subList(1, sampleRecords.size())) { @@ -87,7 +88,8 @@ public final class FileStructureUtils { timeoutChecker.check("timestamp field determination"); - TimestampMatch match = TimestampFormatFinder.findFirstFullMatch(fieldValue.toString(), overrides.getTimestampFormat()); + TimestampMatch match = TimestampFormatFinder.findFirstFullMatch(fieldValue.toString(), overrides.getTimestampFormat(), + timeoutChecker); if (match == null || match.candidateIndex != candidate.v2().candidateIndex) { if (overrides.getTimestampFormat() != null) { throw new IllegalArgumentException("Specified timestamp format [" + overrides.getTimestampFormat() + @@ -111,7 +113,7 @@ public final class FileStructureUtils { } private static List> findCandidates(List explanation, List> sampleRecords, - FileStructureOverrides overrides) { + FileStructureOverrides overrides, TimeoutChecker timeoutChecker) { assert sampleRecords.isEmpty() == false; Map firstRecord = 
sampleRecords.get(0); @@ -130,7 +132,8 @@ public final class FileStructureUtils { if (onlyConsiderField == null || onlyConsiderField.equals(fieldName)) { Object value = field.getValue(); if (value != null) { - TimestampMatch match = TimestampFormatFinder.findFirstFullMatch(value.toString(), overrides.getTimestampFormat()); + TimestampMatch match = TimestampFormatFinder.findFirstFullMatch(value.toString(), overrides.getTimestampFormat(), + timeoutChecker); if (match != null) { Tuple candidate = new Tuple<>(fieldName, match); candidates.add(candidate); @@ -211,7 +214,7 @@ public final class FileStructureUtils { } Collection fieldValuesAsStrings = fieldValues.stream().map(Object::toString).collect(Collectors.toList()); - Map mapping = guessScalarMapping(explanation, fieldName, fieldValuesAsStrings); + Map mapping = guessScalarMapping(explanation, fieldName, fieldValuesAsStrings, timeoutChecker); timeoutChecker.check("mapping determination"); return new Tuple<>(mapping, calculateFieldStats(fieldValuesAsStrings, timeoutChecker)); } @@ -238,10 +241,12 @@ public final class FileStructureUtils { * @param fieldValues Values of the field for which mappings are to be guessed. The guessed * mapping will be compatible with all the provided values. Must not be * empty. + * @param timeoutChecker Will abort the operation if its timeout is exceeded. * @return The sub-section of the index mappings most appropriate for the field, * for example { "type" : "keyword" }. */ - static Map guessScalarMapping(List explanation, String fieldName, Collection fieldValues) { + static Map guessScalarMapping(List explanation, String fieldName, Collection fieldValues, + TimeoutChecker timeoutChecker) { assert fieldValues.isEmpty() == false; @@ -251,11 +256,12 @@ public final class FileStructureUtils { // This checks if a date mapping would be appropriate, and, if so, finds the correct format Iterator iter = fieldValues.iterator(); - TimestampMatch timestampMatch = TimestampFormatFinder.findFirstFullMatch(iter.next()); + TimestampMatch timestampMatch = TimestampFormatFinder.findFirstFullMatch(iter.next(), timeoutChecker); while (timestampMatch != null && iter.hasNext()) { // To be mapped as type date all the values must match the same timestamp format - it is // not acceptable for all values to be dates, but with different formats - if (timestampMatch.equals(TimestampFormatFinder.findFirstFullMatch(iter.next(), timestampMatch.candidateIndex)) == false) { + if (timestampMatch.equals(TimestampFormatFinder.findFirstFullMatch(iter.next(), timestampMatch.candidateIndex, + timeoutChecker)) == false) { timestampMatch = null; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreator.java index 4c6549ad393..6620afcb714 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreator.java @@ -453,7 +453,7 @@ public final class GrokPatternCreator { this.fieldName = fieldName; // The (?m) here has the Ruby meaning, which is equivalent to (?s) in Java grok = new Grok(Grok.getBuiltinPatterns(), "(?m)%{DATA:" + PREFACE + "}" + preBreak + - "%{" + grokPatternName + ":" + VALUE + "}" + postBreak + "%{GREEDYDATA:" + EPILOGUE + "}"); + "%{" + grokPatternName + ":" + VALUE + "}" + postBreak + "%{GREEDYDATA:" + EPILOGUE + "}", 
TimeoutChecker.watchdog); } @Override @@ -472,7 +472,7 @@ public final class GrokPatternCreator { TimeoutChecker timeoutChecker) { Collection values = new ArrayList<>(); for (String snippet : snippets) { - Map captures = grok.captures(snippet); + Map captures = timeoutChecker.grokCaptures(grok, snippet, "full message Grok pattern field extraction"); // If the pattern doesn't match then captures will be null if (captures == null) { throw new IllegalStateException("[%{" + grokPatternName + "}] does not match snippet [" + snippet + "]"); @@ -480,14 +480,13 @@ public final class GrokPatternCreator { prefaces.add(captures.getOrDefault(PREFACE, "").toString()); values.add(captures.getOrDefault(VALUE, "").toString()); epilogues.add(captures.getOrDefault(EPILOGUE, "").toString()); - timeoutChecker.check("full message Grok pattern field extraction"); } String adjustedFieldName = buildFieldName(fieldNameCountStore, fieldName); if (mappings != null) { Map fullMappingType = Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, mappingType); if ("date".equals(mappingType)) { assert values.isEmpty() == false; - TimestampMatch timestampMatch = TimestampFormatFinder.findFirstFullMatch(values.iterator().next()); + TimestampMatch timestampMatch = TimestampFormatFinder.findFirstFullMatch(values.iterator().next(), timeoutChecker); if (timestampMatch != null) { fullMappingType = timestampMatch.getEsDateMappingTypeWithFormat(); } @@ -548,7 +547,7 @@ public final class GrokPatternCreator { throw new IllegalStateException("Cannot process KV matches until a field name has been determined"); } Grok grok = new Grok(Grok.getBuiltinPatterns(), "(?m)%{DATA:" + PREFACE + "}\\b" + - fieldName + "=%{USER:" + VALUE + "}%{GREEDYDATA:" + EPILOGUE + "}"); + fieldName + "=%{USER:" + VALUE + "}%{GREEDYDATA:" + EPILOGUE + "}", TimeoutChecker.watchdog); Collection values = new ArrayList<>(); for (String snippet : snippets) { Map captures = grok.captures(snippet); @@ -563,7 +562,8 @@ public final class GrokPatternCreator { } String adjustedFieldName = buildFieldName(fieldNameCountStore, fieldName); if (mappings != null) { - mappings.put(adjustedFieldName, FileStructureUtils.guessScalarMapping(explanation, adjustedFieldName, values)); + mappings.put(adjustedFieldName, + FileStructureUtils.guessScalarMapping(explanation, adjustedFieldName, values, timeoutChecker)); timeoutChecker.check("mapping determination"); } if (fieldStats != null) { @@ -610,7 +610,7 @@ public final class GrokPatternCreator { private FullMatchGrokPatternCandidate(String grokPattern, String timeField) { this.grokPattern = grokPattern; this.timeField = timeField; - grok = new Grok(Grok.getBuiltinPatterns(), grokPattern); + grok = new Grok(Grok.getBuiltinPatterns(), grokPattern, TimeoutChecker.watchdog); } public String getTimeField() { @@ -640,7 +640,8 @@ public final class GrokPatternCreator { Map> valuesPerField = new HashMap<>(); for (String sampleMessage : sampleMessages) { - Map captures = grok.captures(sampleMessage); + Map captures = timeoutChecker.grokCaptures(grok, sampleMessage, + "full message Grok pattern field extraction"); // If the pattern doesn't match then captures will be null if (captures == null) { throw new IllegalStateException("[" + grokPattern + "] does not match snippet [" + sampleMessage + "]"); @@ -658,7 +659,6 @@ public final class GrokPatternCreator { } }); } - timeoutChecker.check("full message Grok pattern field extraction"); } for (Map.Entry> valuesForField : valuesPerField.entrySet()) { @@ -667,7 +667,7 @@ public final 
class GrokPatternCreator { // Exclude the time field because that will be dropped and replaced with @timestamp if (fieldName.equals(timeField) == false) { mappings.put(fieldName, - FileStructureUtils.guessScalarMapping(explanation, fieldName, valuesForField.getValue())); + FileStructureUtils.guessScalarMapping(explanation, fieldName, valuesForField.getValue(), timeoutChecker)); timeoutChecker.check("mapping determination"); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java index 591a3261282..c61a48beb11 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java @@ -155,7 +155,7 @@ public class TextLogFileStructureFinder implements FileStructureFinder { int remainingLines = sampleLines.length; double differenceBetweenTwoHighestWeights = 0.0; for (String sampleLine : sampleLines) { - TimestampMatch match = TimestampFormatFinder.findFirstMatch(sampleLine, overrides.getTimestampFormat()); + TimestampMatch match = TimestampFormatFinder.findFirstMatch(sampleLine, overrides.getTimestampFormat(), timeoutChecker); if (match != null) { TimestampMatch pureMatch = new TimestampMatch(match.candidateIndex, "", match.jodaTimestampFormats, match.javaTimestampFormats, match.simplePattern, match.grokPatternName, ""); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutChecker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutChecker.java index 30c01882729..f8a9368b842 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutChecker.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutChecker.java @@ -6,14 +6,19 @@ package org.elasticsearch.xpack.ml.filestructurefinder; import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.grok.Grok; +import org.elasticsearch.grok.ThreadWatchdog; import java.io.Closeable; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; /** * This class can be used to keep track of when a long running operation started and @@ -32,9 +37,13 @@ import java.util.concurrent.TimeUnit; */ public class TimeoutChecker implements Closeable { + private static final TimeoutCheckerWatchdog timeoutCheckerWatchdog = new TimeoutCheckerWatchdog(); + public static final ThreadWatchdog watchdog = timeoutCheckerWatchdog; + private final String operation; - private final ScheduledFuture future; private final TimeValue timeout; + private final Thread checkedThread; + private final ScheduledFuture future; private volatile boolean timeoutExceeded; /** @@ -48,6 +57,8 @@ public class TimeoutChecker implements Closeable { public TimeoutChecker(String operation, TimeValue timeout, ScheduledExecutorService scheduler) { this.operation = operation; this.timeout = timeout; + this.checkedThread = 
Thread.currentThread(); + timeoutCheckerWatchdog.add(checkedThread, timeout); this.future = (timeout != null) ? scheduler.schedule(this::setTimeoutExceeded, timeout.nanos(), TimeUnit.NANOSECONDS) : null; } @@ -57,6 +68,7 @@ public class TimeoutChecker implements Closeable { @Override public void close() { FutureUtils.cancel(future); + timeoutCheckerWatchdog.remove(checkedThread); } /** @@ -72,7 +84,80 @@ public class TimeoutChecker implements Closeable { } } + /** + * Wrapper around {@link Grok#captures} that translates any timeout exception + * to the style thrown by this class's {@link #check} method. + * @param grok The grok pattern from which captures are to be extracted. + * @param text The text to match and extract values from. + * @param where Which stage of the operation is currently in progress? + * @return A map containing field names and their respective coerced values that matched. + * @throws ElasticsearchTimeoutException If the operation is found to have taken longer than the permitted time. + */ + public Map grokCaptures(Grok grok, String text, String where) { + + try { + return grok.captures(text); + } finally { + // If a timeout has occurred then this check will overwrite any timeout exception thrown by Grok.captures() and this + // is intentional - the exception from this class makes more sense in the context of the find file structure API + check(where); + } + } + private void setTimeoutExceeded() { timeoutExceeded = true; + timeoutCheckerWatchdog.interruptLongRunningThreadIfRegistered(checkedThread); + } + + /** + * An implementation of the type of watchdog used by the {@link Grok} class to interrupt + * matching operations that take too long. Rather than have a timeout per match operation + * like the {@link ThreadWatchdog.Default} implementation, the interruption is governed by + * a {@link TimeoutChecker} associated with the thread doing the matching. + */ + static class TimeoutCheckerWatchdog implements ThreadWatchdog { + + final ConcurrentHashMap> registry = new ConcurrentHashMap<>(); + + void add(Thread thread, TimeValue timeout) { + Tuple previousValue = registry.put(thread, new Tuple<>(new AtomicBoolean(false), timeout)); + assert previousValue == null; + } + + @Override + public void register() { + Tuple value = registry.get(Thread.currentThread()); + if (value != null) { + boolean wasFalse = value.v1().compareAndSet(false, true); + assert wasFalse; + } + } + + @Override + public long maxExecutionTimeInMillis() { + Tuple value = registry.get(Thread.currentThread()); + return value != null ? 
value.v2().getMillis() : Long.MAX_VALUE; + } + + @Override + public void unregister() { + Tuple value = registry.get(Thread.currentThread()); + if (value != null) { + boolean wasTrue = value.v1().compareAndSet(true, false); + assert wasTrue; + } + } + + void remove(Thread thread) { + Tuple previousValue = registry.remove(thread); + assert previousValue != null; + } + + void interruptLongRunningThreadIfRegistered(Thread thread) { + Tuple value = registry.get(thread); + if (value.v1().get()) { + thread.interrupt(); + } + } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java index 7ed95f656b4..392e7b4e0be 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java @@ -173,20 +173,22 @@ public final class TimestampFormatFinder { /** * Find the first timestamp format that matches part of the supplied value. * @param text The value that the returned timestamp format must exist within. + * @param timeoutChecker Will abort the operation if its timeout is exceeded. * @return The timestamp format, or null if none matches. */ - public static TimestampMatch findFirstMatch(String text) { - return findFirstMatch(text, 0); + public static TimestampMatch findFirstMatch(String text, TimeoutChecker timeoutChecker) { + return findFirstMatch(text, 0, timeoutChecker); } /** * Find the first timestamp format that matches part of the supplied value. * @param text The value that the returned timestamp format must exist within. * @param requiredFormat A timestamp format that any returned match must support. + * @param timeoutChecker Will abort the operation if its timeout is exceeded. * @return The timestamp format, or null if none matches. */ - public static TimestampMatch findFirstMatch(String text, String requiredFormat) { - return findFirstMatch(text, 0, requiredFormat); + public static TimestampMatch findFirstMatch(String text, String requiredFormat, TimeoutChecker timeoutChecker) { + return findFirstMatch(text, 0, requiredFormat, timeoutChecker); } /** @@ -194,10 +196,11 @@ public final class TimestampFormatFinder { * excluding a specified number of candidate formats. * @param text The value that the returned timestamp format must exist within. * @param ignoreCandidates The number of candidate formats to exclude from the search. + * @param timeoutChecker Will abort the operation if its timeout is exceeded. * @return The timestamp format, or null if none matches. */ - public static TimestampMatch findFirstMatch(String text, int ignoreCandidates) { - return findFirstMatch(text, ignoreCandidates, null); + public static TimestampMatch findFirstMatch(String text, int ignoreCandidates, TimeoutChecker timeoutChecker) { + return findFirstMatch(text, ignoreCandidates, null, timeoutChecker); } /** @@ -206,9 +209,10 @@ public final class TimestampFormatFinder { * @param text The value that the returned timestamp format must exist within. * @param ignoreCandidates The number of candidate formats to exclude from the search. * @param requiredFormat A timestamp format that any returned match must support. + * @param timeoutChecker Will abort the operation if its timeout is exceeded. * @return The timestamp format, or null if none matches. 
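Putting the pieces above together: a Grok built with TimeoutChecker.watchdog becomes interruptible, and grokCaptures translates the interruption into the usual ElasticsearchTimeoutException. A minimal sketch, assuming a ScheduledExecutorService named scheduler as in the tests below:

    // One checker per operation and thread; closing it deregisters the thread.
    try (TimeoutChecker timeoutChecker =
             new TimeoutChecker("my operation", TimeValue.timeValueSeconds(5), scheduler)) {
        // The watchdog hook is what lets the checker interrupt a runaway match.
        Grok grok = new Grok(Grok.getBuiltinPatterns(), "%{IP:ip}", TimeoutChecker.watchdog);
        Map<String, Object> captures = timeoutChecker.grokCaptures(grok, "10.0.0.1", "ip extraction");
    }
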
*/ - public static TimestampMatch findFirstMatch(String text, int ignoreCandidates, String requiredFormat) { + public static TimestampMatch findFirstMatch(String text, int ignoreCandidates, String requiredFormat, TimeoutChecker timeoutChecker) { if (ignoreCandidates >= ORDERED_CANDIDATE_FORMATS.size()) { return null; } @@ -229,7 +233,8 @@ public final class TimestampFormatFinder { } } if (quicklyRuledOut == false) { - Map captures = candidate.strictSearchGrok.captures(text); + Map captures = timeoutChecker.grokCaptures(candidate.strictSearchGrok, text, + "timestamp format determination"); if (captures != null) { String preface = captures.getOrDefault(PREFACE, "").toString(); String epilogue = captures.getOrDefault(EPILOGUE, "").toString(); @@ -246,20 +251,22 @@ public final class TimestampFormatFinder { /** * Find the best timestamp format for matching an entire field value. * @param text The value that the returned timestamp format must match in its entirety. + * @param timeoutChecker Will abort the operation if its timeout is exceeded. * @return The timestamp format, or null if none matches. */ - public static TimestampMatch findFirstFullMatch(String text) { - return findFirstFullMatch(text, 0); + public static TimestampMatch findFirstFullMatch(String text, TimeoutChecker timeoutChecker) { + return findFirstFullMatch(text, 0, timeoutChecker); } /** * Find the best timestamp format for matching an entire field value. * @param text The value that the returned timestamp format must match in its entirety. * @param requiredFormat A timestamp format that any returned match must support. + * @param timeoutChecker Will abort the operation if its timeout is exceeded. * @return The timestamp format, or null if none matches. */ - public static TimestampMatch findFirstFullMatch(String text, String requiredFormat) { - return findFirstFullMatch(text, 0, requiredFormat); + public static TimestampMatch findFirstFullMatch(String text, String requiredFormat, TimeoutChecker timeoutChecker) { + return findFirstFullMatch(text, 0, requiredFormat, timeoutChecker); } /** @@ -267,10 +274,11 @@ public final class TimestampFormatFinder { * excluding a specified number of candidate formats. * @param text The value that the returned timestamp format must match in its entirety. * @param ignoreCandidates The number of candidate formats to exclude from the search. + * @param timeoutChecker Will abort the operation if its timeout is exceeded. * @return The timestamp format, or null if none matches. */ - public static TimestampMatch findFirstFullMatch(String text, int ignoreCandidates) { - return findFirstFullMatch(text, ignoreCandidates, null); + public static TimestampMatch findFirstFullMatch(String text, int ignoreCandidates, TimeoutChecker timeoutChecker) { + return findFirstFullMatch(text, ignoreCandidates, null, timeoutChecker); } /** @@ -279,9 +287,11 @@ public final class TimestampFormatFinder { * @param text The value that the returned timestamp format must match in its entirety. * @param ignoreCandidates The number of candidate formats to exclude from the search. * @param requiredFormat A timestamp format that any returned match must support. + * @param timeoutChecker Will abort the operation if its timeout is exceeded. * @return The timestamp format, or null if none matches. 
*/ - public static TimestampMatch findFirstFullMatch(String text, int ignoreCandidates, String requiredFormat) { + public static TimestampMatch findFirstFullMatch(String text, int ignoreCandidates, String requiredFormat, + TimeoutChecker timeoutChecker) { if (ignoreCandidates >= ORDERED_CANDIDATE_FORMATS.size()) { return null; } @@ -290,7 +300,8 @@ public final class TimestampFormatFinder { for (CandidateTimestampFormat candidate : ORDERED_CANDIDATE_FORMATS.subList(ignoreCandidates, ORDERED_CANDIDATE_FORMATS.size())) { if (adjustedRequiredFormat == null || candidate.jodaTimestampFormats.contains(adjustedRequiredFormat) || candidate.javaTimestampFormats.contains(adjustedRequiredFormat)) { - Map captures = candidate.strictFullMatchGrok.captures(text); + Map captures = timeoutChecker.grokCaptures(candidate.strictFullMatchGrok, text, + "timestamp format determination"); if (captures != null) { return makeTimestampMatch(candidate, index, "", text, ""); } @@ -540,8 +551,8 @@ public final class TimestampFormatFinder { this.simplePattern = Pattern.compile(simpleRegex, Pattern.MULTILINE); // The (?m) here has the Ruby meaning, which is equivalent to (?s) in Java this.strictSearchGrok = new Grok(Grok.getBuiltinPatterns(), "(?m)%{DATA:" + PREFACE + "}" + strictGrokPattern + - "%{GREEDYDATA:" + EPILOGUE + "}"); - this.strictFullMatchGrok = new Grok(Grok.getBuiltinPatterns(), "^" + strictGrokPattern + "$"); + "%{GREEDYDATA:" + EPILOGUE + "}", TimeoutChecker.watchdog); + this.strictFullMatchGrok = new Grok(Grok.getBuiltinPatterns(), "^" + strictGrokPattern + "$", TimeoutChecker.watchdog); this.standardGrokPatternName = standardGrokPatternName; assert quickRuleOutIndices.stream() .noneMatch(quickRuleOutIndex -> quickRuleOutIndex < 0 || quickRuleOutIndex >= QUICK_RULE_OUT_PATTERNS.size()); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java index 125aab7e45e..8518096d644 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.filestructurefinder; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.grok.Grok; import org.junit.After; import org.junit.Before; @@ -57,4 +58,37 @@ public class TimeoutCheckerTests extends FileStructureTestCase { }); } } + + public void testWatchdog() { + + assertFalse(Thread.interrupted()); + + TimeValue timeout = TimeValue.timeValueMillis(100); + try (TimeoutChecker timeoutChecker = new TimeoutChecker("watchdog test", timeout, scheduler)) { + + TimeoutChecker.watchdog.register(); + try { + expectThrows(InterruptedException.class, () -> Thread.sleep(10000)); + } finally { + TimeoutChecker.watchdog.unregister(); + } + } + } + + public void testGrokCaptures() throws Exception { + + assertFalse(Thread.interrupted()); + Grok grok = new Grok(Grok.getBuiltinPatterns(), "{%DATA:data}{%GREEDYDATA:greedydata}", TimeoutChecker.watchdog); + + TimeValue timeout = TimeValue.timeValueMillis(1); + try (TimeoutChecker timeoutChecker = new TimeoutChecker("grok captures test", timeout, scheduler)) { + + assertBusy(() -> { + ElasticsearchTimeoutException e = expectThrows(ElasticsearchTimeoutException.class, + () -> 
timeoutChecker.grokCaptures(grok, randomAlphaOfLength(1000000), "should timeout")); + assertEquals("Aborting grok captures test during [should timeout] as it has taken longer than the timeout of [" + + timeout + "]", e.getMessage()); + }); + } + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java index 4b9b0b36c34..6e256680eca 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java @@ -17,10 +17,10 @@ public class TimestampFormatFinderTests extends FileStructureTestCase { public void testFindFirstMatchGivenNoMatch() { - assertNull(TimestampFormatFinder.findFirstMatch("")); - assertNull(TimestampFormatFinder.findFirstMatch("no timestamps in here")); - assertNull(TimestampFormatFinder.findFirstMatch(":::")); - assertNull(TimestampFormatFinder.findFirstMatch("/+")); + assertNull(TimestampFormatFinder.findFirstMatch("", NOOP_TIMEOUT_CHECKER)); + assertNull(TimestampFormatFinder.findFirstMatch("no timestamps in here", NOOP_TIMEOUT_CHECKER)); + assertNull(TimestampFormatFinder.findFirstMatch(":::", NOOP_TIMEOUT_CHECKER)); + assertNull(TimestampFormatFinder.findFirstMatch("/+", NOOP_TIMEOUT_CHECKER)); } public void testFindFirstMatchGivenOnlyIso8601() { @@ -132,23 +132,23 @@ public class TimestampFormatFinderTests extends FileStructureTestCase { public void testFindFirstMatchGivenOnlySystemDate() { assertEquals(new TimestampMatch(26, "", "UNIX_MS", "UNIX_MS", "\\b\\d{13}\\b", "POSINT", ""), - TimestampFormatFinder.findFirstMatch("1526400896374")); + TimestampFormatFinder.findFirstMatch("1526400896374", NOOP_TIMEOUT_CHECKER)); assertEquals(new TimestampMatch(26, "", "UNIX_MS", "UNIX_MS", "\\b\\d{13}\\b", "POSINT", ""), - TimestampFormatFinder.findFirstFullMatch("1526400896374")); + TimestampFormatFinder.findFirstFullMatch("1526400896374", NOOP_TIMEOUT_CHECKER)); assertEquals(new TimestampMatch(27, "", "UNIX", "UNIX", "\\b\\d{10}\\.\\d{3,9}\\b", "NUMBER", ""), - TimestampFormatFinder.findFirstMatch("1526400896.736")); + TimestampFormatFinder.findFirstMatch("1526400896.736", NOOP_TIMEOUT_CHECKER)); assertEquals(new TimestampMatch(27, "", "UNIX", "UNIX", "\\b\\d{10}\\.\\d{3,9}\\b", "NUMBER", ""), - TimestampFormatFinder.findFirstFullMatch("1526400896.736")); + TimestampFormatFinder.findFirstFullMatch("1526400896.736", NOOP_TIMEOUT_CHECKER)); assertEquals(new TimestampMatch(28, "", "UNIX", "UNIX", "\\b\\d{10}\\b", "POSINT", ""), - TimestampFormatFinder.findFirstMatch("1526400896")); + TimestampFormatFinder.findFirstMatch("1526400896", NOOP_TIMEOUT_CHECKER)); assertEquals(new TimestampMatch(28, "", "UNIX", "UNIX", "\\b\\d{10}\\b", "POSINT", ""), - TimestampFormatFinder.findFirstFullMatch("1526400896")); + TimestampFormatFinder.findFirstFullMatch("1526400896", NOOP_TIMEOUT_CHECKER)); assertEquals(new TimestampMatch(29, "", "TAI64N", "TAI64N", "\\b[0-9A-Fa-f]{24}\\b", "BASE16NUM", ""), - TimestampFormatFinder.findFirstMatch("400000005afb159a164ac980")); + TimestampFormatFinder.findFirstMatch("400000005afb159a164ac980", NOOP_TIMEOUT_CHECKER)); assertEquals(new TimestampMatch(29, "", "TAI64N", "TAI64N", "\\b[0-9A-Fa-f]{24}\\b", "BASE16NUM", ""), - TimestampFormatFinder.findFirstFullMatch("400000005afb159a164ac980")); + 
TimestampFormatFinder.findFirstFullMatch("400000005afb159a164ac980", NOOP_TIMEOUT_CHECKER)); } public void testFindFirstMatchGivenRealLogMessages() { @@ -157,45 +157,47 @@ public class TimestampFormatFinderTests extends FileStructureTestCase { "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", "][INFO ][o.e.e.NodeEnvironment ] [node-0] heap size [3.9gb], compressed ordinary object pointers [true]"), TimestampFormatFinder.findFirstMatch("[2018-05-11T17:07:29,553][INFO ][o.e.e.NodeEnvironment ] [node-0] " + - "heap size [3.9gb], compressed ordinary object pointers [true]")); + "heap size [3.9gb], compressed ordinary object pointers [true]", NOOP_TIMEOUT_CHECKER)); assertEquals(new TimestampMatch(23, "192.168.62.101 - - [", "dd/MMM/YYYY:HH:mm:ss Z", "dd/MMM/yyyy:HH:mm:ss XX", "\\b\\d{2}/[A-Z]\\S{2}/\\d{4}:\\d{2}:\\d{2}:\\d{2} ", "HTTPDATE", "] \"POST //apiserv:8080/engine/v2/jobs HTTP/1.1\" 201 42 \"-\" \"curl/7.46.0\" 384"), TimestampFormatFinder.findFirstMatch("192.168.62.101 - - [29/Jun/2016:12:11:31 +0000] " + - "\"POST //apiserv:8080/engine/v2/jobs HTTP/1.1\" 201 42 \"-\" \"curl/7.46.0\" 384")); + "\"POST //apiserv:8080/engine/v2/jobs HTTP/1.1\" 201 42 \"-\" \"curl/7.46.0\" 384", NOOP_TIMEOUT_CHECKER)); assertEquals(new TimestampMatch(24, "", "MMM dd, YYYY h:mm:ss a", "MMM dd, yyyy h:mm:ss a", "\\b[A-Z]\\S{2,8} \\d{1,2}, \\d{4} \\d{1,2}:\\d{2}:\\d{2} [AP]M\\b", "CATALINA_DATESTAMP", " org.apache.tomcat.util.http.Parameters processParameters"), - TimestampFormatFinder.findFirstMatch("Aug 29, 2009 12:03:57 AM org.apache.tomcat.util.http.Parameters processParameters")); + TimestampFormatFinder.findFirstMatch("Aug 29, 2009 12:03:57 AM org.apache.tomcat.util.http.Parameters processParameters", + NOOP_TIMEOUT_CHECKER)); assertEquals(new TimestampMatch(22, "", Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{2}:\\d{2}:\\d{2}\\b", "SYSLOGTIMESTAMP", " esxi1.acme.com Vpxa: " + "[3CB3FB90 verbose 'vpxavpxaInvtVm' opID=WFU-33d82c31] [VpxaInvtVmChangeListener] Guest DiskInfo Changed"), TimestampFormatFinder.findFirstMatch("Oct 19 17:04:44 esxi1.acme.com Vpxa: [3CB3FB90 verbose 'vpxavpxaInvtVm' " + - "opID=WFU-33d82c31] [VpxaInvtVmChangeListener] Guest DiskInfo Changed")); + "opID=WFU-33d82c31] [VpxaInvtVmChangeListener] Guest DiskInfo Changed", NOOP_TIMEOUT_CHECKER)); assertEquals(new TimestampMatch(10, "559550912540598297\t", "ISO8601", "ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", "\t2016-04-20T21:06:53Z\t38545844\tserv02nw07\t192.168.114.28\tAuthpriv\tInfo\tsshd\tsubsystem request for sftp"), TimestampFormatFinder.findFirstMatch("559550912540598297\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t38545844\tserv02nw07\t" + - "192.168.114.28\tAuthpriv\tInfo\tsshd\tsubsystem request for sftp")); + "192.168.114.28\tAuthpriv\tInfo\tsshd\tsubsystem request for sftp", NOOP_TIMEOUT_CHECKER)); assertEquals(new TimestampMatch(22, "", Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{2}:\\d{2}:\\d{2}\\b", "SYSLOGTIMESTAMP", " dnsserv named[22529]: error (unexpected RCODE REFUSED) resolving 'www.elastic.co/A/IN': 95.110.68.206#53"), TimestampFormatFinder.findFirstMatch("Sep 8 11:55:35 dnsserv named[22529]: error (unexpected RCODE REFUSED) resolving " + - "'www.elastic.co/A/IN': 95.110.68.206#53")); + "'www.elastic.co/A/IN': 95.110.68.206#53", NOOP_TIMEOUT_CHECKER)); 
assertEquals(new TimestampMatch(3, "", "YYYY-MM-dd HH:mm:ss.SSSSSS", "yyyy-MM-dd HH:mm:ss.SSSSSS", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3}", "TIMESTAMP_ISO8601", "|INFO |VirtualServer |1 |client 'User1'(id:2) was added to channelgroup 'Channel Admin'(id:5) by client " + "'User1'(id:2) in channel '3er Instanz'(id:2)"), TimestampFormatFinder.findFirstMatch("2018-01-06 19:22:20.106822|INFO |VirtualServer |1 |client " + - " 'User1'(id:2) was added to channelgroup 'Channel Admin'(id:5) by client 'User1'(id:2) in channel '3er Instanz'(id:2)")); + " 'User1'(id:2) was added to channelgroup 'Channel Admin'(id:5) by client 'User1'(id:2) in channel '3er Instanz'(id:2)", + NOOP_TIMEOUT_CHECKER)); // Differs from the above as the required format is specified assertEquals(new TimestampMatch(3, "", "YYYY-MM-dd HH:mm:ss.SSSSSS", "yyyy-MM-dd HH:mm:ss.SSSSSS", @@ -204,12 +206,12 @@ public class TimestampFormatFinderTests extends FileStructureTestCase { "'User1'(id:2) in channel '3er Instanz'(id:2)"), TimestampFormatFinder.findFirstMatch("2018-01-06 19:22:20.106822|INFO |VirtualServer |1 |client " + " 'User1'(id:2) was added to channelgroup 'Channel Admin'(id:5) by client 'User1'(id:2) in channel '3er Instanz'(id:2)", - randomFrom("YYYY-MM-dd HH:mm:ss.SSSSSS", "yyyy-MM-dd HH:mm:ss.SSSSSS"))); + randomFrom("YYYY-MM-dd HH:mm:ss.SSSSSS", "yyyy-MM-dd HH:mm:ss.SSSSSS"), NOOP_TIMEOUT_CHECKER)); // Non-matching required format specified assertNull(TimestampFormatFinder.findFirstMatch("2018-01-06 19:22:20.106822|INFO |VirtualServer |1 |client " + " 'User1'(id:2) was added to channelgroup 'Channel Admin'(id:5) by client 'User1'(id:2) in channel '3er Instanz'(id:2)", - randomFrom("UNIX", "EEE MMM dd YYYY HH:mm zzz"))); + randomFrom("UNIX", "EEE MMM dd YYYY HH:mm zzz"), NOOP_TIMEOUT_CHECKER)); } public void testAdjustRequiredFormat() { @@ -246,18 +248,20 @@ public class TimestampFormatFinderTests extends FileStructureTestCase { private void validateTimestampMatch(TimestampMatch expected, String text, long expectedEpochMs) { - assertEquals(expected, TimestampFormatFinder.findFirstMatch(text)); - assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text)); - assertEquals(expected, TimestampFormatFinder.findFirstMatch(text, expected.candidateIndex)); - assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text, expected.candidateIndex)); - assertNull(TimestampFormatFinder.findFirstMatch(text, Integer.MAX_VALUE)); - assertNull(TimestampFormatFinder.findFirstFullMatch(text, Integer.MAX_VALUE)); - assertEquals(expected, TimestampFormatFinder.findFirstMatch(text, randomFrom(expected.jodaTimestampFormats))); - assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text, randomFrom(expected.jodaTimestampFormats))); - assertEquals(expected, TimestampFormatFinder.findFirstMatch(text, randomFrom(expected.javaTimestampFormats))); - assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text, randomFrom(expected.javaTimestampFormats))); - assertNull(TimestampFormatFinder.findFirstMatch(text, "wrong format")); - assertNull(TimestampFormatFinder.findFirstFullMatch(text, "wrong format")); + assertEquals(expected, TimestampFormatFinder.findFirstMatch(text, NOOP_TIMEOUT_CHECKER)); + assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text, NOOP_TIMEOUT_CHECKER)); + assertEquals(expected, TimestampFormatFinder.findFirstMatch(text, expected.candidateIndex, NOOP_TIMEOUT_CHECKER)); + assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text, expected.candidateIndex, 
NOOP_TIMEOUT_CHECKER)); + assertNull(TimestampFormatFinder.findFirstMatch(text, Integer.MAX_VALUE, NOOP_TIMEOUT_CHECKER)); + assertNull(TimestampFormatFinder.findFirstFullMatch(text, Integer.MAX_VALUE, NOOP_TIMEOUT_CHECKER)); + assertEquals(expected, TimestampFormatFinder.findFirstMatch(text, randomFrom(expected.jodaTimestampFormats), NOOP_TIMEOUT_CHECKER)); + assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text, randomFrom(expected.jodaTimestampFormats), + NOOP_TIMEOUT_CHECKER)); + assertEquals(expected, TimestampFormatFinder.findFirstMatch(text, randomFrom(expected.javaTimestampFormats), NOOP_TIMEOUT_CHECKER)); + assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text, randomFrom(expected.javaTimestampFormats), + NOOP_TIMEOUT_CHECKER)); + assertNull(TimestampFormatFinder.findFirstMatch(text, "wrong format", NOOP_TIMEOUT_CHECKER)); + assertNull(TimestampFormatFinder.findFirstFullMatch(text, "wrong format", NOOP_TIMEOUT_CHECKER)); validateJodaTimestampFormats(expected.jodaTimestampFormats, text, expectedEpochMs); validateJavaTimestampFormats(expected.javaTimestampFormats, text, expectedEpochMs); diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java index 70ac5a0f7a7..293a8a5e325 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java @@ -71,7 +71,6 @@ import java.util.stream.Collectors; import static org.elasticsearch.common.Strings.collectionToCommaDelimitedString; import static org.elasticsearch.xpack.core.ClientHelper.MONITORING_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils.LAST_UPDATED_VERSION; import static org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils.PIPELINE_IDS; import static org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils.TEMPLATE_VERSION; @@ -145,7 +144,11 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle if (state.get() != State.RUNNING) { listener.onResponse(null); } else { - listener.onResponse(resolveBulk(clusterService.state(), false)); + try { + listener.onResponse(resolveBulk(clusterService.state(), false)); + } catch (Exception e) { + listener.onFailure(e); + } } } @@ -314,7 +317,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle if (asyncActions.size() > 0) { if (installingSomething.compareAndSet(false, true)) { pendingResponses.set(asyncActions.size()); - try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), MONITORING_ORIGIN)) { + try (ThreadContext.StoredContext ignore = client.threadPool().getThreadContext().stashWithOrigin(MONITORING_ORIGIN)) { asyncActions.forEach(Runnable::run); } } else { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClient.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClient.java index f69c3f6893f..cb0bef38207 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClient.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClient.java @@ -137,7 +137,7 @@ public class CommandLineHttpClient { final String scheme = XPackSettings.HTTP_SSL_ENABLED.get(settings) ? "https" : "http"; List httpPublishHost = SETTING_HTTP_PUBLISH_HOST.get(settings); if (httpPublishHost.isEmpty()) { - httpPublishHost = NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING.get(settings); + httpPublishHost = NetworkService.GLOBAL_NETWORK_PUBLISH_HOST_SETTING.get(settings); } // we cannot do custom name resolution here... diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java index 10172ff95e8..c9f60dce95c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java @@ -18,7 +18,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.xpack.security.audit.AuditTrailService; import java.net.InetSocketAddress; @@ -128,7 +128,7 @@ public class IPFilter { isHttpFilterEnabled = IP_FILTER_ENABLED_HTTP_SETTING.get(settings); isIpFilterEnabled = IP_FILTER_ENABLED_SETTING.get(settings); - this.profiles = settings.getGroups("transport.profiles.",true).keySet().stream().filter(k -> TcpTransport + this.profiles = settings.getGroups("transport.profiles.",true).keySet().stream().filter(k -> TransportSettings .DEFAULT_PROFILE.equals(k) == false).collect(Collectors.toSet()); // exclude default profile -- it's handled differently for (String profile : profiles) { Setting> allowSetting = PROFILE_FILTER_ALLOW_SETTING.getConcreteSettingForNamespace(profile); @@ -237,7 +237,7 @@ public class IPFilter { if (isIpFilterEnabled && boundTransportAddress.get() != null) { TransportAddress[] localAddresses = boundTransportAddress.get().boundAddresses(); - profileRules.put(TcpTransport.DEFAULT_PROFILE, createRules(transportAllowFilter, transportDenyFilter, localAddresses)); + profileRules.put(TransportSettings.DEFAULT_PROFILE, createRules(transportAllowFilter, transportDenyFilter, localAddresses)); for (String profile : profiles) { BoundTransportAddress profileBoundTransportAddress = profileBoundAddress.get().get(profile); if (profileBoundTransportAddress == null) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java index 0642f635ed0..b536644ad61 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java @@ -28,7 +28,7 @@ import org.elasticsearch.nio.SocketChannelContext; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TcpChannel; -import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.transport.nio.NioTcpChannel; import org.elasticsearch.transport.nio.NioTcpServerChannel; import org.elasticsearch.transport.nio.NioTransport; @@ -214,7 +214,7 @@ public class SecurityNioTransport extends NioTransport { protected SSLEngine createSSLEngine(SocketChannel channel) throws IOException { SSLEngine sslEngine; - SSLConfiguration defaultConfig = profileConfiguration.get(TcpTransport.DEFAULT_PROFILE); + SSLConfiguration defaultConfig = profileConfiguration.get(TransportSettings.DEFAULT_PROFILE); SSLConfiguration sslConfig = profileConfiguration.getOrDefault(profileName, defaultConfig); boolean hostnameVerificationEnabled = sslConfig.verificationMode().isHostnameVerificationEnabled(); if (hostnameVerificationEnabled) { @@ -233,7 +233,7 @@ public class SecurityNioTransport extends NioTransport { private final SNIHostName serverName; private SecurityClientTcpChannelFactory(RawChannelFactory rawChannelFactory, SNIHostName serverName) { - super(rawChannelFactory, TcpTransport.DEFAULT_PROFILE, true); + super(rawChannelFactory, TransportSettings.DEFAULT_PROFILE, true); this.serverName = serverName; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java index 2da066b56ee..00ee02aaf00 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java @@ -44,9 +44,11 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; +import java.util.Set; import static java.util.Collections.emptyMap; import static org.elasticsearch.common.util.set.Sets.newHashSet; @@ -136,7 +138,7 @@ public class TransportHasPrivilegesActionTests extends ESTestCase { assertThat(response.getClusterPrivileges().get(ClusterHealthAction.NAME), equalTo(true)); assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(1)); - final ResourcePrivileges result = response.getIndexPrivileges().get(0); + final ResourcePrivileges result = response.getIndexPrivileges().iterator().next(); assertThat(result.getResource(), equalTo("academy")); assertThat(result.getPrivileges().size(), equalTo(2)); assertThat(result.getPrivileges().get(DeleteAction.NAME), equalTo(true)); @@ -174,9 +176,10 @@ public class TransportHasPrivilegesActionTests extends ESTestCase { assertThat(response.getClusterPrivileges().get("manage"), equalTo(false)); assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(3)); - final ResourcePrivileges academy = response.getIndexPrivileges().get(0); - final ResourcePrivileges initiative = response.getIndexPrivileges().get(1); - final ResourcePrivileges school = response.getIndexPrivileges().get(2); + final Iterator indexPrivilegesIterator = response.getIndexPrivileges().iterator(); + final ResourcePrivileges academy = indexPrivilegesIterator.next(); + final ResourcePrivileges initiative = indexPrivilegesIterator.next(); 
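+        // Illustrative note (not part of this change): consuming the iterator in order assumes that
+        // getIndexPrivileges() returns a set with a predictable iteration order (academy, initiative, school).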
+ final ResourcePrivileges school = indexPrivilegesIterator.next(); assertThat(academy.getResource(), equalTo("academy")); assertThat(academy.getPrivileges().size(), equalTo(3)); @@ -213,7 +216,7 @@ public class TransportHasPrivilegesActionTests extends ESTestCase { assertThat(response.getUsername(), is(user.principal())); assertThat(response.isCompleteMatch(), is(false)); assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(1)); - final ResourcePrivileges result = response.getIndexPrivileges().get(0); + final ResourcePrivileges result = response.getIndexPrivileges().iterator().next(); assertThat(result.getResource(), equalTo("academy")); assertThat(result.getPrivileges().size(), equalTo(2)); assertThat(result.getPrivileges().get("read"), equalTo(false)); @@ -309,7 +312,7 @@ public class TransportHasPrivilegesActionTests extends ESTestCase { new ResourcePrivileges("a*xyz", mapBuilder().put("read", false).put("write", true).put("manage", false).map()) )); assertThat(response.getApplicationPrivileges().entrySet(), Matchers.iterableWithSize(1)); - final List kibanaPrivileges = response.getApplicationPrivileges().get("kibana"); + final Set kibanaPrivileges = response.getApplicationPrivileges().get("kibana"); assertThat(kibanaPrivileges, Matchers.iterableWithSize(3)); assertThat(Strings.collectionToCommaDelimitedString(kibanaPrivileges), kibanaPrivileges, containsInAnyOrder( new ResourcePrivileges("*", mapBuilder().put("read", true).put("write", false).map()), @@ -378,7 +381,7 @@ public class TransportHasPrivilegesActionTests extends ESTestCase { assertThat(response.isCompleteMatch(), is(false)); assertThat(response.getIndexPrivileges(), Matchers.emptyIterable()); assertThat(response.getApplicationPrivileges().entrySet(), Matchers.iterableWithSize(2)); - final List app1 = response.getApplicationPrivileges().get("app1"); + final Set app1 = response.getApplicationPrivileges().get("app1"); assertThat(app1, Matchers.iterableWithSize(4)); assertThat(Strings.collectionToCommaDelimitedString(app1), app1, containsInAnyOrder( new ResourcePrivileges("foo/1", MapBuilder.newMapBuilder(new LinkedHashMap()) @@ -390,7 +393,7 @@ public class TransportHasPrivilegesActionTests extends ESTestCase { new ResourcePrivileges("baz/bar/foo", MapBuilder.newMapBuilder(new LinkedHashMap()) .put("read", false).put("write", false).put("all", false).map()) )); - final List app2 = response.getApplicationPrivileges().get("app2"); + final Set app2 = response.getApplicationPrivileges().get("app2"); assertThat(app2, Matchers.iterableWithSize(4)); assertThat(Strings.collectionToCommaDelimitedString(app2), app2, containsInAnyOrder( new ResourcePrivileges("foo/1", MapBuilder.newMapBuilder(new LinkedHashMap()) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/AbstractSimpleSecurityTransportTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/AbstractSimpleSecurityTransportTestCase.java index af865914b6b..e01fecf97e1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/AbstractSimpleSecurityTransportTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/AbstractSimpleSecurityTransportTestCase.java @@ -26,6 +26,7 @@ import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; +import 
org.elasticsearch.transport.TransportSettings; import org.elasticsearch.xpack.core.common.socket.SocketAccess; import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLService; @@ -115,9 +116,9 @@ public abstract class AbstractSimpleSecurityTransportTestCase extends AbstractSi int port = serviceA.boundAddress().publishAddress().getPort(); Settings settings = Settings.builder() .put(Node.NODE_NAME_SETTING.getKey(), "foobar") - .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") - .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") - .put("transport.tcp.port", port) + .put(TransportSettings.TRACE_LOG_INCLUDE_SETTING.getKey(), "") + .put(TransportSettings.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") + .put(TransportSettings.PORT.getKey(), port) .build(); ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); BindTransportException bindTransportException = expectThrows(BindTransportException.class, () -> { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java index 17df337d291..dd340cb5839 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java @@ -19,9 +19,9 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; @@ -65,7 +65,7 @@ public class ServerTransportFilterTests extends ESTestCase { authcService = mock(AuthenticationService.class); authzService = mock(AuthorizationService.class); channel = mock(TransportChannel.class); - when(channel.getProfileName()).thenReturn(TcpTransport.DEFAULT_PROFILE); + when(channel.getProfileName()).thenReturn(TransportSettings.DEFAULT_PROFILE); when(channel.getVersion()).thenReturn(Version.CURRENT); failDestructiveOperations = randomBoolean(); Settings settings = Settings.builder() diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IPHostnameVerificationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IPHostnameVerificationTests.java index bc674ae1aa0..130fa226039 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IPHostnameVerificationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IPHostnameVerificationTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; -import org.elasticsearch.transport.TcpTransport; +import 
org.elasticsearch.transport.TransportSettings; import org.elasticsearch.xpack.core.ssl.SSLClientAuth; import java.nio.file.Files; @@ -61,7 +61,7 @@ public class IPHostnameVerificationTests extends SecurityIntegTestCase { return settingsBuilder.put("xpack.ssl.key", keyPath.toAbsolutePath()) .put("xpack.ssl.certificate", certPath.toAbsolutePath()) .put("xpack.ssl.certificate_authorities", certPath.toAbsolutePath()) - .put(TcpTransport.BIND_HOST.getKey(), "127.0.0.1") + .put(TransportSettings.BIND_HOST.getKey(), "127.0.0.1") .put("network.host", "127.0.0.1") .put("xpack.ssl.client_authentication", SSLClientAuth.NONE) .put("xpack.ssl.verification_mode", "full") diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java index d5a5cedc19a..f70a286efe0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java @@ -18,8 +18,8 @@ import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.TcpChannel; -import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.xpack.security.transport.AbstractSimpleSecurityTransportTestCase; import java.util.Collections; @@ -56,9 +56,9 @@ public class SimpleSecurityNetty4ServerTransportTests extends AbstractSimpleSecu @Override protected MockTransportService build(Settings settings, Version version, ClusterSettings clusterSettings, boolean doHandshake) { - if (TcpTransport.PORT.exists(settings) == false) { + if (TransportSettings.PORT.exists(settings) == false) { settings = Settings.builder().put(settings) - .put(TcpTransport.PORT.getKey(), "0") + .put(TransportSettings.PORT.getKey(), "0") .build(); } MockTransportService transportService = nettyFromThreadPool(settings, threadPool, version, clusterSettings, doHandshake); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java index 11d2e2a9848..84d68ffd63e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java @@ -18,8 +18,8 @@ import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.TcpChannel; -import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.xpack.security.transport.AbstractSimpleSecurityTransportTestCase; import java.util.Collections; @@ -55,9 +55,9 @@ public class SimpleSecurityNioTransportTests extends AbstractSimpleSecurityTrans @Override protected 
MockTransportService build(Settings settings, Version version, ClusterSettings clusterSettings, boolean doHandshake) { - if (TcpTransport.PORT.exists(settings) == false) { + if (TransportSettings.PORT.exists(settings) == false) { settings = Settings.builder().put(settings) - .put(TcpTransport.PORT.getKey(), "0") + .put(TransportSettings.PORT.getKey(), "0") .build(); } MockTransportService transportService = nioFromThreadPool(settings, threadPool, version, clusterSettings, doHandshake); diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcResultSet.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcResultSet.java index 62afe6c8f5f..4ef9ab4c829 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcResultSet.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcResultSet.java @@ -122,8 +122,7 @@ class JdbcResultSet implements ResultSet, JdbcWrapper { @Override public String getString(int columnIndex) throws SQLException { - Object val = column(columnIndex); - return val != null ? val.toString() : null; + return getObject(columnIndex, String.class); } @Override diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java index 638e31d02e5..a287d77191c 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java @@ -5,6 +5,8 @@ */ package org.elasticsearch.xpack.sql.jdbc; +import org.elasticsearch.xpack.sql.proto.StringUtils; + import java.sql.Date; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; @@ -118,10 +120,11 @@ final class TypeConverter { return (T) convert(val, columnType, typeString); } - // converting a Long to a Timestamp shouldn't be possible according to the spec, - // it feels a little brittle to check this scenario here and I don't particularly like it - // TODO: can we do any better or should we go over the spec and allow getLong(date) to be valid? - if (!(type == Long.class && columnType == EsType.DATE) && type.isInstance(val)) { + // if the value type is the same as the target, no conversion is needed + // make sure though to check the internal type against the desired one + // since otherwise the internal object format can leak out + // (for example dates when longs are requested or intervals for strings) + if (type.isInstance(val) && TypeUtils.classOf(columnType) == type) { try { return type.cast(val); } catch (ClassCastException cce) { @@ -268,7 +271,7 @@ final class TypeConverter { } private static String asString(Object nativeValue) { - return nativeValue == null ? null : String.valueOf(nativeValue); + return nativeValue == null ? 
null : StringUtils.toString(nativeValue); } private static T failConversion(Object value, EsType columnType, String typeString, Class target) throws SQLException { diff --git a/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java index 4296c5ae069..c2027ccbfcc 100644 --- a/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java +++ b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java @@ -26,8 +26,7 @@ public class CliExplainIT extends CliIntegrationTestCase { assertThat(command("EXPLAIN " + (randomBoolean() ? "" : "(PLAN ANALYZED) ") + "SELECT * FROM test"), containsString("plan")); assertThat(readLine(), startsWith("----------")); assertThat(readLine(), startsWith("Project[[test_field{f}#")); - assertThat(readLine(), startsWith("\\_SubQueryAlias[test]")); - assertThat(readLine(), startsWith(" \\_EsRelation[test][test_field{f}#")); + assertThat(readLine(), startsWith("\\_EsRelation[test][test_field{f}#")); assertEquals("", readLine()); assertThat(command("EXPLAIN (PLAN OPTIMIZED) SELECT * FROM test"), containsString("plan")); @@ -74,8 +73,7 @@ public class CliExplainIT extends CliIntegrationTestCase { assertThat(readLine(), startsWith("----------")); assertThat(readLine(), startsWith("Project[[i{f}#")); assertThat(readLine(), startsWith("\\_Filter[i{f}#")); - assertThat(readLine(), startsWith(" \\_SubQueryAlias[test]")); - assertThat(readLine(), startsWith(" \\_EsRelation[test][i{f}#")); + assertThat(readLine(), startsWith(" \\_EsRelation[test][i{f}#")); assertEquals("", readLine()); assertThat(command("EXPLAIN (PLAN OPTIMIZED) SELECT * FROM test WHERE i = 2"), containsString("plan")); @@ -134,8 +132,7 @@ public class CliExplainIT extends CliIntegrationTestCase { containsString("plan")); assertThat(readLine(), startsWith("----------")); assertThat(readLine(), startsWith("Aggregate[[],[COUNT(1)#")); - assertThat(readLine(), startsWith("\\_SubQueryAlias[test]")); - assertThat(readLine(), startsWith(" \\_EsRelation[test][i{f}#")); + assertThat(readLine(), startsWith("\\_EsRelation[test][i{f}#")); assertEquals("", readLine()); assertThat(command("EXPLAIN (PLAN OPTIMIZED) SELECT COUNT(*) FROM test"), containsString("plan")); diff --git a/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocCsvSpecIT.java b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocCsvSpecIT.java index 336b476a37f..f89f801d282 100644 --- a/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocCsvSpecIT.java +++ b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocCsvSpecIT.java @@ -69,12 +69,12 @@ public class JdbcDocCsvSpecIT extends SpecBaseIntegrationTestCase { // uncomment this to printout the result set and create new CSV tests // //JdbcTestUtils.logLikeCLI(elastic, log); - JdbcAssert.assertResultSets(expected, elastic, log, true); + JdbcAssert.assertResultSets(expected, elastic, log, true, false); } @Override protected boolean logEsResultSet() { - return true; + return false; } @Override diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/cli/ShowTestCase.java 
b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/cli/ShowTestCase.java index 8f973748981..cddbf4c1007 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/cli/ShowTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/cli/ShowTestCase.java @@ -38,6 +38,10 @@ public abstract class ShowTestCase extends CliIntegrationTestCase { while (aggregateFunction.matcher(line).matches()) { line = readLine(); } + Pattern groupingFunction = Pattern.compile("\\s*[A-Z0-9_~]+\\s*\\|\\s*GROUPING\\s*"); + while (groupingFunction.matcher(line).matches()) { + line = readLine(); + } Pattern conditionalFunction = Pattern.compile("\\s*[A-Z0-9_~]+\\s*\\|\\s*CONDITIONAL\\s*"); while (conditionalFunction.matcher(line).matches()) { line = readLine(); diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvSpecTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvSpecTestCase.java index 755d701c226..abf56cee9c7 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvSpecTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvSpecTestCase.java @@ -7,10 +7,12 @@ package org.elasticsearch.xpack.sql.qa.jdbc; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.logging.log4j.Logger; import org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.CsvTestCase; import java.sql.Connection; import java.sql.ResultSet; +import java.sql.SQLException; import java.util.ArrayList; import java.util.List; @@ -58,4 +60,10 @@ public abstract class CsvSpecTestCase extends SpecBaseIntegrationTestCase { assertResults(expected, elasticResults); } } + + @Override + protected void assertResults(ResultSet expected, ResultSet elastic) throws SQLException { + Logger log = logEsResultSet() ? logger : null; + JdbcAssert.assertResultSets(expected, elastic, log, false, false); + } } diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DebugSqlSpec.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DebugSqlSpec.java index 21d2f3301fb..b51d66ace2e 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DebugSqlSpec.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DebugSqlSpec.java @@ -16,7 +16,7 @@ public abstract class DebugSqlSpec extends SqlSpecTestCase { @ParametersFactory(shuffle = false, argumentFormatting = PARAM_FORMATTING) public static List readScriptSpec() throws Exception { Parser parser = specParser(); - return readScriptSpec("/debug.sql-spec", parser); + return readScriptSpec("/datetime.sql-spec", parser); } public DebugSqlSpec(String fileName, String groupName, String testName, Integer lineNumber, String query) { diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java index e1fdd0744a7..2817ab6df72 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java
@@ -57,15 +58,29 @@ public class JdbcAssert { /** * Assert the given result sets, potentially in a lenient way. - * When lenient is specified, the type comparison of a column is widden to reach a common, compatible ground. + * When lenientDataType is specified, the type comparison of a column is widened to reach a common, compatible ground. * This means promoting integer types to long and floating types to double and comparing their values. - * For example in a non-lenient, strict case a comparison between an int and a tinyint would fail, with lenient it will succeed as - * long as the actual value is the same. + * For example in a non-lenient, strict case a comparison between an int and a tinyint would fail, with lenientDataType it will succeed + * as long as the actual value is the same. */ - public static void assertResultSets(ResultSet expected, ResultSet actual, Logger logger, boolean lenient) throws SQLException { + public static void assertResultSets(ResultSet expected, ResultSet actual, Logger logger, boolean lenientDataType) throws SQLException { + assertResultSets(expected, actual, logger, lenientDataType, true); + } + + /** + * Assert the given result sets, potentially in a lenient way. + * When lenientDataType is specified, the type comparison of a column is widened to reach a common, compatible ground. + * This means promoting integer types to long and floating types to double and comparing their values. + * For example in a non-lenient, strict case a comparison between an int and a tinyint would fail, with lenientDataType it will succeed + * as long as the actual value is the same. + * Also has the option of treating the numeric results for floating point numbers in a lenient way, if chosen to. Usually, + * we would want lenient treatment for floating point numbers in sql-spec tests, where the comparison is made against H2.
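+     * <p>For illustration only (not part of this change): the CSV doc tests combine lenient data types with strict
+     * floating point comparison, i.e.
+     * <pre>
+     * JdbcAssert.assertResultSets(expected, elastic, log, true, false);
+     * </pre>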
+ */ + public static void assertResultSets(ResultSet expected, ResultSet actual, Logger logger, boolean lenientDataType, + boolean lenientFloatingNumbers) throws SQLException { try (ResultSet ex = expected; ResultSet ac = actual) { - assertResultSetMetadata(ex, ac, logger, lenient); - assertResultSetData(ex, ac, logger, lenient); + assertResultSetMetadata(ex, ac, logger, lenientDataType); + assertResultSetData(ex, ac, logger, lenientDataType, lenientFloatingNumbers); } } @@ -74,7 +89,8 @@ public class JdbcAssert { } // metadata doesn't consume a ResultSet thus it shouldn't close it - public static void assertResultSetMetadata(ResultSet expected, ResultSet actual, Logger logger, boolean lenient) throws SQLException { + public static void assertResultSetMetadata(ResultSet expected, ResultSet actual, Logger logger, boolean lenientDataType) + throws SQLException { ResultSetMetaData expectedMeta = expected.getMetaData(); ResultSetMetaData actualMeta = actual.getMetaData(); @@ -116,8 +132,8 @@ public class JdbcAssert { } // use the type not the name (timestamp with timezone returns spaces for example) - int expectedType = typeOf(expectedMeta.getColumnType(column), lenient); - int actualType = typeOf(actualMeta.getColumnType(column), lenient); + int expectedType = typeOf(expectedMeta.getColumnType(column), lenientDataType); + int actualType = typeOf(actualMeta.getColumnType(column), lenientDataType); // since H2 cannot use a fixed timezone, the data is stored in UTC (and thus with timezone) if (expectedType == Types.TIMESTAMP_WITH_TIMEZONE) { @@ -153,13 +169,20 @@ public class JdbcAssert { assertResultSetData(expected, actual, logger, false); } - public static void assertResultSetData(ResultSet expected, ResultSet actual, Logger logger, boolean lenient) throws SQLException { + public static void assertResultSetData(ResultSet expected, ResultSet actual, Logger logger, boolean lenientDataType) + throws SQLException { + assertResultSetData(expected, actual, logger, lenientDataType, true); + } + + public static void assertResultSetData(ResultSet expected, ResultSet actual, Logger logger, boolean lenientDataType, + boolean lenientFloatingNumbers) throws SQLException { try (ResultSet ex = expected; ResultSet ac = actual) { - doAssertResultSetData(ex, ac, logger, lenient); + doAssertResultSetData(ex, ac, logger, lenientDataType, lenientFloatingNumbers); } } - private static void doAssertResultSetData(ResultSet expected, ResultSet actual, Logger logger, boolean lenient) throws SQLException { + private static void doAssertResultSetData(ResultSet expected, ResultSet actual, Logger logger, boolean lenientDataType, + boolean lenientFloatingNumbers) throws SQLException { ResultSetMetaData metaData = expected.getMetaData(); int columns = metaData.getColumnCount(); @@ -199,7 +222,7 @@ public class JdbcAssert { } Object expectedObject = expected.getObject(column); - Object actualObject = lenient ? actual.getObject(column, expectedColumnClass) : actual.getObject(column); + Object actualObject = lenientDataType ? 
actual.getObject(column, expectedColumnClass) : actual.getObject(column); String msg = format(Locale.ROOT, "Different result for column [%s], entry [%d]", metaData.getColumnName(column), count + 1); @@ -219,10 +242,9 @@ public class JdbcAssert { } // and floats/doubles else if (type == Types.DOUBLE) { - // the 1d/1f difference is used due to rounding/flooring - assertEquals(msg, (double) expectedObject, (double) actualObject, 1d); + assertEquals(msg, (double) expectedObject, (double) actualObject, lenientFloatingNumbers ? 1d : 0.0d); } else if (type == Types.FLOAT) { - assertEquals(msg, (float) expectedObject, (float) actualObject, 1f); + assertEquals(msg, (float) expectedObject, (float) actualObject, lenientFloatingNumbers ? 1f : 0.0f); } // intervals else if (type == Types.VARCHAR && actualObject instanceof TemporalAmount) { @@ -251,8 +273,8 @@ public class JdbcAssert { /** * Returns the value of the given type either in a lenient fashion (widened) or strict. */ - private static int typeOf(int columnType, boolean lenient) { - if (lenient) { + private static int typeOf(int columnType, boolean lenientDataType) { + if (lenientDataType) { // integer upcast to long if (columnType == TINYINT || columnType == SMALLINT || columnType == INTEGER || columnType == BIGINT) { return BIGINT; diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/SpecBaseIntegrationTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/SpecBaseIntegrationTestCase.java index 8c1f4a375db..e69d229b6f1 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/SpecBaseIntegrationTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/SpecBaseIntegrationTestCase.java @@ -197,4 +197,4 @@ public abstract class SpecBaseIntegrationTestCase extends JdbcIntegrationTestCas public static InputStream readFromJarUrl(URL source) throws IOException { return source.openStream(); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java index 062ab0c81b9..3ca8878ad5e 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java @@ -471,7 +471,7 @@ public abstract class RestSqlTestCase extends ESRestTestCase implements ErrorsTe @SuppressWarnings("unchecked") Map aggregations2 = (Map) groupby.get("aggregations"); - assertEquals(3, aggregations2.size()); + assertEquals(2, aggregations2.size()); List aggKeys = new ArrayList<>(2); String aggFilterKey = null; @@ -491,7 +491,7 @@ public abstract class RestSqlTestCase extends ESRestTestCase implements ErrorsTe } } Collections.sort(aggKeys); - assertEquals("having." + aggKeys.get(1), aggFilterKey); + assertEquals("having." 
+ aggKeys.get(0), aggFilterKey); @SuppressWarnings("unchecked") Map having = (Map) aggregations2.get(aggFilterKey); @@ -505,7 +505,7 @@ public abstract class RestSqlTestCase extends ESRestTestCase implements ErrorsTe @SuppressWarnings("unchecked") Map bucketsPath = (Map) bucketSelector.get("buckets_path"); assertEquals(1, bucketsPath.size()); - assertEquals(aggKeys.get(1).toString(), bucketsPath.get("a0")); + assertEquals(aggKeys.get(0).toString(), bucketsPath.get("a0")); @SuppressWarnings("unchecked") Map filterScript = (Map) bucketSelector.get("script"); diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec index 79656fea582..5d1e59ef7a2 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec @@ -16,8 +16,8 @@ SELECT gender, PERCENTILE(emp_no, 97.76) p1 FROM test_emp GROUP BY gender; gender:s | p1:d null |10019.0 -F |10099.51 -M |10095.789999999999 +F |10099.7608 +M |10096.2232 ; multiplePercentilesOneWithCommaOneWithout @@ -137,3 +137,80 @@ null |null |null |null |null | 3 |3 |51 |3 |3.0 |100.0 |NaN |NaN 4 |4 |72 |4 |4.0 |0.0 |NaN |NaN ; + + +// +// Grouping functions +// + + +histogramNumeric +SELECT HISTOGRAM(salary, 5000) AS h FROM test_emp GROUP BY h; + + h +--------------- +25000 +30000 +35000 +40000 +45000 +50000 +55000 +60000 +65000 +70000 +; + +histogramDate +schema::h:ts|c:l +SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) as c FROM test_emp GROUP BY h; + + h | c +--------------------+--------------- +null |10 +1951-04-11T00:00:00Z|1 +1952-04-05T00:00:00Z|10 +1953-03-31T00:00:00Z|10 +1954-03-26T00:00:00Z|7 +1955-03-21T00:00:00Z|4 +1956-03-15T00:00:00Z|4 +1957-03-10T00:00:00Z|6 +1958-03-05T00:00:00Z|6 +1959-02-28T00:00:00Z|9 +1960-02-23T00:00:00Z|7 +1961-02-17T00:00:00Z|8 +1962-02-12T00:00:00Z|6 +1963-02-07T00:00:00Z|7 +1964-02-02T00:00:00Z|5 + +; + +histogramDateWithCountAndOrder +schema::h:ts|c:l +SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) as c FROM test_emp GROUP BY h ORDER BY h DESC; + + h | c +--------------------+--------------- +1964-02-02T00:00:00Z|5 +1963-02-07T00:00:00Z|7 +1962-02-12T00:00:00Z|6 +1961-02-17T00:00:00Z|8 +1960-02-23T00:00:00Z|7 +1959-02-28T00:00:00Z|9 +1958-03-05T00:00:00Z|6 +1957-03-10T00:00:00Z|6 +1956-03-15T00:00:00Z|4 +1955-03-21T00:00:00Z|4 +1954-03-26T00:00:00Z|7 +1953-03-31T00:00:00Z|10 +1952-04-05T00:00:00Z|10 +1951-04-11T00:00:00Z|1 +null |10 +; + +histogramDateWithDateFunction-Ignore +SELECT YEAR(HISTOGRAM(birth_date, INTERVAL 1 YEAR)) AS h, COUNT(*) as c FROM test_emp GROUP BY h ORDER BY h DESC; + + + +; \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec index 2a64bfc34de..e87aaecf6f3 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec @@ -92,7 +92,7 @@ test_emp | BASE TABLE test_emp_copy | BASE TABLE ; -testGroupByOnAlias +groupByOnAlias SELECT gender g, PERCENTILE(emp_no, 97) p1 FROM test_alias GROUP BY g ORDER BY g DESC; g:s | p1:d @@ -102,11 +102,11 @@ F | 10099.52 null | 10019.0 ; -testGroupByOnPattern +groupByOnPattern SELECT gender, PERCENTILE(emp_no, 97) p1 FROM "test_*" WHERE gender is NOT NULL GROUP BY gender; gender:s | p1:d -F | 10099.32 -M | 10095.98 +F | 10099.52 +M | 10096.0 ; \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec 
b/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec index c8351e4f7bc..7c9c98f6d04 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec @@ -6,120 +6,123 @@ showFunctions SHOW FUNCTIONS; - name:s | type:s -AVG |AGGREGATE -COUNT |AGGREGATE -MAX |AGGREGATE -MIN |AGGREGATE -SUM |AGGREGATE -KURTOSIS |AGGREGATE -PERCENTILE |AGGREGATE -PERCENTILE_RANK |AGGREGATE -SKEWNESS |AGGREGATE -STDDEV_POP |AGGREGATE -SUM_OF_SQUARES |AGGREGATE -VAR_POP |AGGREGATE -COALESCE |CONDITIONAL -GREATEST |CONDITIONAL -IFNULL |CONDITIONAL -ISNULL |CONDITIONAL -LEAST |CONDITIONAL -NULLIF |CONDITIONAL -NVL |CONDITIONAL -DAY |SCALAR -DAYNAME |SCALAR -DAYOFMONTH |SCALAR -DAYOFWEEK |SCALAR -DAYOFYEAR |SCALAR -DAY_NAME |SCALAR -DAY_OF_MONTH |SCALAR -DAY_OF_WEEK |SCALAR -DAY_OF_YEAR |SCALAR -DOM |SCALAR -DOW |SCALAR -DOY |SCALAR -HOUR |SCALAR -HOUR_OF_DAY |SCALAR -IDOW |SCALAR -ISODAYOFWEEK |SCALAR -ISODOW |SCALAR -ISOWEEK |SCALAR -ISOWEEKOFYEAR |SCALAR -ISO_DAY_OF_WEEK |SCALAR -ISO_WEEK_OF_YEAR|SCALAR -IW |SCALAR -IWOY |SCALAR -MINUTE |SCALAR -MINUTE_OF_DAY |SCALAR -MINUTE_OF_HOUR |SCALAR -MONTH |SCALAR -MONTHNAME |SCALAR -MONTH_NAME |SCALAR -MONTH_OF_YEAR |SCALAR -QUARTER |SCALAR -SECOND |SCALAR -SECOND_OF_MINUTE|SCALAR -WEEK |SCALAR -WEEK_OF_YEAR |SCALAR -YEAR |SCALAR -ABS |SCALAR -ACOS |SCALAR -ASIN |SCALAR -ATAN |SCALAR -ATAN2 |SCALAR -CBRT |SCALAR -CEIL |SCALAR -CEILING |SCALAR -COS |SCALAR -COSH |SCALAR -COT |SCALAR -DEGREES |SCALAR -E |SCALAR -EXP |SCALAR -EXPM1 |SCALAR -FLOOR |SCALAR -LOG |SCALAR -LOG10 |SCALAR -MOD |SCALAR -PI |SCALAR -POWER |SCALAR -RADIANS |SCALAR -RAND |SCALAR -RANDOM |SCALAR -ROUND |SCALAR -SIGN |SCALAR -SIGNUM |SCALAR -SIN |SCALAR -SINH |SCALAR -SQRT |SCALAR -TAN |SCALAR -TRUNCATE |SCALAR -ASCII |SCALAR -BIT_LENGTH |SCALAR -CHAR |SCALAR -CHARACTER_LENGTH|SCALAR -CHAR_LENGTH |SCALAR -CONCAT |SCALAR -INSERT |SCALAR -LCASE |SCALAR -LEFT |SCALAR -LENGTH |SCALAR -LOCATE |SCALAR -LTRIM |SCALAR -OCTET_LENGTH |SCALAR -POSITION |SCALAR -REPEAT |SCALAR -REPLACE |SCALAR -RIGHT |SCALAR -RTRIM |SCALAR -SPACE |SCALAR -SUBSTRING |SCALAR -UCASE |SCALAR -CAST |SCALAR -CONVERT |SCALAR -DATABASE |SCALAR -USER |SCALAR -SCORE |SCORE + name:s | type:s +AVG |AGGREGATE +COUNT |AGGREGATE +MAX |AGGREGATE +MIN |AGGREGATE +SUM |AGGREGATE +KURTOSIS |AGGREGATE +PERCENTILE |AGGREGATE +PERCENTILE_RANK |AGGREGATE +SKEWNESS |AGGREGATE +STDDEV_POP |AGGREGATE +SUM_OF_SQUARES |AGGREGATE +VAR_POP |AGGREGATE +HISTOGRAM |GROUPING +COALESCE |CONDITIONAL +GREATEST |CONDITIONAL +IFNULL |CONDITIONAL +ISNULL |CONDITIONAL +LEAST |CONDITIONAL +NULLIF |CONDITIONAL +NVL |CONDITIONAL +CURRENT_TIMESTAMP|SCALAR +DAY |SCALAR +DAYNAME |SCALAR +DAYOFMONTH |SCALAR +DAYOFWEEK |SCALAR +DAYOFYEAR |SCALAR +DAY_NAME |SCALAR +DAY_OF_MONTH |SCALAR +DAY_OF_WEEK |SCALAR +DAY_OF_YEAR |SCALAR +DOM |SCALAR +DOW |SCALAR +DOY |SCALAR +HOUR |SCALAR +HOUR_OF_DAY |SCALAR +IDOW |SCALAR +ISODAYOFWEEK |SCALAR +ISODOW |SCALAR +ISOWEEK |SCALAR +ISOWEEKOFYEAR |SCALAR +ISO_DAY_OF_WEEK |SCALAR +ISO_WEEK_OF_YEAR |SCALAR +IW |SCALAR +IWOY |SCALAR +MINUTE |SCALAR +MINUTE_OF_DAY |SCALAR +MINUTE_OF_HOUR |SCALAR +MONTH |SCALAR +MONTHNAME |SCALAR +MONTH_NAME |SCALAR +MONTH_OF_YEAR |SCALAR +NOW |SCALAR +QUARTER |SCALAR +SECOND |SCALAR +SECOND_OF_MINUTE |SCALAR +WEEK |SCALAR +WEEK_OF_YEAR |SCALAR +YEAR |SCALAR +ABS |SCALAR +ACOS |SCALAR +ASIN |SCALAR +ATAN |SCALAR +ATAN2 |SCALAR +CBRT |SCALAR +CEIL |SCALAR +CEILING |SCALAR +COS |SCALAR +COSH |SCALAR +COT |SCALAR +DEGREES |SCALAR +E |SCALAR +EXP 
|SCALAR +EXPM1 |SCALAR +FLOOR |SCALAR +LOG |SCALAR +LOG10 |SCALAR +MOD |SCALAR +PI |SCALAR +POWER |SCALAR +RADIANS |SCALAR +RAND |SCALAR +RANDOM |SCALAR +ROUND |SCALAR +SIGN |SCALAR +SIGNUM |SCALAR +SIN |SCALAR +SINH |SCALAR +SQRT |SCALAR +TAN |SCALAR +TRUNCATE |SCALAR +ASCII |SCALAR +BIT_LENGTH |SCALAR +CHAR |SCALAR +CHARACTER_LENGTH |SCALAR +CHAR_LENGTH |SCALAR +CONCAT |SCALAR +INSERT |SCALAR +LCASE |SCALAR +LEFT |SCALAR +LENGTH |SCALAR +LOCATE |SCALAR +LTRIM |SCALAR +OCTET_LENGTH |SCALAR +POSITION |SCALAR +REPEAT |SCALAR +REPLACE |SCALAR +RIGHT |SCALAR +RTRIM |SCALAR +SPACE |SCALAR +SUBSTRING |SCALAR +UCASE |SCALAR +CAST |SCALAR +CONVERT |SCALAR +DATABASE |SCALAR +USER |SCALAR +SCORE |SCORE ; showFunctionsWithExactMatch diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime-interval.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime-interval.csv-spec index c20529485da..9434ead51da 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/datetime-interval.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/datetime-interval.csv-spec @@ -3,7 +3,7 @@ // hence why all INTERVAL tests need to be done manually // -testExactIntervals +exactIntervals SELECT INTERVAL 1 YEAR AS y, INTERVAL 2 MONTH AS m, INTERVAL 3 DAY AS d, INTERVAL 4 HOUR AS h, INTERVAL 5 MINUTE AS mm, INTERVAL 6 SECOND AS s; y | m | d | h | mm | s @@ -20,7 +20,7 @@ SELECT INTERVAL 1 YEARS AS y, INTERVAL 2 MONTHS AS m, INTERVAL 3 DAYS AS d, INTE ; // take the examples from https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/interval-literals?view=sql-server-2017 -testYear +year SELECT INTERVAL '326' YEAR; INTERVAL '326' YEAR @@ -28,7 +28,7 @@ INTERVAL '326' YEAR +326-0 ; -testMonth +month SELECT INTERVAL '326' MONTH; INTERVAL '326' MONTH @@ -36,7 +36,7 @@ INTERVAL '326' MONTH +0-326 ; -testDay +day SELECT INTERVAL '3261' DAY; INTERVAL '3261' DAY @@ -44,7 +44,7 @@ INTERVAL '3261' DAY +3261 00:00:00.0 ; -testHour +hour SELECT INTERVAL '163' HOUR; INTERVAL '163' HOUR @@ -52,7 +52,7 @@ INTERVAL '163' HOUR +6 19:00:00.0 ; -testMinute +minute SELECT INTERVAL '163' MINUTE; INTERVAL '163' MINUTE @@ -60,7 +60,7 @@ INTERVAL '163' MINUTE +0 02:43:00.0 ; -testSecond +second SELECT INTERVAL '223.16' SECOND; INTERVAL '223.16' SECOND @@ -68,7 +68,7 @@ INTERVAL '223.16' SECOND +0 00:03:43.16 ; -testYearMonth +yearMonth SELECT INTERVAL '163-11' YEAR TO MONTH; INTERVAL '163-11' YEAR TO MONTH @@ -76,7 +76,7 @@ INTERVAL '163-11' YEAR TO MONTH +163-11 ; -testDayHour +dayHour SELECT INTERVAL '163 12' DAY TO HOUR; INTERVAL '163 12' DAY TO HOUR @@ -84,7 +84,7 @@ INTERVAL '163 12' DAY TO HOUR +163 12:00:00.0 ; -testDayMinute +dayMinute SELECT INTERVAL '163 12:39' DAY TO MINUTE AS interval; interval @@ -92,7 +92,7 @@ interval +163 12:39:00.0 ; -testDaySecond +daySecond SELECT INTERVAL '163 12:39:59.163' DAY TO SECOND AS interval; interval @@ -100,7 +100,7 @@ interval +163 12:39:59.163 ; -testDaySecondNegative +daySecondNegative SELECT INTERVAL -'163 23:39:56.23' DAY TO SECOND AS interval; interval @@ -108,7 +108,7 @@ interval -163 23:39:56.23 ; -testHourMinute +hourMinute SELECT INTERVAL '163:39' HOUR TO MINUTE AS interval; interval @@ -116,7 +116,7 @@ interval +6 19:39:00.0 ; -testHourSecond +hourSecond SELECT INTERVAL '163:39:59.163' HOUR TO SECOND AS interval; interval @@ -124,7 +124,7 @@ interval +6 19:39:59.163 ; -testMinuteSecond +minuteSecond SELECT INTERVAL '163:59.163' MINUTE TO SECOND AS interval; interval @@ -132,7 +132,65 @@ interval +0 02:43:59.163 ; -testDatePlusInterval +intervalPlusInterval +SELECT INTERVAL 1 DAY 
+ INTERVAL 53 MINUTES; + +INTERVAL 1 DAY + INTERVAL 53 MINUTES +------------------------------------ ++1 00:53:00.0 +; + +datePlusIntervalInline +SELECT CAST('1969-05-13T12:34:56' AS DATE) + INTERVAL 49 YEARS AS result; + + result +-------------------- +2018-05-13T12:34:56Z +; + +minusInterval +SELECT - INTERVAL '49-1' YEAR TO MONTH result; + + result +--------------- +-49-1 +; + + +intervalMinusInterval +SELECT INTERVAL '1' DAY - INTERVAL '2' HOURS AS result; + + result +--------------- ++0 22:00:00.0 +; + + +intervalYearMultiply +SELECT -2 * INTERVAL '3' YEARS AS result; + + result +--------------- +-6-0 +; + +intervalDayMultiply +SELECT -2 * INTERVAL '1 23:45' DAY TO MINUTES AS result; + + result +--------------- +-3 23:30:00.0 +; + +dateMinusInterval +SELECT CAST('2018-05-13T12:34:56' AS DATE) - INTERVAL '2-8' YEAR TO MONTH AS result; + + result +-------------------- +2015-09-13T12:34:56Z +; + +datePlusInterval SELECT MONTH(birth_date) AS m, MONTH(birth_date + INTERVAL '1-2' YEAR TO MONTH) AS f FROM test_emp GROUP BY birth_date ORDER BY birth_date ASC LIMIT 5; m | f @@ -144,7 +202,7 @@ null |null 6 |8 ; -testDatePlusMixInterval +datePlusMixInterval SELECT birth_date, birth_date + INTERVAL '1-2' YEAR TO MONTH AS f FROM test_emp GROUP BY birth_date ORDER BY birth_date ASC LIMIT 5; birth_date:ts | f:ts @@ -157,7 +215,7 @@ null |null ; -testDateMinusInterval +dateMinusInterval SELECT YEAR(birth_date) AS y, YEAR(birth_date - INTERVAL 1 YEAR) AS f FROM test_emp GROUP BY birth_date ORDER BY birth_date ASC LIMIT 5; y | f @@ -169,7 +227,7 @@ null |null 1952 |1951 ; -testDatePlusMixInterval +datePlusMixInterval SELECT birth_date, birth_date + INTERVAL '1-2' YEAR TO MONTH AS f FROM test_emp GROUP BY birth_date ORDER BY birth_date ASC LIMIT 5; birth_date:ts | f:ts @@ -182,7 +240,7 @@ null |null ; -testDateAndMultipleIntervals +dateAndMultipleIntervals SELECT birth_date, birth_date - INTERVAL 1 YEAR + INTERVAL '2-3' YEAR TO MONTH AS f FROM test_emp GROUP BY birth_date ORDER BY birth_date ASC LIMIT 5; birth_date:ts | f:ts @@ -195,7 +253,7 @@ null |null ; -testDatePlusIntervalWhereClause +datePlusIntervalWhereClause SELECT birth_date, YEAR(birth_date + INTERVAL 1 YEAR) AS f FROM test_emp WHERE YEAR(birth_date + INTERVAL 1 YEAR) > 1 GROUP BY birth_date ORDER BY birth_date ASC LIMIT 5; birth_date:ts | f:i @@ -207,7 +265,7 @@ SELECT birth_date, YEAR(birth_date + INTERVAL 1 YEAR) AS f FROM test_emp WHERE Y 1952-07-08T00:00:00Z|1953 ; -testDateMinusIntervalOrder +dateMinusIntervalOrder SELECT birth_date, MONTH(birth_date - INTERVAL 1 YEAR) AS f FROM test_emp GROUP BY birth_date ORDER BY MONTH(birth_date - INTERVAL 1 YEAR) ASC LIMIT 5; birth_date:ts | f:i @@ -220,7 +278,7 @@ null |null ; // see https://github.com/elastic/elasticsearch/issues/35745 -testDatePlusIntervalHavingClause-Ignore +datePlusIntervalHavingClause-Ignore SELECT birth_date, MAX(hire_date) - INTERVAL 1 YEAR AS f FROM test_emp GROUP BY birth_date ORDER BY birth_date ASC LIMIT 5; birth_date:ts | f:ts diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec index 134a5f4b667..d966c8c822e 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec @@ -271,3 +271,79 @@ SELECT WEEK(birth_date) week, COUNT(*) c FROM test_emp WHERE MOD(WEEK(birth_date 48 |2 52 |3 ; + +currentTimestampKeywordWithDivision +SELECT YEAR(CURRENT_TIMESTAMP) / 1000 AS result; + + result +--------------- +2 +; + 
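+// Note: the YEAR(...) / 1000 pattern in these tests keeps the expected value stable over time: +// with integer division, any current year from 2000 through 2999 yields 2 (e.g. 2018 / 1000 = 2). +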
+currentTimestampFunctionNoArgsWithDivision +SELECT YEAR(CURRENT_TIMESTAMP()) / 1000 AS result; + + result +--------------- +2 +; + +currentTimestampFunctionPrecisionWithDivision +SELECT YEAR(CURRENT_TIMESTAMP(2)) / 1000 AS result; + + result +--------------- +2 +; + +nowWithDivision +SELECT YEAR(NOW()) / 1000 AS result; + + result +--------------- +2 +; + +nowIntervalSubtraction +SELECT YEAR(NOW() - INTERVAL 2 YEARS) / 1000 AS result; + + result +--------------- +2 +; + + +currentTimestampFilter +SELECT first_name FROM test_emp WHERE hire_date > NOW() - INTERVAL 100 YEARS ORDER BY first_name ASC LIMIT 10; + + first_name +--------------- +Alejandro +Amabile +Anneke +Anoosh +Arumugam +Basil +Berhard +Berni +Bezalel +Bojan +; + +currentTimestampFilterScript +SELECT first_name FROM test_emp WHERE YEAR(hire_date) - YEAR(NOW()) / 1000 > 10 ORDER BY first_name ASC LIMIT 10; + + first_name +--------------- +Alejandro +Amabile +Anneke +Anoosh +Arumugam +Basil +Berhard +Berni +Bezalel +Bojan + +; \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec index 4323f3a2473..16fe5511e4d 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec @@ -114,3 +114,16 @@ SELECT QUARTER(hire_date) quarter, COUNT(*) hires FROM test_emp GROUP BY QUARTER dayOfWeekGroupBy SELECT DAY_OF_WEEK(birth_date) day, COUNT(*) c FROM test_emp WHERE DAY_OF_WEEK(birth_date) < 6 GROUP BY day ORDER BY DAY_OF_WEEK(birth_date); + + // current +currentTimestampYear +SELECT YEAR(CURRENT_TIMESTAMP()) AS result; + +currentTimestampMonth +SELECT MONTH(CURRENT_TIMESTAMP()) AS result; + +currentTimestampHour-Ignore +SELECT HOUR(CURRENT_TIMESTAMP()) AS result; + +currentTimestampMinute +SELECT MINUTE(CURRENT_TIMESTAMP()) AS result; diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs.csv-spec index 651cf6eb663..03d412b2ab5 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/docs.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/docs.csv-spec @@ -182,121 +182,125 @@ showFunctions // tag::showFunctions SHOW FUNCTIONS; - name | type -----------------+--------------- -AVG |AGGREGATE -COUNT |AGGREGATE -MAX |AGGREGATE -MIN |AGGREGATE -SUM |AGGREGATE -KURTOSIS |AGGREGATE -PERCENTILE |AGGREGATE -PERCENTILE_RANK |AGGREGATE -SKEWNESS |AGGREGATE -STDDEV_POP |AGGREGATE -SUM_OF_SQUARES |AGGREGATE -VAR_POP |AGGREGATE -COALESCE |CONDITIONAL -GREATEST |CONDITIONAL -IFNULL |CONDITIONAL -ISNULL |CONDITIONAL -LEAST |CONDITIONAL -NULLIF |CONDITIONAL -NVL |CONDITIONAL -DAY |SCALAR -DAYNAME |SCALAR -DAYOFMONTH |SCALAR -DAYOFWEEK |SCALAR -DAYOFYEAR |SCALAR -DAY_NAME |SCALAR -DAY_OF_MONTH |SCALAR -DAY_OF_WEEK |SCALAR -DAY_OF_YEAR |SCALAR -DOM |SCALAR -DOW |SCALAR -DOY |SCALAR -HOUR |SCALAR -HOUR_OF_DAY |SCALAR -IDOW |SCALAR -ISODAYOFWEEK |SCALAR -ISODOW |SCALAR -ISOWEEK |SCALAR -ISOWEEKOFYEAR |SCALAR -ISO_DAY_OF_WEEK |SCALAR -ISO_WEEK_OF_YEAR|SCALAR -IW |SCALAR -IWOY |SCALAR -MINUTE |SCALAR -MINUTE_OF_DAY |SCALAR -MINUTE_OF_HOUR |SCALAR -MONTH |SCALAR -MONTHNAME |SCALAR -MONTH_NAME |SCALAR -MONTH_OF_YEAR |SCALAR -QUARTER |SCALAR -SECOND |SCALAR -SECOND_OF_MINUTE|SCALAR -WEEK |SCALAR -WEEK_OF_YEAR |SCALAR -YEAR |SCALAR -ABS |SCALAR -ACOS |SCALAR -ASIN |SCALAR -ATAN |SCALAR -ATAN2 |SCALAR -CBRT |SCALAR -CEIL |SCALAR -CEILING |SCALAR -COS |SCALAR -COSH |SCALAR -COT |SCALAR -DEGREES |SCALAR -E |SCALAR -EXP |SCALAR -EXPM1 
|SCALAR -FLOOR |SCALAR -LOG |SCALAR -LOG10 |SCALAR -MOD |SCALAR -PI |SCALAR -POWER |SCALAR -RADIANS |SCALAR -RAND |SCALAR -RANDOM |SCALAR -ROUND |SCALAR -SIGN |SCALAR -SIGNUM |SCALAR -SIN |SCALAR -SINH |SCALAR -SQRT |SCALAR -TAN |SCALAR -TRUNCATE |SCALAR -ASCII |SCALAR -BIT_LENGTH |SCALAR -CHAR |SCALAR -CHARACTER_LENGTH|SCALAR -CHAR_LENGTH |SCALAR -CONCAT |SCALAR -INSERT |SCALAR -LCASE |SCALAR -LEFT |SCALAR -LENGTH |SCALAR -LOCATE |SCALAR -LTRIM |SCALAR -OCTET_LENGTH |SCALAR -POSITION |SCALAR -REPEAT |SCALAR -REPLACE |SCALAR -RIGHT |SCALAR -RTRIM |SCALAR -SPACE |SCALAR -SUBSTRING |SCALAR -UCASE |SCALAR -CAST |SCALAR -CONVERT |SCALAR -DATABASE |SCALAR -USER |SCALAR -SCORE |SCORE + name | type +-----------------+--------------- +AVG |AGGREGATE +COUNT |AGGREGATE +MAX |AGGREGATE +MIN |AGGREGATE +SUM |AGGREGATE +KURTOSIS |AGGREGATE +PERCENTILE |AGGREGATE +PERCENTILE_RANK |AGGREGATE +SKEWNESS |AGGREGATE +STDDEV_POP |AGGREGATE +SUM_OF_SQUARES |AGGREGATE +VAR_POP |AGGREGATE +HISTOGRAM |GROUPING +COALESCE |CONDITIONAL +GREATEST |CONDITIONAL +IFNULL |CONDITIONAL +ISNULL |CONDITIONAL +LEAST |CONDITIONAL +NULLIF |CONDITIONAL +NVL |CONDITIONAL +CURRENT_TIMESTAMP|SCALAR +DAY |SCALAR +DAYNAME |SCALAR +DAYOFMONTH |SCALAR +DAYOFWEEK |SCALAR +DAYOFYEAR |SCALAR +DAY_NAME |SCALAR +DAY_OF_MONTH |SCALAR +DAY_OF_WEEK |SCALAR +DAY_OF_YEAR |SCALAR +DOM |SCALAR +DOW |SCALAR +DOY |SCALAR +HOUR |SCALAR +HOUR_OF_DAY |SCALAR +IDOW |SCALAR +ISODAYOFWEEK |SCALAR +ISODOW |SCALAR +ISOWEEK |SCALAR +ISOWEEKOFYEAR |SCALAR +ISO_DAY_OF_WEEK |SCALAR +ISO_WEEK_OF_YEAR |SCALAR +IW |SCALAR +IWOY |SCALAR +MINUTE |SCALAR +MINUTE_OF_DAY |SCALAR +MINUTE_OF_HOUR |SCALAR +MONTH |SCALAR +MONTHNAME |SCALAR +MONTH_NAME |SCALAR +MONTH_OF_YEAR |SCALAR +NOW |SCALAR +QUARTER |SCALAR +SECOND |SCALAR +SECOND_OF_MINUTE |SCALAR +WEEK |SCALAR +WEEK_OF_YEAR |SCALAR +YEAR |SCALAR +ABS |SCALAR +ACOS |SCALAR +ASIN |SCALAR +ATAN |SCALAR +ATAN2 |SCALAR +CBRT |SCALAR +CEIL |SCALAR +CEILING |SCALAR +COS |SCALAR +COSH |SCALAR +COT |SCALAR +DEGREES |SCALAR +E |SCALAR +EXP |SCALAR +EXPM1 |SCALAR +FLOOR |SCALAR +LOG |SCALAR +LOG10 |SCALAR +MOD |SCALAR +PI |SCALAR +POWER |SCALAR +RADIANS |SCALAR +RAND |SCALAR +RANDOM |SCALAR +ROUND |SCALAR +SIGN |SCALAR +SIGNUM |SCALAR +SIN |SCALAR +SINH |SCALAR +SQRT |SCALAR +TAN |SCALAR +TRUNCATE |SCALAR +ASCII |SCALAR +BIT_LENGTH |SCALAR +CHAR |SCALAR +CHARACTER_LENGTH |SCALAR +CHAR_LENGTH |SCALAR +CONCAT |SCALAR +INSERT |SCALAR +LCASE |SCALAR +LEFT |SCALAR +LENGTH |SCALAR +LOCATE |SCALAR +LTRIM |SCALAR +OCTET_LENGTH |SCALAR +POSITION |SCALAR +REPEAT |SCALAR +REPLACE |SCALAR +RIGHT |SCALAR +RTRIM |SCALAR +SPACE |SCALAR +SUBSTRING |SCALAR +UCASE |SCALAR +CAST |SCALAR +CONVERT |SCALAR +DATABASE |SCALAR +USER |SCALAR +SCORE |SCORE + // end::showFunctions ; @@ -669,7 +673,7 @@ SELECT MIN(salary) AS min, MAX(salary) AS max, AVG(salary) AS avg, COUNT(*) AS c min:i | max:i | avg:d | count:l ---------------+---------------+---------------+--------------- -25324 |74999 |48248 |100 +25324 |74999 |48248.55 |100 // end::groupByImplicitMultipleAggs ; @@ -695,6 +699,131 @@ SELECT MIN(salary) AS min, MAX(salary) AS max FROM emp HAVING min > 25000; // end::groupByHavingImplicitNoMatch //; +/////////////////////////////// +// +// Grouping +// +/////////////////////////////// + +histogramNumeric +// tag::histogramNumeric +SELECT HISTOGRAM(salary, 5000) AS h FROM emp GROUP BY h; + + h +--------------- +25000 +30000 +35000 +40000 +45000 +50000 +55000 +60000 +65000 +70000 + +// end::histogramNumeric +; + +histogramDate +schema::h:ts|c:l +// 
tag::histogramDate +SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) AS c FROM emp GROUP BY h; + + + h | c +--------------------+--------------- +null |10 +1951-04-11T00:00:00Z|1 +1952-04-05T00:00:00Z|10 +1953-03-31T00:00:00Z|10 +1954-03-26T00:00:00Z|7 +1955-03-21T00:00:00Z|4 +1956-03-15T00:00:00Z|4 +1957-03-10T00:00:00Z|6 +1958-03-05T00:00:00Z|6 +1959-02-28T00:00:00Z|9 +1960-02-23T00:00:00Z|7 +1961-02-17T00:00:00Z|8 +1962-02-12T00:00:00Z|6 +1963-02-07T00:00:00Z|7 +1964-02-02T00:00:00Z|5 + +// end::histogramDate +; + +/////////////////////////////// +// +// Date/Time +// +/////////////////////////////// + + +dtIntervalPlusInterval +// tag::dtIntervalPlusInterval +SELECT INTERVAL 1 DAY + INTERVAL 53 MINUTES AS result; + + result +--------------- ++1 00:53:00.0 + +// end::dtIntervalPlusInterval +; + + +dtDatePlusInterval +// tag::dtDatePlusInterval +SELECT CAST('1969-05-13T12:34:56' AS DATE) + INTERVAL 49 YEARS AS result; + + result +-------------------- +2018-05-13T12:34:56Z +// end::dtDatePlusInterval +; + +dtMinusInterval +// tag::dtMinusInterval +SELECT - INTERVAL '49-1' YEAR TO MONTH result; + + result +--------------- +-49-1 + +// end::dtMinusInterval +; + +dtIntervalMinusInterval +// tag::dtIntervalMinusInterval +SELECT INTERVAL '1' DAY - INTERVAL '2' HOURS AS result; + + result +--------------- ++0 22:00:00.0 +// end::dtIntervalMinusInterval +; + + +dtDateMinusInterval +// tag::dtDateMinusInterval +SELECT CAST('2018-05-13T12:34:56' AS DATE) - INTERVAL '2-8' YEAR TO MONTH AS result; + + result +-------------------- +2015-09-13T12:34:56Z +// end::dtDateMinusInterval +; + +dtIntervalMul +// tag::dtIntervalMul +SELECT -2 * INTERVAL '3' YEARS AS result; + + result +--------------- +-6-0 +// end::dtIntervalMul +; + + /////////////////////////////// // // Order by @@ -724,9 +853,9 @@ SELECT SCORE(), * FROM library WHERE MATCH(name, 'dune') ORDER BY SCORE() DESC; SCORE() | author | name | page_count | release_date ---------------+---------------+-------------------+---------------+-------------------- -2.288635 |Frank Herbert |Dune |604 |1965-06-01T00:00:00Z +2.2886353 |Frank Herbert |Dune |604 |1965-06-01T00:00:00Z 1.8893257 |Frank Herbert |Dune Messiah |331 |1969-10-15T00:00:00Z -1.6086555 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00Z +1.6086556 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00Z 1.4005898 |Frank Herbert |God Emperor of Dune|454 |1981-05-28T00:00:00Z // end::orderByScore @@ -738,9 +867,9 @@ SELECT SCORE(), * FROM library WHERE MATCH(name, 'dune') ORDER BY page_count DES SCORE() | author | name | page_count | release_date ---------------+---------------+-------------------+---------------+-------------------- -2.288635 |Frank Herbert |Dune |604 |1965-06-01T00:00:00Z +2.2886353 |Frank Herbert |Dune |604 |1965-06-01T00:00:00Z 1.4005898 |Frank Herbert |God Emperor of Dune|454 |1981-05-28T00:00:00Z -1.6086555 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00Z +1.6086556 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00Z 1.8893257 |Frank Herbert |Dune Messiah |331 |1969-10-15T00:00:00Z // end::orderByScoreWithMatch @@ -753,9 +882,9 @@ SELECT SCORE() AS score, name, release_date FROM library WHERE QUERY('dune') ORD score | name | release_date ---------------+-------------------+-------------------- 1.4005898 |God Emperor of Dune|1981-05-28T00:00:00Z -1.6086555 |Children of Dune |1976-04-21T00:00:00Z +1.6086556 |Children of Dune |1976-04-21T00:00:00Z 1.8893257 |Dune Messiah |1969-10-15T00:00:00Z -2.288635 |Dune 
|1965-06-01T00:00:00Z +2.2886353 |Dune |1965-06-01T00:00:00Z // end::scoreWithMatch ; @@ -789,7 +918,7 @@ SELECT AVG(salary) AS avg FROM emp; avg:d --------------- -48248 +48248.55 // end::aggAvg ; @@ -1900,3 +2029,59 @@ SELECT WEEK(CAST('1988-01-05T09:22:10Z' AS TIMESTAMP)) AS week, ISOWEEK(CAST('19 2 |1 // end::weekOfYear ; + + +currentNow +// tag::filterNow +SELECT first_name FROM emp WHERE hire_date > NOW() - INTERVAL 100 YEARS ORDER BY first_name ASC LIMIT 5; + + first_name +--------------- +Alejandro +Amabile +Anneke +Anoosh +Arumugam +// end::filterNow +; + +currentTimestamp-Ignore +// tag::curTs +SELECT CURRENT_TIMESTAMP AS result; + + result +------------------------ +2018-12-12T14:48:52.448Z +// end::curTs +; + +currentTimestampFunction-Ignore +// tag::curTsFunction +SELECT CURRENT_TIMESTAMP() AS result; + + result +------------------------ +2018-12-12T14:48:52.448Z +// end::curTsFunction +; + +currentTimestampFunctionPrecision-Ignore +// tag::curTsFunctionPrecision +SELECT CURRENT_TIMESTAMP(1) AS result; + + result +------------------------ +2018-12-12T14:48:52.4Z +// end::curTsFunctionPrecision +; + + +nowFunction-Ignore +// tag::nowFunction +SELECT NOW() AS result; + + result +------------------------ +2018-12-12T14:48:52.448Z +// end::nowFunction +; diff --git a/x-pack/plugin/sql/qa/src/main/resources/fulltext.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/fulltext.csv-spec index 93493ffdc2a..07df14d99e3 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/fulltext.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/fulltext.csv-spec @@ -59,14 +59,14 @@ SELECT emp_no, first_name, gender, last_name FROM test_emp WHERE MATCH('first_na ; multiMatchQueryAllOptions -SELECT emp_no, first_name, gender, last_name FROM test_emp WHERE MATCH('first_name,last_name', 'Morton', 'slop=1;lenient=true;cutoff_frequency=2;tie_breaker=0.1;use_dis_max=true;fuzzy_rewrite=scoring_boolean;minimum_should_match=1;operator=AND;max_expansions=30;prefix_length=1;analyzer=english;type=best_fields;auto_generate_synonyms_phrase_query=true;fuzzy_transpositions=true'); +SELECT emp_no, first_name, gender, last_name FROM test_emp WHERE MATCH('first_name,last_name', 'Morton', 'slop=1;lenient=true;cutoff_frequency=2;tie_breaker=0.1;fuzzy_rewrite=scoring_boolean;minimum_should_match=1;operator=AND;max_expansions=30;prefix_length=1;analyzer=english;type=best_fields;auto_generate_synonyms_phrase_query=true;fuzzy_transpositions=true'); emp_no:i | first_name:s | gender:s | last_name:s 10095 |Hilari |M |Morton ; multiMatchQueryWithInMultipleCommaSeparatedStrings -SELECT emp_no, first_name, gender, last_name FROM test_emp WHERE MATCH('first_name,last_name', 'Morton', 'slop=1;lenient=true', 'cutoff_frequency=2','tie_breaker=0.1;use_dis_max=true;fuzzy_rewrite=scoring_boolean','minimum_should_match=1;operator=AND;max_expansions=30;prefix_length=1;analyzer=english;type=best_fields;auto_generate_synonyms_phrase_query=true;fuzzy_transpositions=true'); +SELECT emp_no, first_name, gender, last_name FROM test_emp WHERE MATCH('first_name,last_name', 'Morton', 'slop=1;lenient=true', 'cutoff_frequency=2','tie_breaker=0.1;fuzzy_rewrite=scoring_boolean','minimum_should_match=1;operator=AND;max_expansions=30;prefix_length=1;analyzer=english;type=best_fields;auto_generate_synonyms_phrase_query=true;fuzzy_transpositions=true'); emp_no:i | first_name:s | gender:s | last_name:s 10095 |Hilari |M |Morton @@ -76,12 +76,12 @@ score SELECT emp_no, first_name, SCORE() FROM test_emp WHERE MATCH(first_name, 'Erez') ORDER BY SCORE(); emp_no:i 
| first_name:s | SCORE():f -10076 |Erez |4.2096553 +10076 |Erez |4.1053944 ; scoreAsSomething SELECT emp_no, first_name, SCORE() as s FROM test_emp WHERE MATCH(first_name, 'Erez') ORDER BY SCORE(); emp_no:i | first_name:s | s:f -10076 |Erez |4.2096553 +10076 |Erez |4.1053944 ; diff --git a/x-pack/plugin/sql/qa/src/main/resources/math.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/math.csv-spec index 7a63f412f43..2df93b37954 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/math.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/math.csv-spec @@ -99,13 +99,13 @@ SELECT MIN(salary) mi, MAX(salary) ma, YEAR(hire_date) year, ROUND(AVG(languages mi:i | ma:i | year:i |ROUND(AVG(languages),1):d|TRUNCATE(AVG(languages),1):d| COUNT(1):l ---------------+---------------+---------------+-------------------------+----------------------------+--------------- -25324 |70011 |1987 |3.0 |3.0 |15 -25945 |73578 |1988 |2.9 |2.8 |9 -25976 |74970 |1989 |3.0 |3.0 |13 -31120 |71165 |1990 |3.1 |3.0 |12 -30404 |58715 |1993 |3.0 |3.0 |3 -35742 |67492 |1994 |2.8 |2.7 |4 -45656 |45656 |1996 |3.0 |3.0 |1 +25324 |70011 |1986 |3.0 |3.0 |15 +25945 |73578 |1987 |2.9 |2.8 |9 +25976 |74970 |1988 |3.0 |3.0 |13 +31120 |71165 |1989 |3.1 |3.0 |12 +30404 |58715 |1992 |3.0 |3.0 |3 +35742 |67492 |1993 |2.8 |2.7 |4 +45656 |45656 |1995 |3.0 |3.0 |1 ; minMaxRoundWithHavingRound @@ -113,17 +113,17 @@ SELECT MIN(salary) mi, MAX(salary) ma, YEAR(hire_date) year, ROUND(AVG(languages mi:i | ma:i | year:i |ROUND(AVG(languages),1):d| COUNT(1):l ---------------+---------------+---------------+-------------------------+--------------- -26436 |74999 |1985 |3.1 |11 -31897 |61805 |1986 |3.5 |11 -25324 |70011 |1987 |3.0 |15 -25945 |73578 |1988 |2.9 |9 -25976 |74970 |1989 |3.0 |13 -31120 |71165 |1990 |3.1 |12 -32568 |65030 |1991 |3.3 |6 -27215 |60781 |1992 |4.1 |8 -30404 |58715 |1993 |3.0 |3 -35742 |67492 |1994 |2.8 |4 -45656 |45656 |1996 |3.0 |1 +26436 |74999 |1984 |3.1 |11 +31897 |61805 |1985 |3.5 |11 +25324 |70011 |1986 |3.0 |15 +25945 |73578 |1987 |2.9 |9 +25976 |74970 |1988 |3.0 |13 +31120 |71165 |1989 |3.1 |12 +32568 |65030 |1990 |3.3 |6 +27215 |60781 |1991 |4.1 |8 +30404 |58715 |1992 |3.0 |3 +35742 |67492 |1993 |2.8 |4 +45656 |45656 |1995 |3.0 |1 ; groupByAndOrderByTruncateWithPositiveParameter diff --git a/x-pack/plugin/sql/qa/src/main/resources/null.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/null.sql-spec index 8da5d8c1e8b..d5a21262ca5 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/null.sql-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/null.sql-spec @@ -11,6 +11,9 @@ SELECT COALESCE(null, ABS(MAX(emp_no)) + 1, 123) AS c FROM test_emp GROUP BY lan coalesceWhere SELECT COALESCE(null, ABS(emp_no) + 1, 123) AS c FROM test_emp WHERE COALESCE(null, ABS(emp_no) + 1, 123, 321) > 100 ORDER BY emp_no NULLS FIRST LIMIT 5; +coalesceOrderBy +SELECT COALESCE(null, ABS(emp_no) + 1, 123) AS c FROM test_emp ORDER BY c NULLS FIRST LIMIT 5; + ifNullField SELECT IFNULL(null, ABS(emp_no) + 1) AS "ifnull" FROM test_emp ORDER BY emp_no LIMIT 5; @@ -23,6 +26,9 @@ SELECT NULLIF(10002, ABS(emp_no) + 1) AS c, emp_no FROM test_emp WHERE NULLIF(10 nullIfHaving SELECT NULLIF(10030, ABS(MAX(emp_no)) + 1) AS nif FROM test_emp GROUP BY languages HAVING nif IS NOT NULL ORDER BY languages; +nullIfOrderBy +SELECT NULLIF(10030, ABS(emp_no + 1)) AS nif FROM test_emp ORDER BY nif NULLS FIRST LIMIT 5; + greatestField SELECT GREATEST(emp_no - 1 + 3, ABS(emp_no) + 1) AS "greatest" FROM test_emp ORDER BY emp_no LIMIT 5; @@ -32,6 +38,9 @@ SELECT emp_no 
FROM test_emp WHERE GREATEST(10005, ABS(emp_no) + 1, null, emp_no greatestHaving SELECT GREATEST(10096, ABS(MAX(emp_no)) + 1) AS gt FROM test_emp GROUP BY languages HAVING gt >= 10098 ORDER BY languages; +greatestOrderBy +SELECT GREATEST(10096, ABS(emp_no + 1)) AS gt FROM test_emp ORDER BY gt LIMIT 10; + leastField SELECT LEAST(emp_no - 1 + 3, ABS(emp_no) + 1) AS "least" FROM test_emp ORDER BY emp_no LIMIT 5; @@ -40,3 +49,6 @@ SELECT emp_no FROM test_emp WHERE LEAST(10005, ABS(emp_no) + 1, null, emp_no - 1 leastHaving SELECT LEAST(10098, ABS(MAX(emp_no)) + 1) AS lt FROM test_emp GROUP BY languages HAVING lt >= 10095 ORDER BY languages; + +leastOrderBy +SELECT LEAST(10096, ABS(emp_no + 1)) AS lt FROM test_emp ORDER BY lt LIMIT 10; diff --git a/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-774e9aefbc.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-774e9aefbc.jar.sha1 new file mode 100644 index 00000000000..2708b818d44 --- /dev/null +++ b/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-774e9aefbc.jar.sha1 @@ -0,0 +1 @@ +49b3ac44b6749a7ebf0c2e41a81e7910133d2fcc \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-7e4555a2fd.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-7e4555a2fd.jar.sha1 deleted file mode 100644 index ef4b48a8bd6..00000000000 --- a/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-7e4555a2fd.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -47253358ac340c35845c2a1007849db4234740da \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 index f6e49b0cf66..84e98da4852 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 +++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 @@ -217,6 +217,7 @@ valueExpression primaryExpression : castExpression #cast | extractExpression #extract + | builtinDateTimeFunction #currentDateTimeFunction | constant #constantDefault | (qualifiedName DOT)? ASTERISK #star | functionExpression #function @@ -235,6 +236,10 @@ castExpression castTemplate : CAST '(' expression AS dataType ')' ; + +builtinDateTimeFunction + : name=CURRENT_TIMESTAMP ('(' precision=INTEGER_VALUE? ')')? 
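+ // both the parentheses and the precision argument are optional, so CURRENT_TIMESTAMP, + // CURRENT_TIMESTAMP() and CURRENT_TIMESTAMP(2) are all accepted by this rule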
+ ; convertTemplate : CONVERT '(' expression ',' dataType ')' @@ -334,7 +339,7 @@ string // http://developer.mimer.se/validator/sql-reserved-words.tml nonReserved : ANALYZE | ANALYZED - | CATALOGS | COLUMNS + | CATALOGS | COLUMNS | CURRENT | DAY | DEBUG | EXECUTABLE | EXPLAIN | FIRST | FORMAT | FUNCTIONS @@ -367,6 +372,8 @@ CATALOG: 'CATALOG'; CATALOGS: 'CATALOGS'; COLUMNS: 'COLUMNS'; CONVERT: 'CONVERT'; +CURRENT: 'CURRENT'; +CURRENT_TIMESTAMP : 'CURRENT_TIMESTAMP'; DAY: 'DAY'; DAYS: 'DAYS'; DEBUG: 'DEBUG'; diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens b/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens index b815fd4d0cf..f2d522b2bc7 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens +++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens @@ -16,113 +16,115 @@ CATALOG=15 CATALOGS=16 COLUMNS=17 CONVERT=18 -DAY=19 -DAYS=20 -DEBUG=21 -DESC=22 -DESCRIBE=23 -DISTINCT=24 -ESCAPE=25 -EXECUTABLE=26 -EXISTS=27 -EXPLAIN=28 -EXTRACT=29 -FALSE=30 -FIRST=31 -FORMAT=32 -FROM=33 -FULL=34 -FUNCTIONS=35 -GRAPHVIZ=36 -GROUP=37 -HAVING=38 -HOUR=39 -HOURS=40 -IN=41 -INNER=42 -INTERVAL=43 -IS=44 -JOIN=45 -LAST=46 -LEFT=47 -LIKE=48 -LIMIT=49 -MAPPED=50 -MATCH=51 -MINUTE=52 -MINUTES=53 -MONTH=54 -MONTHS=55 -NATURAL=56 -NOT=57 -NULL=58 -NULLS=59 -ON=60 -OPTIMIZED=61 -OR=62 -ORDER=63 -OUTER=64 -PARSED=65 -PHYSICAL=66 -PLAN=67 -RIGHT=68 -RLIKE=69 -QUERY=70 -SCHEMAS=71 -SECOND=72 -SECONDS=73 -SELECT=74 -SHOW=75 -SYS=76 -TABLE=77 -TABLES=78 -TEXT=79 -TRUE=80 -TO=81 -TYPE=82 -TYPES=83 -USING=84 -VERIFY=85 -WHERE=86 -WITH=87 -YEAR=88 -YEARS=89 -ESCAPE_ESC=90 -FUNCTION_ESC=91 -LIMIT_ESC=92 -DATE_ESC=93 -TIME_ESC=94 -TIMESTAMP_ESC=95 -GUID_ESC=96 -ESC_END=97 -EQ=98 -NULLEQ=99 -NEQ=100 -LT=101 -LTE=102 -GT=103 -GTE=104 -PLUS=105 -MINUS=106 -ASTERISK=107 -SLASH=108 -PERCENT=109 -CONCAT=110 -DOT=111 -PARAM=112 -STRING=113 -INTEGER_VALUE=114 -DECIMAL_VALUE=115 -IDENTIFIER=116 -DIGIT_IDENTIFIER=117 -TABLE_IDENTIFIER=118 -QUOTED_IDENTIFIER=119 -BACKQUOTED_IDENTIFIER=120 -SIMPLE_COMMENT=121 -BRACKETED_COMMENT=122 -WS=123 -UNRECOGNIZED=124 -DELIMITER=125 +CURRENT=19 +CURRENT_TIMESTAMP=20 +DAY=21 +DAYS=22 +DEBUG=23 +DESC=24 +DESCRIBE=25 +DISTINCT=26 +ESCAPE=27 +EXECUTABLE=28 +EXISTS=29 +EXPLAIN=30 +EXTRACT=31 +FALSE=32 +FIRST=33 +FORMAT=34 +FROM=35 +FULL=36 +FUNCTIONS=37 +GRAPHVIZ=38 +GROUP=39 +HAVING=40 +HOUR=41 +HOURS=42 +IN=43 +INNER=44 +INTERVAL=45 +IS=46 +JOIN=47 +LAST=48 +LEFT=49 +LIKE=50 +LIMIT=51 +MAPPED=52 +MATCH=53 +MINUTE=54 +MINUTES=55 +MONTH=56 +MONTHS=57 +NATURAL=58 +NOT=59 +NULL=60 +NULLS=61 +ON=62 +OPTIMIZED=63 +OR=64 +ORDER=65 +OUTER=66 +PARSED=67 +PHYSICAL=68 +PLAN=69 +RIGHT=70 +RLIKE=71 +QUERY=72 +SCHEMAS=73 +SECOND=74 +SECONDS=75 +SELECT=76 +SHOW=77 +SYS=78 +TABLE=79 +TABLES=80 +TEXT=81 +TRUE=82 +TO=83 +TYPE=84 +TYPES=85 +USING=86 +VERIFY=87 +WHERE=88 +WITH=89 +YEAR=90 +YEARS=91 +ESCAPE_ESC=92 +FUNCTION_ESC=93 +LIMIT_ESC=94 +DATE_ESC=95 +TIME_ESC=96 +TIMESTAMP_ESC=97 +GUID_ESC=98 +ESC_END=99 +EQ=100 +NULLEQ=101 +NEQ=102 +LT=103 +LTE=104 +GT=105 +GTE=106 +PLUS=107 +MINUS=108 +ASTERISK=109 +SLASH=110 +PERCENT=111 +CONCAT=112 +DOT=113 +PARAM=114 +STRING=115 +INTEGER_VALUE=116 +DECIMAL_VALUE=117 +IDENTIFIER=118 +DIGIT_IDENTIFIER=119 +TABLE_IDENTIFIER=120 +QUOTED_IDENTIFIER=121 +BACKQUOTED_IDENTIFIER=122 +SIMPLE_COMMENT=123 +BRACKETED_COMMENT=124 +WS=125 +UNRECOGNIZED=126 +DELIMITER=127 '('=1 ')'=2 ','=3 @@ -141,96 +143,98 @@ DELIMITER=125 'CATALOGS'=16 'COLUMNS'=17 'CONVERT'=18 -'DAY'=19 -'DAYS'=20 -'DEBUG'=21 -'DESC'=22 -'DESCRIBE'=23 -'DISTINCT'=24 -'ESCAPE'=25 -'EXECUTABLE'=26 -'EXISTS'=27 
-'EXPLAIN'=28 -'EXTRACT'=29 -'FALSE'=30 -'FIRST'=31 -'FORMAT'=32 -'FROM'=33 -'FULL'=34 -'FUNCTIONS'=35 -'GRAPHVIZ'=36 -'GROUP'=37 -'HAVING'=38 -'HOUR'=39 -'HOURS'=40 -'IN'=41 -'INNER'=42 -'INTERVAL'=43 -'IS'=44 -'JOIN'=45 -'LAST'=46 -'LEFT'=47 -'LIKE'=48 -'LIMIT'=49 -'MAPPED'=50 -'MATCH'=51 -'MINUTE'=52 -'MINUTES'=53 -'MONTH'=54 -'MONTHS'=55 -'NATURAL'=56 -'NOT'=57 -'NULL'=58 -'NULLS'=59 -'ON'=60 -'OPTIMIZED'=61 -'OR'=62 -'ORDER'=63 -'OUTER'=64 -'PARSED'=65 -'PHYSICAL'=66 -'PLAN'=67 -'RIGHT'=68 -'RLIKE'=69 -'QUERY'=70 -'SCHEMAS'=71 -'SECOND'=72 -'SECONDS'=73 -'SELECT'=74 -'SHOW'=75 -'SYS'=76 -'TABLE'=77 -'TABLES'=78 -'TEXT'=79 -'TRUE'=80 -'TO'=81 -'TYPE'=82 -'TYPES'=83 -'USING'=84 -'VERIFY'=85 -'WHERE'=86 -'WITH'=87 -'YEAR'=88 -'YEARS'=89 -'{ESCAPE'=90 -'{FN'=91 -'{LIMIT'=92 -'{D'=93 -'{T'=94 -'{TS'=95 -'{GUID'=96 -'}'=97 -'='=98 -'<=>'=99 -'<'=101 -'<='=102 -'>'=103 -'>='=104 -'+'=105 -'-'=106 -'*'=107 -'/'=108 -'%'=109 -'||'=110 -'.'=111 -'?'=112 +'CURRENT'=19 +'CURRENT_TIMESTAMP'=20 +'DAY'=21 +'DAYS'=22 +'DEBUG'=23 +'DESC'=24 +'DESCRIBE'=25 +'DISTINCT'=26 +'ESCAPE'=27 +'EXECUTABLE'=28 +'EXISTS'=29 +'EXPLAIN'=30 +'EXTRACT'=31 +'FALSE'=32 +'FIRST'=33 +'FORMAT'=34 +'FROM'=35 +'FULL'=36 +'FUNCTIONS'=37 +'GRAPHVIZ'=38 +'GROUP'=39 +'HAVING'=40 +'HOUR'=41 +'HOURS'=42 +'IN'=43 +'INNER'=44 +'INTERVAL'=45 +'IS'=46 +'JOIN'=47 +'LAST'=48 +'LEFT'=49 +'LIKE'=50 +'LIMIT'=51 +'MAPPED'=52 +'MATCH'=53 +'MINUTE'=54 +'MINUTES'=55 +'MONTH'=56 +'MONTHS'=57 +'NATURAL'=58 +'NOT'=59 +'NULL'=60 +'NULLS'=61 +'ON'=62 +'OPTIMIZED'=63 +'OR'=64 +'ORDER'=65 +'OUTER'=66 +'PARSED'=67 +'PHYSICAL'=68 +'PLAN'=69 +'RIGHT'=70 +'RLIKE'=71 +'QUERY'=72 +'SCHEMAS'=73 +'SECOND'=74 +'SECONDS'=75 +'SELECT'=76 +'SHOW'=77 +'SYS'=78 +'TABLE'=79 +'TABLES'=80 +'TEXT'=81 +'TRUE'=82 +'TO'=83 +'TYPE'=84 +'TYPES'=85 +'USING'=86 +'VERIFY'=87 +'WHERE'=88 +'WITH'=89 +'YEAR'=90 +'YEARS'=91 +'{ESCAPE'=92 +'{FN'=93 +'{LIMIT'=94 +'{D'=95 +'{T'=96 +'{TS'=97 +'{GUID'=98 +'}'=99 +'='=100 +'<=>'=101 +'<'=103 +'<='=104 +'>'=105 +'>='=106 +'+'=107 +'-'=108 +'*'=109 +'/'=110 +'%'=111 +'||'=112 +'.'=113 +'?'=114 diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens b/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens index ba6b3a69e8e..8b586035f85 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens +++ b/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens @@ -16,112 +16,114 @@ CATALOG=15 CATALOGS=16 COLUMNS=17 CONVERT=18 -DAY=19 -DAYS=20 -DEBUG=21 -DESC=22 -DESCRIBE=23 -DISTINCT=24 -ESCAPE=25 -EXECUTABLE=26 -EXISTS=27 -EXPLAIN=28 -EXTRACT=29 -FALSE=30 -FIRST=31 -FORMAT=32 -FROM=33 -FULL=34 -FUNCTIONS=35 -GRAPHVIZ=36 -GROUP=37 -HAVING=38 -HOUR=39 -HOURS=40 -IN=41 -INNER=42 -INTERVAL=43 -IS=44 -JOIN=45 -LAST=46 -LEFT=47 -LIKE=48 -LIMIT=49 -MAPPED=50 -MATCH=51 -MINUTE=52 -MINUTES=53 -MONTH=54 -MONTHS=55 -NATURAL=56 -NOT=57 -NULL=58 -NULLS=59 -ON=60 -OPTIMIZED=61 -OR=62 -ORDER=63 -OUTER=64 -PARSED=65 -PHYSICAL=66 -PLAN=67 -RIGHT=68 -RLIKE=69 -QUERY=70 -SCHEMAS=71 -SECOND=72 -SECONDS=73 -SELECT=74 -SHOW=75 -SYS=76 -TABLE=77 -TABLES=78 -TEXT=79 -TRUE=80 -TO=81 -TYPE=82 -TYPES=83 -USING=84 -VERIFY=85 -WHERE=86 -WITH=87 -YEAR=88 -YEARS=89 -ESCAPE_ESC=90 -FUNCTION_ESC=91 -LIMIT_ESC=92 -DATE_ESC=93 -TIME_ESC=94 -TIMESTAMP_ESC=95 -GUID_ESC=96 -ESC_END=97 -EQ=98 -NULLEQ=99 -NEQ=100 -LT=101 -LTE=102 -GT=103 -GTE=104 -PLUS=105 -MINUS=106 -ASTERISK=107 -SLASH=108 -PERCENT=109 -CONCAT=110 -DOT=111 -PARAM=112 -STRING=113 -INTEGER_VALUE=114 -DECIMAL_VALUE=115 -IDENTIFIER=116 -DIGIT_IDENTIFIER=117 -TABLE_IDENTIFIER=118 -QUOTED_IDENTIFIER=119 
-BACKQUOTED_IDENTIFIER=120 -SIMPLE_COMMENT=121 -BRACKETED_COMMENT=122 -WS=123 -UNRECOGNIZED=124 +CURRENT=19 +CURRENT_TIMESTAMP=20 +DAY=21 +DAYS=22 +DEBUG=23 +DESC=24 +DESCRIBE=25 +DISTINCT=26 +ESCAPE=27 +EXECUTABLE=28 +EXISTS=29 +EXPLAIN=30 +EXTRACT=31 +FALSE=32 +FIRST=33 +FORMAT=34 +FROM=35 +FULL=36 +FUNCTIONS=37 +GRAPHVIZ=38 +GROUP=39 +HAVING=40 +HOUR=41 +HOURS=42 +IN=43 +INNER=44 +INTERVAL=45 +IS=46 +JOIN=47 +LAST=48 +LEFT=49 +LIKE=50 +LIMIT=51 +MAPPED=52 +MATCH=53 +MINUTE=54 +MINUTES=55 +MONTH=56 +MONTHS=57 +NATURAL=58 +NOT=59 +NULL=60 +NULLS=61 +ON=62 +OPTIMIZED=63 +OR=64 +ORDER=65 +OUTER=66 +PARSED=67 +PHYSICAL=68 +PLAN=69 +RIGHT=70 +RLIKE=71 +QUERY=72 +SCHEMAS=73 +SECOND=74 +SECONDS=75 +SELECT=76 +SHOW=77 +SYS=78 +TABLE=79 +TABLES=80 +TEXT=81 +TRUE=82 +TO=83 +TYPE=84 +TYPES=85 +USING=86 +VERIFY=87 +WHERE=88 +WITH=89 +YEAR=90 +YEARS=91 +ESCAPE_ESC=92 +FUNCTION_ESC=93 +LIMIT_ESC=94 +DATE_ESC=95 +TIME_ESC=96 +TIMESTAMP_ESC=97 +GUID_ESC=98 +ESC_END=99 +EQ=100 +NULLEQ=101 +NEQ=102 +LT=103 +LTE=104 +GT=105 +GTE=106 +PLUS=107 +MINUS=108 +ASTERISK=109 +SLASH=110 +PERCENT=111 +CONCAT=112 +DOT=113 +PARAM=114 +STRING=115 +INTEGER_VALUE=116 +DECIMAL_VALUE=117 +IDENTIFIER=118 +DIGIT_IDENTIFIER=119 +TABLE_IDENTIFIER=120 +QUOTED_IDENTIFIER=121 +BACKQUOTED_IDENTIFIER=122 +SIMPLE_COMMENT=123 +BRACKETED_COMMENT=124 +WS=125 +UNRECOGNIZED=126 '('=1 ')'=2 ','=3 @@ -140,96 +142,98 @@ UNRECOGNIZED=124 'CATALOGS'=16 'COLUMNS'=17 'CONVERT'=18 -'DAY'=19 -'DAYS'=20 -'DEBUG'=21 -'DESC'=22 -'DESCRIBE'=23 -'DISTINCT'=24 -'ESCAPE'=25 -'EXECUTABLE'=26 -'EXISTS'=27 -'EXPLAIN'=28 -'EXTRACT'=29 -'FALSE'=30 -'FIRST'=31 -'FORMAT'=32 -'FROM'=33 -'FULL'=34 -'FUNCTIONS'=35 -'GRAPHVIZ'=36 -'GROUP'=37 -'HAVING'=38 -'HOUR'=39 -'HOURS'=40 -'IN'=41 -'INNER'=42 -'INTERVAL'=43 -'IS'=44 -'JOIN'=45 -'LAST'=46 -'LEFT'=47 -'LIKE'=48 -'LIMIT'=49 -'MAPPED'=50 -'MATCH'=51 -'MINUTE'=52 -'MINUTES'=53 -'MONTH'=54 -'MONTHS'=55 -'NATURAL'=56 -'NOT'=57 -'NULL'=58 -'NULLS'=59 -'ON'=60 -'OPTIMIZED'=61 -'OR'=62 -'ORDER'=63 -'OUTER'=64 -'PARSED'=65 -'PHYSICAL'=66 -'PLAN'=67 -'RIGHT'=68 -'RLIKE'=69 -'QUERY'=70 -'SCHEMAS'=71 -'SECOND'=72 -'SECONDS'=73 -'SELECT'=74 -'SHOW'=75 -'SYS'=76 -'TABLE'=77 -'TABLES'=78 -'TEXT'=79 -'TRUE'=80 -'TO'=81 -'TYPE'=82 -'TYPES'=83 -'USING'=84 -'VERIFY'=85 -'WHERE'=86 -'WITH'=87 -'YEAR'=88 -'YEARS'=89 -'{ESCAPE'=90 -'{FN'=91 -'{LIMIT'=92 -'{D'=93 -'{T'=94 -'{TS'=95 -'{GUID'=96 -'}'=97 -'='=98 -'<=>'=99 -'<'=101 -'<='=102 -'>'=103 -'>='=104 -'+'=105 -'-'=106 -'*'=107 -'/'=108 -'%'=109 -'||'=110 -'.'=111 -'?'=112 +'CURRENT'=19 +'CURRENT_TIMESTAMP'=20 +'DAY'=21 +'DAYS'=22 +'DEBUG'=23 +'DESC'=24 +'DESCRIBE'=25 +'DISTINCT'=26 +'ESCAPE'=27 +'EXECUTABLE'=28 +'EXISTS'=29 +'EXPLAIN'=30 +'EXTRACT'=31 +'FALSE'=32 +'FIRST'=33 +'FORMAT'=34 +'FROM'=35 +'FULL'=36 +'FUNCTIONS'=37 +'GRAPHVIZ'=38 +'GROUP'=39 +'HAVING'=40 +'HOUR'=41 +'HOURS'=42 +'IN'=43 +'INNER'=44 +'INTERVAL'=45 +'IS'=46 +'JOIN'=47 +'LAST'=48 +'LEFT'=49 +'LIKE'=50 +'LIMIT'=51 +'MAPPED'=52 +'MATCH'=53 +'MINUTE'=54 +'MINUTES'=55 +'MONTH'=56 +'MONTHS'=57 +'NATURAL'=58 +'NOT'=59 +'NULL'=60 +'NULLS'=61 +'ON'=62 +'OPTIMIZED'=63 +'OR'=64 +'ORDER'=65 +'OUTER'=66 +'PARSED'=67 +'PHYSICAL'=68 +'PLAN'=69 +'RIGHT'=70 +'RLIKE'=71 +'QUERY'=72 +'SCHEMAS'=73 +'SECOND'=74 +'SECONDS'=75 +'SELECT'=76 +'SHOW'=77 +'SYS'=78 +'TABLE'=79 +'TABLES'=80 +'TEXT'=81 +'TRUE'=82 +'TO'=83 +'TYPE'=84 +'TYPES'=85 +'USING'=86 +'VERIFY'=87 +'WHERE'=88 +'WITH'=89 +'YEAR'=90 +'YEARS'=91 +'{ESCAPE'=92 +'{FN'=93 +'{LIMIT'=94 +'{D'=95 +'{T'=96 +'{TS'=97 +'{GUID'=98 +'}'=99 +'='=100 +'<=>'=101 +'<'=103 
+'<='=104 +'>'=105 +'>='=106 +'+'=107 +'-'=108 +'*'=109 +'/'=110 +'%'=111 +'||'=112 +'.'=113 +'?'=114 diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlFeatureSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlFeatureSet.java index 203f9d01a51..ae478b1d4a5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlFeatureSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlFeatureSet.java @@ -31,7 +31,7 @@ public class SqlFeatureSet implements XPackFeatureSet { private final boolean enabled; private final XPackLicenseState licenseState; - private Client client; + private final Client client; @Inject public SqlFeatureSet(Settings settings, @Nullable XPackLicenseState licenseState, Client client) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java index 3d407bef286..2b1aa42277e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java @@ -108,7 +108,11 @@ public class Analyzer extends RuleExecutor { new ResolveAggsInHaving() //new ImplicitCasting() ); - return Arrays.asList(substitution, resolution); + Batch finish = new Batch("Finish Analysis", + new PruneSubqueryAliases(), + CleanAliases.INSTANCE + ); + return Arrays.asList(substitution, resolution, finish); } public LogicalPlan analyze(LogicalPlan plan) { @@ -931,14 +935,15 @@ public class Analyzer extends RuleExecutor { if (!condition.resolved()) { // that's why try to resolve the condition Aggregate tryResolvingCondition = new Aggregate(agg.location(), agg.child(), agg.groupings(), - singletonList(new Alias(f.location(), ".having", condition))); + combine(agg.aggregates(), new Alias(f.location(), ".having", condition))); - LogicalPlan conditionResolved = analyze(tryResolvingCondition, false); + tryResolvingCondition = (Aggregate) analyze(tryResolvingCondition, false); // if it got resolved - if (conditionResolved.resolved()) { + if (tryResolvingCondition.resolved()) { // replace the condition with the resolved one - condition = ((Alias) ((Aggregate) conditionResolved).aggregates().get(0)).child(); + condition = ((Alias) tryResolvingCondition.aggregates() + .get(tryResolvingCondition.aggregates().size() - 1)).child(); } else { // else bail out return plan; @@ -954,6 +959,8 @@ public class Analyzer extends RuleExecutor { // preserve old output return new Project(f.location(), newFilter, f.output()); } + + return new Filter(f.location(), f.child(), condition); } return plan; } @@ -1057,6 +1064,69 @@ public class Analyzer extends RuleExecutor { } } + + public static class PruneSubqueryAliases extends AnalyzeRule { + + @Override + protected LogicalPlan rule(SubQueryAlias alias) { + return alias.child(); + } + + @Override + protected boolean skipResolved() { + return false; + } + } + + public static class CleanAliases extends AnalyzeRule { + + public static final CleanAliases INSTANCE = new CleanAliases(); + + @Override + protected LogicalPlan rule(LogicalPlan plan) { + if (plan instanceof Project) { + Project p = (Project) plan; + return new Project(p.location(), p.child(), cleanExpressions(p.projections())); + } + + if (plan instanceof Aggregate) { + Aggregate a = (Aggregate) plan; + // clean group expressions + List cleanedGroups = 
a.groupings().stream().map(CleanAliases::trimAliases).collect(toList()); + return new Aggregate(a.location(), a.child(), cleanedGroups, cleanExpressions(a.aggregates())); + } + + return plan.transformExpressionsOnly(e -> { + if (e instanceof Alias) { + return ((Alias) e).child(); + } + return e; + }); + } + + private List cleanExpressions(List args) { + return args.stream().map(CleanAliases::trimNonTopLevelAliases).map(NamedExpression.class::cast).collect(toList()); + } + + public static Expression trimNonTopLevelAliases(Expression e) { + if (e instanceof Alias) { + Alias a = (Alias) e; + return new Alias(a.location(), a.name(), a.qualifier(), trimAliases(a.child()), a.id()); + } + return trimAliases(e); + } + + private static Expression trimAliases(Expression e) { + return e.transformDown(Alias::child, Alias.class); + } + + @Override + protected boolean skipResolved() { + return false; + } + } + + abstract static class AnalyzeRule extends Rule { // transformUp (post-order) - that is first children and then the node @@ -1073,4 +1143,4 @@ public class Analyzer extends RuleExecutor { return true; } } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index 3fb0e7721f4..386be1eadcc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -462,7 +462,7 @@ public final class Verifier { Map> missing = new LinkedHashMap<>(); a.aggregates().forEach(ne -> - ne.collectFirstChildren(c -> checkGroupMatch(c, ne, a.groupings(), missing, functions))); + ne.collectFirstChildren(c -> checkGroupMatch(c, ne, a.groupings(), missing, functions))); if (!missing.isEmpty()) { String plural = missing.size() > 1 ? "s" : StringUtils.EMPTY; @@ -478,6 +478,13 @@ public final class Verifier { private static boolean checkGroupMatch(Expression e, Node source, List groupings, Map> missing, Map functions) { + + // 1:1 match + if (Expressions.match(groupings, e::semanticEquals)) { + return true; + } + + // resolve FunctionAttribute to backing functions if (e instanceof FunctionAttribute) { FunctionAttribute fa = (FunctionAttribute) e; @@ -521,12 +528,14 @@ public final class Verifier { if (Functions.isAggregate(e)) { return true; } + // left without leaves which have to match; if not there's a failure - + // make sure to match directly on the expression and not on the tree + // (since otherwise exp might match the function argument which would be incorrect) final Expression exp = e; if (e.children().isEmpty()) { - if (!Expressions.anyMatch(groupings, c -> exp.semanticEquals(exp instanceof Attribute ? Expressions.attribute(c) : c))) { - missing.put(e, source); + if (Expressions.match(groupings, c -> exp.semanticEquals(exp instanceof Attribute ? 
Expressions.attribute(c) : c)) == false) { + missing.put(exp, source); } return true; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java index aa6a2b0e89a..58b559d93a2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java @@ -70,6 +70,15 @@ public final class Expressions { return false; } + public static boolean match(List exps, Predicate predicate) { + for (Expression exp : exps) { + if (predicate.test(exp)) { + return true; + } + } + return false; + } + public static boolean nullable(List exps) { for (Expression exp : exps) { if (exp.nullable()) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnaryExpression.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnaryExpression.java index c2e764522f4..1dab263f0a4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnaryExpression.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnaryExpression.java @@ -5,9 +5,6 @@ */ package org.elasticsearch.xpack.sql.expression; -import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; @@ -16,12 +13,12 @@ import java.util.Objects; import static java.util.Collections.singletonList; -public abstract class UnaryExpression extends NamedExpression { +public abstract class UnaryExpression extends Expression { private final Expression child; protected UnaryExpression(Location location, Expression child) { - super(location, null, singletonList(child), null); + super(location, singletonList(child)); this.child = child; } @@ -58,21 +55,6 @@ public abstract class UnaryExpression extends NamedExpression { return child.dataType(); } - @Override - public Attribute toAttribute() { - throw new SqlIllegalArgumentException("Not supported yet"); - } - - @Override - public ScriptTemplate asScript() { - throw new SqlIllegalArgumentException("Not supported yet"); - } - - @Override - protected Pipe makePipe() { - throw new SqlIllegalArgumentException("Not supported yet"); - } - @Override public int hashCode() { return Objects.hash(child); @@ -91,4 +73,4 @@ public abstract class UnaryExpression extends NamedExpression { UnaryExpression other = (UnaryExpression) obj; return Objects.equals(child, other.child); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionDefinition.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionDefinition.java index 73a2bb69dd2..6811963d3d1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionDefinition.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionDefinition.java @@ -27,7 +27,7 @@ public class FunctionDefinition { /** * Is this a datetime function compatible with {@code EXTRACT}. 
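* When true, the function's name can be used as the unit extracted from a datetime, as in {@code EXTRACT(YEAR FROM hire_date)}.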
*/ - private final boolean datetime; + private final boolean extractViable; private final Builder builder; private final FunctionType type; @@ -36,7 +36,7 @@ public class FunctionDefinition { this.name = name; this.aliases = aliases; this.clazz = clazz; - this.datetime = datetime; + this.extractViable = datetime; this.builder = builder; this.type = FunctionType.of(clazz); } @@ -64,8 +64,8 @@ public class FunctionDefinition { /** * Is this a datetime function compatible with {@code EXTRACT}. */ - boolean datetime() { - return datetime; + boolean extractViable() { + return extractViable; } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java index 0268ac844c5..00581ffd84e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java @@ -20,9 +20,11 @@ import org.elasticsearch.xpack.sql.expression.function.aggregate.StddevPop; import org.elasticsearch.xpack.sql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.sql.expression.function.aggregate.SumOfSquares; import org.elasticsearch.xpack.sql.expression.function.aggregate.VarPop; +import org.elasticsearch.xpack.sql.expression.function.grouping.Histogram; import org.elasticsearch.xpack.sql.expression.function.scalar.Cast; import org.elasticsearch.xpack.sql.expression.function.scalar.Database; import org.elasticsearch.xpack.sql.expression.function.scalar.User; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.CurrentDateTime; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayName; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfMonth; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfWeek; @@ -152,15 +154,18 @@ public class FunctionRegistry { def(SumOfSquares.class, SumOfSquares::new, "SUM_OF_SQUARES"), def(Skewness.class, Skewness::new, "SKEWNESS"), def(Kurtosis.class, Kurtosis::new, "KURTOSIS")); + // histogram + addToMap(def(Histogram.class, Histogram::new, "HISTOGRAM")); // Scalar functions // Conditional addToMap(def(Coalesce.class, Coalesce::new, "COALESCE"), - def(IfNull.class, IfNull::new, "IFNULL", "ISNULL", "NVL"), - def(NullIf.class, NullIf::new, "NULLIF"), - def(Greatest.class, Greatest::new, "GREATEST"), - def(Least.class, Least::new, "LEAST")); + def(IfNull.class, IfNull::new, "IFNULL", "ISNULL", "NVL"), + def(NullIf.class, NullIf::new, "NULLIF"), + def(Greatest.class, Greatest::new, "GREATEST"), + def(Least.class, Least::new, "LEAST")); // Date - addToMap(def(DayName.class, DayName::new, "DAY_NAME", "DAYNAME"), + addToMap(def(CurrentDateTime.class, CurrentDateTime::new, "CURRENT_TIMESTAMP", "NOW"), + def(DayName.class, DayName::new, "DAY_NAME", "DAYNAME"), def(DayOfMonth.class, DayOfMonth::new, "DAY_OF_MONTH", "DAYOFMONTH", "DAY", "DOM"), def(DayOfWeek.class, DayOfWeek::new, "DAY_OF_WEEK", "DAYOFWEEK", "DOW"), def(DayOfYear.class, DayOfYear::new, "DAY_OF_YEAR", "DAYOFYEAR", "DOY"), @@ -281,7 +286,7 @@ public class FunctionRegistry { // It is worth double checking if we need this copy. These are immutable anyway. 
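// Each map key (a canonical name or one of its aliases) is rebuilt below as its own // FunctionDefinition with an empty alias list, which is why aliased names show up // as separate entries when the registered functions are listed.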
return defs.entrySet().stream() .map(e -> new FunctionDefinition(e.getKey(), emptyList(), - e.getValue().clazz(), e.getValue().datetime(), e.getValue().builder())) + e.getValue().clazz(), e.getValue().extractViable(), e.getValue().builder())) .collect(toList()); } @@ -291,7 +296,7 @@ public class FunctionRegistry { return defs.entrySet().stream() .filter(e -> p == null || p.matcher(e.getKey()).matches()) .map(e -> new FunctionDefinition(e.getKey(), emptyList(), - e.getValue().clazz(), e.getValue().datetime(), e.getValue().builder())) + e.getValue().clazz(), e.getValue().extractViable(), e.getValue().builder())) .collect(toList()); } @@ -337,6 +342,32 @@ public class FunctionRegistry { T build(Location location, Configuration configuration); } + /** + * Build a {@linkplain FunctionDefinition} for a one-argument function that + * is not aware of time zone, does not support {@code DISTINCT} and needs + * the configuration object. + */ + @SuppressWarnings("overloads") + static FunctionDefinition def(Class function, + UnaryConfigurationAwareFunctionBuilder ctorRef, String... names) { + FunctionBuilder builder = (location, children, distinct, cfg) -> { + if (children.size() > 1) { + throw new IllegalArgumentException("expects exactly one argument"); + } + if (distinct) { + throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); + } + Expression ex = children.size() == 1 ? children.get(0) : null; + return ctorRef.build(location, ex, cfg); + }; + return def(function, builder, false, names); + } + + interface UnaryConfigurationAwareFunctionBuilder { + T build(Location location, Expression exp, Configuration configuration); + } + + /** * Build a {@linkplain FunctionDefinition} for a unary function that is not * aware of time zone and does not support {@code DISTINCT}. @@ -419,6 +450,28 @@ public class FunctionRegistry { T build(Location location, Expression target, TimeZone tz); } + /** + * Build a {@linkplain FunctionDefinition} for a binary function that + * requires a timezone. + */ + @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do + static FunctionDefinition def(Class function, DatetimeBinaryFunctionBuilder ctorRef, String... names) { + FunctionBuilder builder = (location, children, distinct, cfg) -> { + if (children.size() != 2) { + throw new IllegalArgumentException("expects exactly two arguments"); + } + if (distinct) { + throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); + } + return ctorRef.build(location, children.get(0), children.get(1), cfg.timeZone()); + }; + return def(function, builder, false, names); + } + + interface DatetimeBinaryFunctionBuilder { + T build(Location location, Expression lhs, Expression rhs, TimeZone tz); + } + /** * Build a {@linkplain FunctionDefinition} for a binary function that is * not aware of time zone and does not support {@code DISTINCT}. 
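* For illustration only, a registration of this shape would look like * {@code def(MyBinaryFn.class, MyBinaryFn::new, "MY_BINARY_FN")}, where {@code MyBinaryFn} * is a hypothetical class whose constructor takes the two argument expressions.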
@@ -531,4 +584,4 @@ public class FunctionRegistry { private interface CastFunctionBuilder { T build(Location location, Expression expression, DataType dataType); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionType.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionType.java index b2f4ab8ef2c..22b6a50d8ee 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionType.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionType.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.sql.expression.function.grouping.GroupingFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.sql.expression.predicate.conditional.ConditionalFunction; @@ -15,6 +16,7 @@ public enum FunctionType { AGGREGATE(AggregateFunction.class), CONDITIONAL(ConditionalFunction.class), + GROUPING(GroupingFunction.class), SCALAR(ScalarFunction.class), SCORE(Score.class); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunction.java index 51197cc1eb8..fa1be78a594 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunction.java @@ -240,14 +240,14 @@ public class UnresolvedFunction extends Function implements Unresolvable { } @Override public Function buildResolved(UnresolvedFunction uf, Configuration cfg, FunctionDefinition def) { - if (def.datetime()) { + if (def.extractViable()) { return def.builder().build(uf, false, cfg); } return uf.withMessage("Invalid datetime field [" + uf.name() + "]. 
Use any datetime function."); } @Override protected boolean isValidAlternative(FunctionDefinition def) { - return def.datetime(); + return def.extractViable(); } @Override protected String type() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java index 2b558970df5..e85824beaff 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java @@ -30,11 +30,11 @@ public abstract class AggregateFunction extends Function { private AggregateFunctionAttribute lazyAttribute; - AggregateFunction(Location location, Expression field) { + protected AggregateFunction(Location location, Expression field) { this(location, field, emptyList()); } - AggregateFunction(Location location, Expression field, List parameters) { + protected AggregateFunction(Location location, Expression field, List parameters) { super(location, CollectionUtils.combine(singletonList(field), parameters)); this.field = field; this.parameters = parameters; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunction.java new file mode 100644 index 00000000000..dbfef6aeb5d --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunction.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.grouping; + +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.Function; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.AggNameInput; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.util.CollectionUtils; + +import java.util.List; +import java.util.Objects; + +import static java.util.Collections.emptyList; +import static java.util.Collections.singletonList; + +/** + * A type of {@code Function} that creates groups or buckets. 
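+ * A grouping function (e.g. {@code HISTOGRAM}) is folded into the aggregation as a bucketing + * construct instead of being evaluated per document, which is why {@code asScript()} below + * rejects scripting and {@code makePipe()} only emits a placeholder for the query folder to replace.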
+ */ +public abstract class GroupingFunction extends Function { + + private final Expression field; + private final List parameters; + + private GroupingFunctionAttribute lazyAttribute; + + protected GroupingFunction(Location location, Expression field) { + this(location, field, emptyList()); + } + + protected GroupingFunction(Location location, Expression field, List parameters) { + super(location, CollectionUtils.combine(singletonList(field), parameters)); + this.field = field; + this.parameters = parameters; + } + + public Expression field() { + return field; + } + + public List parameters() { + return parameters; + } + + @Override + public GroupingFunctionAttribute toAttribute() { + if (lazyAttribute == null) { + // this is highly correlated with QueryFolder$FoldAggregate#addFunction (regarding the function name within the querydsl) + lazyAttribute = new GroupingFunctionAttribute(location(), name(), dataType(), id(), functionId()); + } + return lazyAttribute; + } + + @Override + public final GroupingFunction replaceChildren(List newChildren) { + if (newChildren.size() != 1) { + throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); + } + return replaceChild(newChildren.get(0)); + } + + protected abstract GroupingFunction replaceChild(Expression newChild); + + @Override + protected Pipe makePipe() { + // unresolved AggNameInput (should always get replaced by the folder) + return new AggNameInput(location(), this, name()); + } + + @Override + public ScriptTemplate asScript() { + throw new SqlIllegalArgumentException("Grouping functions cannot be scripted"); + } + + @Override + public boolean equals(Object obj) { + if (false == super.equals(obj)) { + return false; + } + GroupingFunction other = (GroupingFunction) obj; + return Objects.equals(other.field(), field()) + && Objects.equals(other.parameters(), parameters()); + } + + @Override + public int hashCode() { + return Objects.hash(field(), parameters()); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunctionAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunctionAttribute.java new file mode 100644 index 00000000000..4deac8a2f9e --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunctionAttribute.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
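For reference, the contract established above is small: a concrete subclass supplies replaceChild, dataType and info, while the final replaceChildren enforces the single-child shape. A minimal sketch of such a subclass (hypothetical SimpleBucket, illustrative only and not part of this patch; Histogram further down is the real implementation):

    import org.elasticsearch.xpack.sql.expression.Expression;
    import org.elasticsearch.xpack.sql.tree.Location;
    import org.elasticsearch.xpack.sql.tree.NodeInfo;
    import org.elasticsearch.xpack.sql.type.DataType;

    // Hypothetical single-field bucketing function, for illustration only.
    class SimpleBucket extends GroupingFunction {

        SimpleBucket(Location location, Expression field) {
            super(location, field); // one child: the grouped field
        }

        @Override
        protected GroupingFunction replaceChild(Expression newChild) {
            // the only mutation point the final replaceChildren() allows
            return new SimpleBucket(location(), newChild);
        }

        @Override
        public DataType dataType() {
            // the bucket key keeps the type of the underlying field
            return field().dataType();
        }

        @Override
        protected NodeInfo<SimpleBucket> info() {
            return NodeInfo.create(this, SimpleBucket::new, field());
        }
    }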
+ */ +package org.elasticsearch.xpack.sql.expression.function.grouping; + +import org.elasticsearch.xpack.sql.expression.Attribute; +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.ExpressionId; +import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.type.DataType; + +public class GroupingFunctionAttribute extends FunctionAttribute { + + GroupingFunctionAttribute(Location location, String name, DataType dataType, ExpressionId id, String functionId) { + this(location, name, dataType, null, false, id, false, functionId); + } + + public GroupingFunctionAttribute(Location location, String name, DataType dataType, String qualifier, + boolean nullable, ExpressionId id, boolean synthetic, String functionId) { + super(location, name, dataType, qualifier, nullable, id, synthetic, functionId); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, GroupingFunctionAttribute::new, + name(), dataType(), qualifier(), nullable(), id(), synthetic(), functionId()); + } + + @Override + protected Expression canonicalize() { + return new GroupingFunctionAttribute(location(), "", dataType(), null, true, id(), false, ""); + } + + @Override + protected Attribute clone(Location location, String name, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) { + // this is highly correlated with QueryFolder$FoldAggregate#addFunction (regarding the function name within the querydsl) + // that is the functionId is actually derived from the expression id to easily track it across contexts + return new GroupingFunctionAttribute(location, name, dataType(), qualifier, nullable, id, synthetic, functionId()); + } + + public GroupingFunctionAttribute withFunctionId(String functionId, String propertyPath) { + return new GroupingFunctionAttribute(location(), name(), dataType(), qualifier(), nullable(), + id(), synthetic(), functionId); + } + + @Override + protected String label() { + return "g->" + functionId(); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java new file mode 100644 index 00000000000..200682d980a --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.expression.function.grouping; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.Expressions.ParamOrdinal; +import org.elasticsearch.xpack.sql.expression.Literal; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.DataTypes; + +import java.util.Objects; +import java.util.TimeZone; + +public class Histogram extends GroupingFunction { + + private final Literal interval; + private final TimeZone timeZone; + + public Histogram(Location location, Expression field, Expression interval, TimeZone timeZone) { + super(location, field); + this.interval = (Literal) interval; + this.timeZone = timeZone; + } + + public Literal interval() { + return interval; + } + + public TimeZone timeZone() { + return timeZone; + } + + @Override + protected TypeResolution resolveType() { + TypeResolution resolution = Expressions.typeMustBeNumericOrDate(field(), "HISTOGRAM", ParamOrdinal.FIRST); + if (resolution == TypeResolution.TYPE_RESOLVED) { + // interval must be Literal interval + if (field().dataType() == DataType.DATE) { + resolution = Expressions.typeMustBe(interval, DataTypes::isInterval, "(Date) HISTOGRAM", ParamOrdinal.SECOND, "interval"); + } else { + resolution = Expressions.typeMustBeNumeric(interval, "(Numeric) HISTOGRAM", ParamOrdinal.SECOND); + } + } + + return resolution; + } + + @Override + protected GroupingFunction replaceChild(Expression newChild) { + return new Histogram(location(), newChild, interval, timeZone); + } + + @Override + public DataType dataType() { + return field().dataType(); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Histogram::new, field(), interval, timeZone); + } + + @Override + public int hashCode() { + return Objects.hash(field(), interval, timeZone); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj)) { + Histogram other = (Histogram) obj; + return Objects.equals(interval, other.interval) + && Objects.equals(timeZone, other.timeZone); + } + return false; + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/BaseSystemFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ConfigurationFunction.java similarity index 68% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/BaseSystemFunction.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ConfigurationFunction.java index 9e15e2041ac..369ae24da65 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/BaseSystemFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ConfigurationFunction.java @@ -7,23 +7,24 @@ package org.elasticsearch.xpack.sql.expression.function.scalar; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.gen.script.Params; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.session.Configuration; import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.tree.NodeInfo; import 
org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.StringUtils; import java.util.List; +import java.util.Objects; -abstract class BaseSystemFunction extends ScalarFunction { +public abstract class ConfigurationFunction extends ScalarFunction { private final Configuration configuration; + private final DataType dataType; - BaseSystemFunction(Location location, Configuration configuration) { + protected ConfigurationFunction(Location location, Configuration configuration, DataType dataType) { super(location); this.configuration = configuration; + this.dataType = dataType; } @Override @@ -31,16 +32,28 @@ abstract class BaseSystemFunction extends ScalarFunction { throw new UnsupportedOperationException("this node doesn't have any children"); } + protected Configuration configuration() { + return configuration; + } + @Override public DataType dataType() { - return DataType.KEYWORD; + return dataType; + } + + @Override + public boolean nullable() { + return false; } @Override public boolean foldable() { return true; } - + + @Override + public abstract Object fold(); + @Override protected String functionArgs() { return StringUtils.EMPTY; @@ -48,18 +61,16 @@ abstract class BaseSystemFunction extends ScalarFunction { @Override public ScriptTemplate asScript() { - return new ScriptTemplate((String) fold(), Params.EMPTY, DataType.KEYWORD); - } - - @Override - public abstract Object fold(); - - @Override - protected NodeInfo info() { - return null; + return asScript(this); } - protected Configuration configuration() { - return configuration; + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), fold()); + } + + @Override + public boolean equals(Object obj) { + return super.equals(obj) && Objects.equals(fold(), ((ConfigurationFunction) obj).fold()); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Database.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Database.java index 73fca6bac44..3f9d7b30efc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Database.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Database.java @@ -9,33 +9,21 @@ package org.elasticsearch.xpack.sql.expression.function.scalar; import org.elasticsearch.xpack.sql.session.Configuration; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.type.DataType; -import java.util.Objects; - -public class Database extends BaseSystemFunction { +public class Database extends ConfigurationFunction { public Database(Location location, Configuration configuration) { - super(location, configuration); + super(location, configuration, DataType.KEYWORD); } - @Override - protected NodeInfo info() { - return NodeInfo.create(this, Database::new, configuration()); - } - @Override public Object fold() { return configuration().clusterName(); } - + @Override - public int hashCode() { - return Objects.hash(super.hashCode(), configuration().clusterName()); + protected NodeInfo info() { + return NodeInfo.create(this, Database::new, configuration()); } - - @Override - public boolean equals(Object obj) { - return super.equals(obj) && Objects.equals(configuration().clusterName(), ((Database) obj).configuration().clusterName()); - } - -} +} \ No newline at end of file diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/User.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/User.java index f3f423ae4f7..8ca883de58f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/User.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/User.java @@ -9,23 +9,12 @@ package org.elasticsearch.xpack.sql.expression.function.scalar; import org.elasticsearch.xpack.sql.session.Configuration; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.type.DataType; -import java.util.Objects; - -public class User extends BaseSystemFunction { +public class User extends ConfigurationFunction { public User(Location location, Configuration configuration) { - super(location, configuration); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, User::new, configuration()); - } - - @Override - public boolean nullable() { - return true; + super(location, configuration, DataType.KEYWORD); } @Override @@ -34,13 +23,7 @@ public class User extends BaseSystemFunction { } @Override - public int hashCode() { - return Objects.hash(super.hashCode(), configuration().username()); + protected NodeInfo info() { + return NodeInfo.create(this, User::new, configuration()); } - - @Override - public boolean equals(Object obj) { - return super.equals(obj) && Objects.equals(configuration().username(), ((User) obj).configuration().username()); - } - } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTime.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTime.java new file mode 100644 index 00000000000..3eecf174e0a --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTime.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.ConfigurationFunction; +import org.elasticsearch.xpack.sql.session.Configuration; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.type.DataType; + +import java.time.ZonedDateTime; +import java.util.Objects; + +public class CurrentDateTime extends ConfigurationFunction { + private final Expression precision; + private final ZonedDateTime dateTime; + + public CurrentDateTime(Location location, Expression precision, Configuration configuration) { + super(location, configuration, DataType.DATE); + this.precision = precision; + int p = precision != null ? 
((Number) precision.fold()).intValue() : 0; + this.dateTime = nanoPrecision(configuration().now(), p); + } + + @Override + public Object fold() { + return dateTime; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, CurrentDateTime::new, precision, configuration()); + } + + @Override + public int hashCode() { + return Objects.hash(dateTime); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + CurrentDateTime other = (CurrentDateTime) obj; + return Objects.equals(dateTime, other.dateTime); + } + + static ZonedDateTime nanoPrecision(ZonedDateTime zdt, int precision) { + if (zdt != null) { + int nano = zdt.getNano(); + if (precision >= 0 && precision < 10) { + // remove the remainder + nano = nano - nano % (int) Math.pow(10, (9 - precision)); + return zdt.withNano(nano); + } + } + return zdt; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java index 60d39e7ea30..1a60ba66f48 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java @@ -24,5 +24,5 @@ public abstract class DateTimeHistogramFunction extends DateTimeFunction { /** * used for aggregration (date histogram) */ - public abstract String interval(); + public abstract long interval(); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java index 2eb08c7dd93..0ba4c47058d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java @@ -11,11 +11,15 @@ import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import java.util.TimeZone; +import java.util.concurrent.TimeUnit; /** * Extract the year from a datetime. 
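Worth spelling out: nanoPrecision above zeroes every fractional-second digit below 10^(9 - precision), so precision 0 drops the fraction entirely, 3 keeps milliseconds and 9 keeps full nanoseconds. A self-contained check of that arithmetic (standalone demo class, name assumed):

    import java.time.ZoneId;
    import java.time.ZonedDateTime;

    public class NanoPrecisionDemo {
        // mirrors CurrentDateTime.nanoPrecision: keep the requested number of fractional digits
        static ZonedDateTime truncate(ZonedDateTime zdt, int precision) {
            int nano = zdt.getNano();
            if (precision >= 0 && precision < 10) {
                // remove the remainder below 10^(9 - precision)
                nano = nano - nano % (int) Math.pow(10, 9 - precision);
                return zdt.withNano(nano);
            }
            return zdt;
        }

        public static void main(String[] args) {
            ZonedDateTime now = ZonedDateTime.now(ZoneId.of("UTC")).withNano(123_456_789);
            System.out.println(truncate(now, 0).getNano()); // 0
            System.out.println(truncate(now, 3).getNano()); // 123000000 (millisecond precision)
            System.out.println(truncate(now, 9).getNano()); // 123456789 (unchanged)
        }
    }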
*/ public class Year extends DateTimeHistogramFunction { + + private static final long YEAR_IN_MILLIS = TimeUnit.DAYS.toMillis(1) * 365L; + public Year(Location location, Expression field, TimeZone timeZone) { super(location, field, timeZone, DateTimeExtractor.YEAR); } @@ -41,7 +45,7 @@ public class Year extends DateTimeHistogramFunction { } @Override - public String interval() { - return "year"; + public long interval() { + return YEAR_IN_MILLIS; } }
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Interval.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Interval.java index 0aabf3b76b6..fe83b9b0099 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Interval.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Interval.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.sql.expression.literal; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.sql.proto.StringUtils; import org.elasticsearch.xpack.sql.type.DataType; import java.io.IOException; @@ -46,6 +47,8 @@ public abstract class Interval implements NamedWriteab public abstract Interval sub(Interval interval); + public abstract Interval mul(long mul); + @Override public int hashCode() { return Objects.hash(interval, intervalType); @@ -73,6 +76,6 @@ public abstract class Interval implements NamedWriteab @Override public String toString() { - return intervalType.name() + "[" + interval + "]"; + return StringUtils.toString(interval); } } \ No newline at end of file
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/IntervalDayTime.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/IntervalDayTime.java index 7fa0e1696ec..4f370bc313f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/IntervalDayTime.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/IntervalDayTime.java @@ -54,4 +54,9 @@ public class IntervalDayTime extends Interval { public IntervalDayTime sub(Interval interval) { return new IntervalDayTime(interval().minus(interval.interval()), DataTypes.compatibleInterval(dataType(), interval.dataType())); } + + @Override + public Interval mul(long mul) { + return new IntervalDayTime(interval().multipliedBy(mul), dataType()); + } }
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/IntervalYearMonth.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/IntervalYearMonth.java index f4267f3716d..cb4346428ae 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/IntervalYearMonth.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/IntervalYearMonth.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.sql.expression.literal; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.DataTypeConversion; import org.elasticsearch.xpack.sql.type.DataTypes; import java.io.IOException; @@ -58,4 +59,10 @@ public class IntervalYearMonth extends Interval { return new
IntervalYearMonth(interval().minus(interval.interval()).normalized(), DataTypes.compatibleInterval(dataType(), interval.dataType())); } + + @Override + public Interval mul(long mul) { + int i = DataTypeConversion.safeToInt(mul); + return new IntervalYearMonth(interval().multipliedBy(i), dataType()); + } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Intervals.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Intervals.java index a64535e83b7..0d194be105f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Intervals.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Intervals.java @@ -10,9 +10,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.Foldables; +import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.parser.ParsingException; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.util.Check; import org.elasticsearch.xpack.sql.util.StringUtils; import java.time.Duration; @@ -44,6 +47,22 @@ public final class Intervals { private Intervals() {} + public static long inMillis(Literal literal) { + Object fold = Foldables.valueOf(literal); + Check.isTrue(fold instanceof Interval, "Expected interval, received [{}]", fold); + TemporalAmount interval = ((Interval) fold).interval(); + long millis = 0; + if (interval instanceof Period) { + Period p = (Period) interval; + millis = p.toTotalMonths() * 30 * 24 * 60 * 60 * 1000; + } else { + Duration d = (Duration) interval; + millis = d.toMillis(); + } + + return millis; + } + public static TemporalAmount of(Location source, long duration, TimeUnit unit) { // Cannot use Period.of since it accepts int so use plus which accepts long // Further more Period and Duration have inconsistent addition methods but plus is there diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ArbitraryConditionalFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ArbitraryConditionalFunction.java index 6d38037ec26..9a3c24c3729 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ArbitraryConditionalFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ArbitraryConditionalFunction.java @@ -61,6 +61,6 @@ public abstract class ArbitraryConditionalFunction extends ConditionalFunction { params.script(scriptTemplate.params()); } - return new ScriptTemplate(template.toString(), params.build(), dataType()); + return new ScriptTemplate(formatTemplate(template.toString()), params.build(), dataType()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/NullIf.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/NullIf.java index 3d5b78182e6..0f8bb3f2085 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/NullIf.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/NullIf.java @@ -75,7 +75,7 @@ public class NullIf extends ConditionalFunction { params.script(left.params()); params.script(right.params()); - return new ScriptTemplate(template, params.build(), dataType); + return new ScriptTemplate(formatTemplate(template), params.build(), dataType); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Arithmetics.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Arithmetics.java index 944a144e2f9..d66fb7df2ba 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Arithmetics.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Arithmetics.java @@ -124,17 +124,17 @@ public abstract class Arithmetics { return null; } - if (l instanceof Long || r instanceof Long) { - return Long.valueOf(Math.floorMod(l.longValue(), r.longValue())); - } if (l instanceof Double || r instanceof Double) { return Double.valueOf(l.doubleValue() % r.doubleValue()); } if (l instanceof Float || r instanceof Float) { return Float.valueOf(l.floatValue() % r.floatValue()); } + if (l instanceof Long || r instanceof Long) { + return Long.valueOf(l.longValue() % r.longValue()); + } - return Math.floorMod(l.intValue(), r.intValue()); + return l.intValue() % r.intValue(); } static Number negate(Number n) { @@ -162,4 +162,4 @@ public abstract class Arithmetics { return Integer.valueOf(Math.negateExact(n.intValue())); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java index aa0c3e830b9..1b7afa20307 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java @@ -82,7 +82,28 @@ public class BinaryArithmeticProcessor extends FunctionalBinaryProcessor { + if (l instanceof Number && r instanceof Number) { + return Arithmetics.mul((Number) l, (Number) r); + } + l = unwrapJodaTime(l); + r = unwrapJodaTime(r); + if (l instanceof Number && r instanceof IntervalYearMonth) { + return ((IntervalYearMonth) r).mul(((Number) l).intValue()); + } + if (r instanceof Number && l instanceof IntervalYearMonth) { + return ((IntervalYearMonth) l).mul(((Number) r).intValue()); + } + if (l instanceof Number && r instanceof IntervalDayTime) { + return ((IntervalDayTime) r).mul(((Number) l).longValue()); + } + if (r instanceof Number && l instanceof IntervalDayTime) { + return ((IntervalDayTime) l).mul(((Number) r).longValue()); + } + + throw new SqlIllegalArgumentException("Cannot compute [*] between [{}] [{}]", l.getClass().getSimpleName(), + r.getClass().getSimpleName()); + }, "*"), DIV(Arithmetics::div, "/"), MOD(Arithmetics::mod, "%"); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java index edfea25d5c0..40e4bdfaaed 
100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java @@ -9,16 +9,55 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.DataTypes; + +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; /** * Multiplication function ({@code a * b}). */ public class Mul extends ArithmeticOperation { + private DataType dataType; + public Mul(Location location, Expression left, Expression right) { super(location, left, right, BinaryArithmeticOperation.MUL); } + @Override + protected TypeResolution resolveType() { + if (!childrenResolved()) { + return new TypeResolution("Unresolved children"); + } + + DataType l = left().dataType(); + DataType r = right().dataType(); + + // 1. both are numbers + if (l.isNumeric() && r.isNumeric()) { + return TypeResolution.TYPE_RESOLVED; + } + + if (DataTypes.isInterval(l) && r.isInteger()) { + dataType = l; + return TypeResolution.TYPE_RESOLVED; + } else if (DataTypes.isInterval(r) && l.isInteger()) { + dataType = r; + return TypeResolution.TYPE_RESOLVED; + } + + return new TypeResolution(format("[{}] has arguments with incompatible types [{}] and [{}]", symbol(), l, r)); + } + + @Override + public DataType dataType() { + if (dataType == null) { + dataType = super.dataType(); + } + return dataType; + } + @Override protected NodeInfo info() { return NodeInfo.create(this, Mul::new, left(), right()); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java index e8ce98a8e26..ffd12ea6fb9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.optimizer; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer.CleanAliases; import org.elasticsearch.xpack.sql.expression.Alias; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.AttributeMap; @@ -110,11 +111,6 @@ public class Optimizer extends RuleExecutor { @Override protected Iterable.Batch> batches() { - Batch resolution = new Batch("Finish Analysis", - new PruneSubqueryAliases(), - CleanAliases.INSTANCE - ); - Batch aggregate = new Batch("Aggregation", new PruneDuplicatesInGroupBy(), new ReplaceDuplicateAggsWithReferences(), @@ -162,70 +158,10 @@ public class Optimizer extends RuleExecutor { Batch label = new Batch("Set as Optimized", Limiter.ONCE, new SetAsOptimized()); - return Arrays.asList(resolution, aggregate, operators, local, label); + return Arrays.asList(aggregate, operators, local, label); } - static class PruneSubqueryAliases extends OptimizerRule { - - PruneSubqueryAliases() { - super(TransformDirection.UP); - } - - @Override - protected LogicalPlan rule(SubQueryAlias alias) { - return alias.child(); - } - } - - static class CleanAliases extends OptimizerRule { 
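The Mul resolution above admits interval * integer with the operands in either order; at execution time the MUL case in BinaryArithmeticProcessor delegates the scaling to java.time, roughly as this standalone sketch shows (demo class only, not part of the patch):

    import java.time.Duration;
    import java.time.Period;

    public class IntervalMulDemo {
        public static void main(String[] args) {
            // INTERVAL '1' MONTH * 3: year-month intervals scale via Period
            System.out.println(Period.ofMonths(1).multipliedBy(3)); // P3M

            // 4 * INTERVAL '2' HOUR: day-time intervals scale via Duration
            System.out.println(Duration.ofHours(2).multipliedBy(4)); // PT8H

            // number * number keeps the plain numeric path, unchanged
            System.out.println(6 * 7); // 42
        }
    }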
- - private static final CleanAliases INSTANCE = new CleanAliases(); - - CleanAliases() { - super(TransformDirection.UP); - } - - @Override - protected LogicalPlan rule(LogicalPlan plan) { - if (plan instanceof Project) { - Project p = (Project) plan; - return new Project(p.location(), p.child(), cleanExpressions(p.projections())); - } - - if (plan instanceof Aggregate) { - Aggregate a = (Aggregate) plan; - // clean group expressions - List cleanedGroups = a.groupings().stream().map(CleanAliases::trimAliases).collect(toList()); - return new Aggregate(a.location(), a.child(), cleanedGroups, cleanExpressions(a.aggregates())); - } - - return plan.transformExpressionsOnly(e -> { - if (e instanceof Alias) { - return ((Alias) e).child(); - } - return e; - }); - } - - private List cleanExpressions(List args) { - return args.stream().map(CleanAliases::trimNonTopLevelAliases).map(NamedExpression.class::cast) - .collect(toList()); - } - - static Expression trimNonTopLevelAliases(Expression e) { - if (e instanceof Alias) { - Alias a = (Alias) e; - return new Alias(a.location(), a.name(), a.qualifier(), trimAliases(a.child()), a.id()); - } - return trimAliases(e); - } - - private static Expression trimAliases(Expression e) { - return e.transformDown(Alias::child, Alias.class); - } - } - static class PruneDuplicatesInGroupBy extends OptimizerRule { @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/AbstractBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/AbstractBuilder.java index 480d22a9699..67721b35d7e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/AbstractBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/AbstractBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.parser; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.Token; +import org.antlr.v4.runtime.misc.Interval; import org.antlr.v4.runtime.tree.ParseTree; import org.antlr.v4.runtime.tree.TerminalNode; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; @@ -85,6 +86,12 @@ abstract class AbstractBuilder extends SqlBaseBaseVisitor { return new Location(token.getLine(), token.getCharPositionInLine()); } + static String text(ParserRuleContext parserRuleContext) { + Check.notNull(parserRuleContext, "parserRuleContext is null"); + Interval interval = new Interval(parserRuleContext.start.getStartIndex(), parserRuleContext.stop.getStopIndex()); + return parserRuleContext.start.getInputStream().getText(interval); + } + /** * Retrieves the raw text of the node (without interpreting it as a string literal). 
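To make the text(ParserRuleContext) helper above concrete: ANTLR's Interval bounds are inclusive character indices into the underlying CharStream, the same values that ParserRuleContext.start.getStartIndex() and stop.getStopIndex() supply, so the slice preserves the user's exact spelling (whitespace, sign, case) where ctx.getText() would concatenate bare token texts. A standalone illustration of those calls (the input string is an arbitrary example):

    import org.antlr.v4.runtime.CharStream;
    import org.antlr.v4.runtime.CharStreams;
    import org.antlr.v4.runtime.misc.Interval;

    public class TextSliceDemo {
        public static void main(String[] args) {
            String sql = "SELECT INTERVAL -1 2:3 DAY TO MINUTE";
            CharStream in = CharStreams.fromString(sql);
            // inclusive [start, stop] character indices, as a parser context would report them
            int start = sql.indexOf("INTERVAL");
            int stop = sql.length() - 1;
            // prints the slice exactly as written, whitespace and sign included
            System.out.println(in.getText(new Interval(start, stop)));
        }
    }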
*/ diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index 4863fd1f2e8..cd1cb189b6a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -23,6 +23,7 @@ import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.sql.expression.UnresolvedStar; import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction; +import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction.ResolutionType; import org.elasticsearch.xpack.sql.expression.function.scalar.Cast; import org.elasticsearch.xpack.sql.expression.literal.Interval; import org.elasticsearch.xpack.sql.expression.literal.IntervalDayTime; @@ -58,9 +59,11 @@ import org.elasticsearch.xpack.sql.expression.predicate.regex.RLike; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticBinaryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticUnaryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BooleanLiteralContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BuiltinDateTimeFunctionContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastExpressionContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastTemplateContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ComparisonContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ConstantDefaultContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ConvertTemplateContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DateEscapedLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DecimalLiteralContext; @@ -101,9 +104,11 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SubqueryExpressionContex import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SysTypesContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.TimeEscapedLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.TimestampEscapedLiteralContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ValueExpressionDefaultContext; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.DataTypeConversion; import org.elasticsearch.xpack.sql.type.DataTypes; import org.elasticsearch.xpack.sql.util.DateUtils; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -121,6 +126,7 @@ import java.util.Locale; import java.util.Map; import java.util.StringJoiner; +import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.sql.type.DataTypeConversion.conversionFor; @@ -454,6 +460,36 @@ abstract class ExpressionBuilder extends IdentifierBuilder { UnresolvedFunction.ResolutionType.EXTRACT, singletonList(expression(template.valueExpression()))); } + @Override + public Object visitBuiltinDateTimeFunction(BuiltinDateTimeFunctionContext ctx) { + // maps current_XXX to their respective functions + // since the functions need access to the Configuration, the parser only registers the definition and not the actual function + Location source = 
source(ctx); + Literal p = null; + + if (ctx.precision != null) { + try { + Location pSource = source(ctx.precision); + short safeShort = DataTypeConversion.safeToShort(StringUtils.parseLong(ctx.precision.getText())); + if (safeShort > 9 || safeShort < 0) { + throw new ParsingException(pSource, "Precision needs to be between [0-9], received [{}]", safeShort); + } + p = Literal.of(pSource, Short.valueOf(safeShort)); + } catch (SqlIllegalArgumentException siae) { + throw new ParsingException(source, siae.getMessage()); + } + } + + String functionName = ctx.name.getText(); + + switch (ctx.name.getType()) { + case SqlBaseLexer.CURRENT_TIMESTAMP: + return new UnresolvedFunction(source, functionName, ResolutionType.STANDARD, p != null ? singletonList(p) : emptyList()); + } + + throw new ParsingException(source, "Unknown function [{}]", functionName); + } + @Override public Function visitFunctionExpression(FunctionExpressionContext ctx) { FunctionTemplateContext template = ctx.functionTemplate(); @@ -512,9 +548,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder { } @Override - public Literal visitIntervalLiteral(IntervalLiteralContext ctx) { - - IntervalContext interval = ctx.interval(); + public Literal visitInterval(IntervalContext interval) { TimeUnit leading = visitIntervalField(interval.leading); TimeUnit trailing = visitIntervalField(interval.trailing); @@ -537,10 +571,31 @@ abstract class ExpressionBuilder extends IdentifierBuilder { DataType intervalType = Intervals.intervalType(source(interval), leading, trailing); - boolean negative = interval.sign != null && interval.sign.getType() == SqlBaseParser.MINUS; + // negation outside the interval - use xor + boolean negative = false; + + ParserRuleContext parentCtx = interval.getParent(); + if (parentCtx != null) { + if (parentCtx instanceof IntervalLiteralContext) { + parentCtx = parentCtx.getParent(); + if (parentCtx instanceof ConstantDefaultContext) { + parentCtx = parentCtx.getParent(); + if (parentCtx instanceof ValueExpressionDefaultContext) { + parentCtx = parentCtx.getParent(); + if (parentCtx instanceof ArithmeticUnaryContext) { + ArithmeticUnaryContext auc = (ArithmeticUnaryContext) parentCtx; + negative = auc.MINUS() != null; + } + } + } + } + } + + + // negation inside the interval + negative ^= interval.sign != null && interval.sign.getType() == SqlBaseParser.MINUS; TemporalAmount value = null; - String valueAsText = null; if (interval.valueNumeric != null) { if (trailing != null) { @@ -549,18 +604,14 @@ abstract class ExpressionBuilder extends IdentifierBuilder { + "use the string notation instead", trailing); } value = of(interval.valueNumeric, leading); - valueAsText = interval.valueNumeric.getText(); } else { value = of(interval.valuePattern, negative, intervalType); - valueAsText = interval.valuePattern.getText(); } - String name = "INTERVAL " + valueAsText + " " + leading.name() + (trailing != null ? " TO " + trailing.name() : ""); - Interval timeInterval = value instanceof Period ? 
new IntervalYearMonth((Period) value, intervalType) : new IntervalDayTime((Duration) value, intervalType); - return new Literal(source(ctx), name, timeInterval, intervalType); + return new Literal(source(interval), text(interval), timeInterval, timeInterval.dataType()); } private TemporalAmount of(NumberContext valueNumeric, TimeUnit unit) {
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java index b3d6a3e6c27..af7243cbe98 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java @@ -671,6 +671,18 @@ class SqlBaseBaseListener implements SqlBaseListener { * <p>The default implementation does nothing.</p> */ @Override public void exitExtract(SqlBaseParser.ExtractContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p> + */ + @Override public void enterCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p> + */ + @Override public void exitCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx) { } /** * {@inheritDoc} * @@ -767,6 +779,18 @@ class SqlBaseBaseListener implements SqlBaseListener { * <p>The default implementation does nothing.</p> */ @Override public void exitCastTemplate(SqlBaseParser.CastTemplateContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p> + */ + @Override public void enterBuiltinDateTimeFunction(SqlBaseParser.BuiltinDateTimeFunctionContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p> + */ + @Override public void exitBuiltinDateTimeFunction(SqlBaseParser.BuiltinDateTimeFunctionContext ctx) { } /** * {@inheritDoc} *
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java index 4ca3964ddd4..72f517cab8d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java @@ -396,6 +396,13 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitExtract(SqlBaseParser.ExtractContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + * <p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p> + */ + @Override public T visitCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -452,6 +459,13 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitCastTemplate(SqlBaseParser.CastTemplateContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + * <p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitBuiltinDateTimeFunction(SqlBaseParser.BuiltinDateTimeFunctionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java index bdf9a8360ff..fc113cd58c6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java @@ -19,23 +19,23 @@ class SqlBaseLexer extends Lexer { public static final int T__0=1, T__1=2, T__2=3, T__3=4, ALL=5, ANALYZE=6, ANALYZED=7, AND=8, ANY=9, AS=10, ASC=11, BETWEEN=12, BY=13, CAST=14, CATALOG=15, CATALOGS=16, COLUMNS=17, - CONVERT=18, DAY=19, DAYS=20, DEBUG=21, DESC=22, DESCRIBE=23, DISTINCT=24, - ESCAPE=25, EXECUTABLE=26, EXISTS=27, EXPLAIN=28, EXTRACT=29, FALSE=30, - FIRST=31, FORMAT=32, FROM=33, FULL=34, FUNCTIONS=35, GRAPHVIZ=36, GROUP=37, - HAVING=38, HOUR=39, HOURS=40, IN=41, INNER=42, INTERVAL=43, IS=44, JOIN=45, - LAST=46, LEFT=47, LIKE=48, LIMIT=49, MAPPED=50, MATCH=51, MINUTE=52, MINUTES=53, - MONTH=54, MONTHS=55, NATURAL=56, NOT=57, NULL=58, NULLS=59, ON=60, OPTIMIZED=61, - OR=62, ORDER=63, OUTER=64, PARSED=65, PHYSICAL=66, PLAN=67, RIGHT=68, - RLIKE=69, QUERY=70, SCHEMAS=71, SECOND=72, SECONDS=73, SELECT=74, SHOW=75, - SYS=76, TABLE=77, TABLES=78, TEXT=79, TRUE=80, TO=81, TYPE=82, TYPES=83, - USING=84, VERIFY=85, WHERE=86, WITH=87, YEAR=88, YEARS=89, ESCAPE_ESC=90, - FUNCTION_ESC=91, LIMIT_ESC=92, DATE_ESC=93, TIME_ESC=94, TIMESTAMP_ESC=95, - GUID_ESC=96, ESC_END=97, EQ=98, NULLEQ=99, NEQ=100, LT=101, LTE=102, GT=103, - GTE=104, PLUS=105, MINUS=106, ASTERISK=107, SLASH=108, PERCENT=109, CONCAT=110, - DOT=111, PARAM=112, STRING=113, INTEGER_VALUE=114, DECIMAL_VALUE=115, - IDENTIFIER=116, DIGIT_IDENTIFIER=117, TABLE_IDENTIFIER=118, QUOTED_IDENTIFIER=119, - BACKQUOTED_IDENTIFIER=120, SIMPLE_COMMENT=121, BRACKETED_COMMENT=122, - WS=123, UNRECOGNIZED=124; + CONVERT=18, CURRENT=19, CURRENT_TIMESTAMP=20, DAY=21, DAYS=22, DEBUG=23, + DESC=24, DESCRIBE=25, DISTINCT=26, ESCAPE=27, EXECUTABLE=28, EXISTS=29, + EXPLAIN=30, EXTRACT=31, FALSE=32, FIRST=33, FORMAT=34, FROM=35, FULL=36, + FUNCTIONS=37, GRAPHVIZ=38, GROUP=39, HAVING=40, HOUR=41, HOURS=42, IN=43, + INNER=44, INTERVAL=45, IS=46, JOIN=47, LAST=48, LEFT=49, LIKE=50, LIMIT=51, + MAPPED=52, MATCH=53, MINUTE=54, MINUTES=55, MONTH=56, MONTHS=57, NATURAL=58, + NOT=59, NULL=60, NULLS=61, ON=62, OPTIMIZED=63, OR=64, ORDER=65, OUTER=66, + PARSED=67, PHYSICAL=68, PLAN=69, RIGHT=70, RLIKE=71, QUERY=72, SCHEMAS=73, + SECOND=74, SECONDS=75, SELECT=76, SHOW=77, SYS=78, TABLE=79, TABLES=80, + TEXT=81, TRUE=82, TO=83, TYPE=84, TYPES=85, USING=86, VERIFY=87, WHERE=88, + WITH=89, YEAR=90, YEARS=91, ESCAPE_ESC=92, FUNCTION_ESC=93, LIMIT_ESC=94, + DATE_ESC=95, TIME_ESC=96, TIMESTAMP_ESC=97, GUID_ESC=98, ESC_END=99, EQ=100, + NULLEQ=101, NEQ=102, LT=103, LTE=104, GT=105, GTE=106, PLUS=107, MINUS=108, + ASTERISK=109, SLASH=110, PERCENT=111, CONCAT=112, DOT=113, PARAM=114, + STRING=115, INTEGER_VALUE=116, DECIMAL_VALUE=117, IDENTIFIER=118, DIGIT_IDENTIFIER=119, + TABLE_IDENTIFIER=120, QUOTED_IDENTIFIER=121, BACKQUOTED_IDENTIFIER=122, + SIMPLE_COMMENT=123, BRACKETED_COMMENT=124, WS=125, UNRECOGNIZED=126; public static String[] modeNames = { "DEFAULT_MODE" }; @@ -43,59 +43,62 @@ class SqlBaseLexer extends Lexer { public static final String[] ruleNames = { "T__0", "T__1", "T__2", "T__3", 
"ALL", "ANALYZE", "ANALYZED", "AND", "ANY", "AS", "ASC", "BETWEEN", "BY", "CAST", "CATALOG", "CATALOGS", "COLUMNS", - "CONVERT", "DAY", "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ESCAPE", - "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", - "FROM", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", "HOUR", "HOURS", - "IN", "INNER", "INTERVAL", "IS", "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", - "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", "MONTHS", "NATURAL", - "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", - "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", - "SELECT", "SHOW", "SYS", "TABLE", "TABLES", "TEXT", "TRUE", "TO", "TYPE", - "TYPES", "USING", "VERIFY", "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", - "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", - "GUID_ESC", "ESC_END", "EQ", "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", - "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", "DOT", "PARAM", - "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", - "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "EXPONENT", - "DIGIT", "LETTER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" + "CONVERT", "CURRENT", "CURRENT_TIMESTAMP", "DAY", "DAYS", "DEBUG", "DESC", + "DESCRIBE", "DISTINCT", "ESCAPE", "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", + "FALSE", "FIRST", "FORMAT", "FROM", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", + "HAVING", "HOUR", "HOURS", "IN", "INNER", "INTERVAL", "IS", "JOIN", "LAST", + "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", + "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", + "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", + "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", + "TEXT", "TRUE", "TO", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", + "YEAR", "YEARS", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", + "TIME_ESC", "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", "NULLEQ", "NEQ", + "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", + "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", + "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", + "BACKQUOTED_IDENTIFIER", "EXPONENT", "DIGIT", "LETTER", "SIMPLE_COMMENT", + "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" }; private static final String[] _LITERAL_NAMES = { null, "'('", "')'", "','", "':'", "'ALL'", "'ANALYZE'", "'ANALYZED'", "'AND'", "'ANY'", "'AS'", "'ASC'", "'BETWEEN'", "'BY'", "'CAST'", "'CATALOG'", - "'CATALOGS'", "'COLUMNS'", "'CONVERT'", "'DAY'", "'DAYS'", "'DEBUG'", - "'DESC'", "'DESCRIBE'", "'DISTINCT'", "'ESCAPE'", "'EXECUTABLE'", "'EXISTS'", - "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", "'FORMAT'", "'FROM'", - "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", "'GROUP'", "'HAVING'", "'HOUR'", - "'HOURS'", "'IN'", "'INNER'", "'INTERVAL'", "'IS'", "'JOIN'", "'LAST'", - "'LEFT'", "'LIKE'", "'LIMIT'", "'MAPPED'", "'MATCH'", "'MINUTE'", "'MINUTES'", - "'MONTH'", "'MONTHS'", "'NATURAL'", "'NOT'", "'NULL'", "'NULLS'", "'ON'", - "'OPTIMIZED'", "'OR'", "'ORDER'", "'OUTER'", "'PARSED'", "'PHYSICAL'", - "'PLAN'", "'RIGHT'", "'RLIKE'", "'QUERY'", "'SCHEMAS'", "'SECOND'", "'SECONDS'", - "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", "'TABLES'", "'TEXT'", "'TRUE'", - "'TO'", "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'", "'WHERE'", "'WITH'", - "'YEAR'", "'YEARS'", "'{ESCAPE'", "'{FN'", 
"'{LIMIT'", "'{D'", "'{T'", - "'{TS'", "'{GUID'", "'}'", "'='", "'<=>'", null, "'<'", "'<='", "'>'", - "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'||'", "'.'", "'?'" + "'CATALOGS'", "'COLUMNS'", "'CONVERT'", "'CURRENT'", "'CURRENT_TIMESTAMP'", + "'DAY'", "'DAYS'", "'DEBUG'", "'DESC'", "'DESCRIBE'", "'DISTINCT'", "'ESCAPE'", + "'EXECUTABLE'", "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", + "'FORMAT'", "'FROM'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", "'GROUP'", + "'HAVING'", "'HOUR'", "'HOURS'", "'IN'", "'INNER'", "'INTERVAL'", "'IS'", + "'JOIN'", "'LAST'", "'LEFT'", "'LIKE'", "'LIMIT'", "'MAPPED'", "'MATCH'", + "'MINUTE'", "'MINUTES'", "'MONTH'", "'MONTHS'", "'NATURAL'", "'NOT'", + "'NULL'", "'NULLS'", "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", "'OUTER'", + "'PARSED'", "'PHYSICAL'", "'PLAN'", "'RIGHT'", "'RLIKE'", "'QUERY'", "'SCHEMAS'", + "'SECOND'", "'SECONDS'", "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", "'TABLES'", + "'TEXT'", "'TRUE'", "'TO'", "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'", + "'WHERE'", "'WITH'", "'YEAR'", "'YEARS'", "'{ESCAPE'", "'{FN'", "'{LIMIT'", + "'{D'", "'{T'", "'{TS'", "'{GUID'", "'}'", "'='", "'<=>'", null, "'<'", + "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'||'", "'.'", + "'?'" }; private static final String[] _SYMBOLIC_NAMES = { null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", "AS", "ASC", "BETWEEN", "BY", "CAST", "CATALOG", "CATALOGS", "COLUMNS", - "CONVERT", "DAY", "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ESCAPE", - "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", - "FROM", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", "HOUR", "HOURS", - "IN", "INNER", "INTERVAL", "IS", "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", - "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", "MONTHS", "NATURAL", - "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", - "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", - "SELECT", "SHOW", "SYS", "TABLE", "TABLES", "TEXT", "TRUE", "TO", "TYPE", - "TYPES", "USING", "VERIFY", "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", - "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", - "GUID_ESC", "ESC_END", "EQ", "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", - "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", "DOT", "PARAM", - "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", - "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", - "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" + "CONVERT", "CURRENT", "CURRENT_TIMESTAMP", "DAY", "DAYS", "DEBUG", "DESC", + "DESCRIBE", "DISTINCT", "ESCAPE", "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", + "FALSE", "FIRST", "FORMAT", "FROM", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", + "HAVING", "HOUR", "HOURS", "IN", "INNER", "INTERVAL", "IS", "JOIN", "LAST", + "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", + "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", + "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", + "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", + "TEXT", "TRUE", "TO", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", + "YEAR", "YEARS", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", + "TIME_ESC", "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", "NULLEQ", "NEQ", + "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", + "CONCAT", "DOT", "PARAM", "STRING", 
"INTEGER_VALUE", "DECIMAL_VALUE", + "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", + "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", + "UNRECOGNIZED" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -152,352 +155,366 @@ class SqlBaseLexer extends Lexer { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2~\u0405\b\1\4\2\t"+ - "\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ - "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ - "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ - "\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+ - "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4"+ - ",\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t"+ - "\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\4=\t="+ - "\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I"+ - "\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT"+ - "\4U\tU\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4^\t^\4_\t_\4"+ - "`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4g\tg\4h\th\4i\ti\4j\tj\4k\t"+ - "k\4l\tl\4m\tm\4n\tn\4o\to\4p\tp\4q\tq\4r\tr\4s\ts\4t\tt\4u\tu\4v\tv\4"+ - "w\tw\4x\tx\4y\ty\4z\tz\4{\t{\4|\t|\4}\t}\4~\t~\4\177\t\177\4\u0080\t\u0080"+ - "\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3"+ - "\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\n\3\n"+ - "\3\n\3\n\3\13\3\13\3\13\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3"+ - "\r\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3"+ - "\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3"+ - "\22\3\22\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3"+ - "\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\26\3"+ - "\26\3\27\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3"+ - "\30\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32\3"+ - "\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3"+ - "\34\3\34\3\34\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3"+ - "\35\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\37\3\37\3\37\3\37\3\37\3"+ - "\37\3 \3 \3 \3 \3 \3 \3!\3!\3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3#\3#\3"+ - "#\3#\3#\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\3%\3%\3%\3%\3&\3"+ - "&\3&\3&\3&\3&\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3)\3)\3)\3)\3"+ - ")\3)\3*\3*\3*\3+\3+\3+\3+\3+\3+\3,\3,\3,\3,\3,\3,\3,\3,\3,\3-\3-\3-\3"+ - ".\3.\3.\3.\3.\3/\3/\3/\3/\3/\3\60\3\60\3\60\3\60\3\60\3\61\3\61\3\61\3"+ - "\61\3\61\3\62\3\62\3\62\3\62\3\62\3\62\3\63\3\63\3\63\3\63\3\63\3\63\3"+ - "\63\3\64\3\64\3\64\3\64\3\64\3\64\3\65\3\65\3\65\3\65\3\65\3\65\3\65\3"+ - "\66\3\66\3\66\3\66\3\66\3\66\3\66\3\66\3\67\3\67\3\67\3\67\3\67\3\67\3"+ - "8\38\38\38\38\38\38\39\39\39\39\39\39\39\39\3:\3:\3:\3:\3;\3;\3;\3;\3"+ - ";\3<\3<\3<\3<\3<\3<\3=\3=\3=\3>\3>\3>\3>\3>\3>\3>\3>\3>\3>\3?\3?\3?\3"+ - "@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3A\3B\3B\3B\3B\3B\3B\3B\3C\3C\3C\3C\3"+ - "C\3C\3C\3C\3C\3D\3D\3D\3D\3D\3E\3E\3E\3E\3E\3E\3F\3F\3F\3F\3F\3F\3G\3"+ - "G\3G\3G\3G\3G\3H\3H\3H\3H\3H\3H\3H\3H\3I\3I\3I\3I\3I\3I\3I\3J\3J\3J\3"+ - "J\3J\3J\3J\3J\3K\3K\3K\3K\3K\3K\3K\3L\3L\3L\3L\3L\3M\3M\3M\3M\3N\3N\3"+ - "N\3N\3N\3N\3O\3O\3O\3O\3O\3O\3O\3P\3P\3P\3P\3P\3Q\3Q\3Q\3Q\3Q\3R\3R\3"+ - 
"R\3S\3S\3S\3S\3S\3T\3T\3T\3T\3T\3T\3U\3U\3U\3U\3U\3U\3V\3V\3V\3V\3V\3"+ - "V\3V\3W\3W\3W\3W\3W\3W\3X\3X\3X\3X\3X\3Y\3Y\3Y\3Y\3Y\3Z\3Z\3Z\3Z\3Z\3"+ - "Z\3[\3[\3[\3[\3[\3[\3[\3[\3\\\3\\\3\\\3\\\3]\3]\3]\3]\3]\3]\3]\3^\3^\3"+ - "^\3_\3_\3_\3`\3`\3`\3`\3a\3a\3a\3a\3a\3a\3b\3b\3c\3c\3d\3d\3d\3d\3e\3"+ - "e\3e\3e\5e\u0346\ne\3f\3f\3g\3g\3g\3h\3h\3i\3i\3i\3j\3j\3k\3k\3l\3l\3"+ - "m\3m\3n\3n\3o\3o\3o\3p\3p\3q\3q\3r\3r\3r\3r\7r\u0367\nr\fr\16r\u036a\13"+ - "r\3r\3r\3s\6s\u036f\ns\rs\16s\u0370\3t\6t\u0374\nt\rt\16t\u0375\3t\3t"+ - "\7t\u037a\nt\ft\16t\u037d\13t\3t\3t\6t\u0381\nt\rt\16t\u0382\3t\6t\u0386"+ - "\nt\rt\16t\u0387\3t\3t\7t\u038c\nt\ft\16t\u038f\13t\5t\u0391\nt\3t\3t"+ - "\3t\3t\6t\u0397\nt\rt\16t\u0398\3t\3t\5t\u039d\nt\3u\3u\5u\u03a1\nu\3"+ - "u\3u\3u\7u\u03a6\nu\fu\16u\u03a9\13u\3v\3v\3v\3v\6v\u03af\nv\rv\16v\u03b0"+ - "\3w\3w\3w\6w\u03b6\nw\rw\16w\u03b7\3x\3x\3x\3x\7x\u03be\nx\fx\16x\u03c1"+ - "\13x\3x\3x\3y\3y\3y\3y\7y\u03c9\ny\fy\16y\u03cc\13y\3y\3y\3z\3z\5z\u03d2"+ - "\nz\3z\6z\u03d5\nz\rz\16z\u03d6\3{\3{\3|\3|\3}\3}\3}\3}\7}\u03e1\n}\f"+ - "}\16}\u03e4\13}\3}\5}\u03e7\n}\3}\5}\u03ea\n}\3}\3}\3~\3~\3~\3~\3~\7~"+ - "\u03f3\n~\f~\16~\u03f6\13~\3~\3~\3~\3~\3~\3\177\6\177\u03fe\n\177\r\177"+ - "\16\177\u03ff\3\177\3\177\3\u0080\3\u0080\3\u03f4\2\u0081\3\3\5\4\7\5"+ - "\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23"+ - "%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G"+ - "%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s;u{"+ - "?}@\177A\u0081B\u0083C\u0085D\u0087E\u0089F\u008bG\u008dH\u008fI\u0091"+ - "J\u0093K\u0095L\u0097M\u0099N\u009bO\u009dP\u009fQ\u00a1R\u00a3S\u00a5"+ - "T\u00a7U\u00a9V\u00abW\u00adX\u00afY\u00b1Z\u00b3[\u00b5\\\u00b7]\u00b9"+ - "^\u00bb_\u00bd`\u00bfa\u00c1b\u00c3c\u00c5d\u00c7e\u00c9f\u00cbg\u00cd"+ - "h\u00cfi\u00d1j\u00d3k\u00d5l\u00d7m\u00d9n\u00dbo\u00ddp\u00dfq\u00e1"+ - "r\u00e3s\u00e5t\u00e7u\u00e9v\u00ebw\u00edx\u00efy\u00f1z\u00f3\2\u00f5"+ - "\2\u00f7\2\u00f9{\u00fb|\u00fd}\u00ff~\3\2\f\3\2))\4\2BBaa\5\2<\3\2\2\2\u01b3\u01b4\7H\2\2\u01b4\u01b5\7K\2\2\u01b5"+ - "\u01b6\7T\2\2\u01b6\u01b7\7U\2\2\u01b7\u01b8\7V\2\2\u01b8@\3\2\2\2\u01b9"+ - "\u01ba\7H\2\2\u01ba\u01bb\7Q\2\2\u01bb\u01bc\7T\2\2\u01bc\u01bd\7O\2\2"+ - "\u01bd\u01be\7C\2\2\u01be\u01bf\7V\2\2\u01bfB\3\2\2\2\u01c0\u01c1\7H\2"+ - "\2\u01c1\u01c2\7T\2\2\u01c2\u01c3\7Q\2\2\u01c3\u01c4\7O\2\2\u01c4D\3\2"+ - "\2\2\u01c5\u01c6\7H\2\2\u01c6\u01c7\7W\2\2\u01c7\u01c8\7N\2\2\u01c8\u01c9"+ - "\7N\2\2\u01c9F\3\2\2\2\u01ca\u01cb\7H\2\2\u01cb\u01cc\7W\2\2\u01cc\u01cd"+ - "\7P\2\2\u01cd\u01ce\7E\2\2\u01ce\u01cf\7V\2\2\u01cf\u01d0\7K\2\2\u01d0"+ - "\u01d1\7Q\2\2\u01d1\u01d2\7P\2\2\u01d2\u01d3\7U\2\2\u01d3H\3\2\2\2\u01d4"+ - "\u01d5\7I\2\2\u01d5\u01d6\7T\2\2\u01d6\u01d7\7C\2\2\u01d7\u01d8\7R\2\2"+ - "\u01d8\u01d9\7J\2\2\u01d9\u01da\7X\2\2\u01da\u01db\7K\2\2\u01db\u01dc"+ - "\7\\\2\2\u01dcJ\3\2\2\2\u01dd\u01de\7I\2\2\u01de\u01df\7T\2\2\u01df\u01e0"+ - "\7Q\2\2\u01e0\u01e1\7W\2\2\u01e1\u01e2\7R\2\2\u01e2L\3\2\2\2\u01e3\u01e4"+ - "\7J\2\2\u01e4\u01e5\7C\2\2\u01e5\u01e6\7X\2\2\u01e6\u01e7\7K\2\2\u01e7"+ - "\u01e8\7P\2\2\u01e8\u01e9\7I\2\2\u01e9N\3\2\2\2\u01ea\u01eb\7J\2\2\u01eb"+ - "\u01ec\7Q\2\2\u01ec\u01ed\7W\2\2\u01ed\u01ee\7T\2\2\u01eeP\3\2\2\2\u01ef"+ - "\u01f0\7J\2\2\u01f0\u01f1\7Q\2\2\u01f1\u01f2\7W\2\2\u01f2\u01f3\7T\2\2"+ - "\u01f3\u01f4\7U\2\2\u01f4R\3\2\2\2\u01f5\u01f6\7K\2\2\u01f6\u01f7\7P\2"+ - "\2\u01f7T\3\2\2\2\u01f8\u01f9\7K\2\2\u01f9\u01fa\7P\2\2\u01fa\u01fb\7"+ - "P\2\2\u01fb\u01fc\7G\2\2\u01fc\u01fd\7T\2\2\u01fdV\3\2\2\2\u01fe\u01ff"+ - 
"\7K\2\2\u01ff\u0200\7P\2\2\u0200\u0201\7V\2\2\u0201\u0202\7G\2\2\u0202"+ - "\u0203\7T\2\2\u0203\u0204\7X\2\2\u0204\u0205\7C\2\2\u0205\u0206\7N\2\2"+ - "\u0206X\3\2\2\2\u0207\u0208\7K\2\2\u0208\u0209\7U\2\2\u0209Z\3\2\2\2\u020a"+ - "\u020b\7L\2\2\u020b\u020c\7Q\2\2\u020c\u020d\7K\2\2\u020d\u020e\7P\2\2"+ - "\u020e\\\3\2\2\2\u020f\u0210\7N\2\2\u0210\u0211\7C\2\2\u0211\u0212\7U"+ - "\2\2\u0212\u0213\7V\2\2\u0213^\3\2\2\2\u0214\u0215\7N\2\2\u0215\u0216"+ - "\7G\2\2\u0216\u0217\7H\2\2\u0217\u0218\7V\2\2\u0218`\3\2\2\2\u0219\u021a"+ - "\7N\2\2\u021a\u021b\7K\2\2\u021b\u021c\7M\2\2\u021c\u021d\7G\2\2\u021d"+ - "b\3\2\2\2\u021e\u021f\7N\2\2\u021f\u0220\7K\2\2\u0220\u0221\7O\2\2\u0221"+ - "\u0222\7K\2\2\u0222\u0223\7V\2\2\u0223d\3\2\2\2\u0224\u0225\7O\2\2\u0225"+ - "\u0226\7C\2\2\u0226\u0227\7R\2\2\u0227\u0228\7R\2\2\u0228\u0229\7G\2\2"+ - "\u0229\u022a\7F\2\2\u022af\3\2\2\2\u022b\u022c\7O\2\2\u022c\u022d\7C\2"+ - "\2\u022d\u022e\7V\2\2\u022e\u022f\7E\2\2\u022f\u0230\7J\2\2\u0230h\3\2"+ - "\2\2\u0231\u0232\7O\2\2\u0232\u0233\7K\2\2\u0233\u0234\7P\2\2\u0234\u0235"+ - "\7W\2\2\u0235\u0236\7V\2\2\u0236\u0237\7G\2\2\u0237j\3\2\2\2\u0238\u0239"+ - "\7O\2\2\u0239\u023a\7K\2\2\u023a\u023b\7P\2\2\u023b\u023c\7W\2\2\u023c"+ - "\u023d\7V\2\2\u023d\u023e\7G\2\2\u023e\u023f\7U\2\2\u023fl\3\2\2\2\u0240"+ - "\u0241\7O\2\2\u0241\u0242\7Q\2\2\u0242\u0243\7P\2\2\u0243\u0244\7V\2\2"+ - "\u0244\u0245\7J\2\2\u0245n\3\2\2\2\u0246\u0247\7O\2\2\u0247\u0248\7Q\2"+ - "\2\u0248\u0249\7P\2\2\u0249\u024a\7V\2\2\u024a\u024b\7J\2\2\u024b\u024c"+ - "\7U\2\2\u024cp\3\2\2\2\u024d\u024e\7P\2\2\u024e\u024f\7C\2\2\u024f\u0250"+ - "\7V\2\2\u0250\u0251\7W\2\2\u0251\u0252\7T\2\2\u0252\u0253\7C\2\2\u0253"+ - "\u0254\7N\2\2\u0254r\3\2\2\2\u0255\u0256\7P\2\2\u0256\u0257\7Q\2\2\u0257"+ - "\u0258\7V\2\2\u0258t\3\2\2\2\u0259\u025a\7P\2\2\u025a\u025b\7W\2\2\u025b"+ - "\u025c\7N\2\2\u025c\u025d\7N\2\2\u025dv\3\2\2\2\u025e\u025f\7P\2\2\u025f"+ - "\u0260\7W\2\2\u0260\u0261\7N\2\2\u0261\u0262\7N\2\2\u0262\u0263\7U\2\2"+ - "\u0263x\3\2\2\2\u0264\u0265\7Q\2\2\u0265\u0266\7P\2\2\u0266z\3\2\2\2\u0267"+ - "\u0268\7Q\2\2\u0268\u0269\7R\2\2\u0269\u026a\7V\2\2\u026a\u026b\7K\2\2"+ - "\u026b\u026c\7O\2\2\u026c\u026d\7K\2\2\u026d\u026e\7\\\2\2\u026e\u026f"+ - "\7G\2\2\u026f\u0270\7F\2\2\u0270|\3\2\2\2\u0271\u0272\7Q\2\2\u0272\u0273"+ - "\7T\2\2\u0273~\3\2\2\2\u0274\u0275\7Q\2\2\u0275\u0276\7T\2\2\u0276\u0277"+ - "\7F\2\2\u0277\u0278\7G\2\2\u0278\u0279\7T\2\2\u0279\u0080\3\2\2\2\u027a"+ - "\u027b\7Q\2\2\u027b\u027c\7W\2\2\u027c\u027d\7V\2\2\u027d\u027e\7G\2\2"+ - "\u027e\u027f\7T\2\2\u027f\u0082\3\2\2\2\u0280\u0281\7R\2\2\u0281\u0282"+ - "\7C\2\2\u0282\u0283\7T\2\2\u0283\u0284\7U\2\2\u0284\u0285\7G\2\2\u0285"+ - "\u0286\7F\2\2\u0286\u0084\3\2\2\2\u0287\u0288\7R\2\2\u0288\u0289\7J\2"+ - "\2\u0289\u028a\7[\2\2\u028a\u028b\7U\2\2\u028b\u028c\7K\2\2\u028c\u028d"+ - "\7E\2\2\u028d\u028e\7C\2\2\u028e\u028f\7N\2\2\u028f\u0086\3\2\2\2\u0290"+ - "\u0291\7R\2\2\u0291\u0292\7N\2\2\u0292\u0293\7C\2\2\u0293\u0294\7P\2\2"+ - "\u0294\u0088\3\2\2\2\u0295\u0296\7T\2\2\u0296\u0297\7K\2\2\u0297\u0298"+ - "\7I\2\2\u0298\u0299\7J\2\2\u0299\u029a\7V\2\2\u029a\u008a\3\2\2\2\u029b"+ - "\u029c\7T\2\2\u029c\u029d\7N\2\2\u029d\u029e\7K\2\2\u029e\u029f\7M\2\2"+ - "\u029f\u02a0\7G\2\2\u02a0\u008c\3\2\2\2\u02a1\u02a2\7S\2\2\u02a2\u02a3"+ - "\7W\2\2\u02a3\u02a4\7G\2\2\u02a4\u02a5\7T\2\2\u02a5\u02a6\7[\2\2\u02a6"+ - "\u008e\3\2\2\2\u02a7\u02a8\7U\2\2\u02a8\u02a9\7E\2\2\u02a9\u02aa\7J\2"+ - "\2\u02aa\u02ab\7G\2\2\u02ab\u02ac\7O\2\2\u02ac\u02ad\7C\2\2\u02ad\u02ae"+ - 
"\7U\2\2\u02ae\u0090\3\2\2\2\u02af\u02b0\7U\2\2\u02b0\u02b1\7G\2\2\u02b1"+ - "\u02b2\7E\2\2\u02b2\u02b3\7Q\2\2\u02b3\u02b4\7P\2\2\u02b4\u02b5\7F\2\2"+ - "\u02b5\u0092\3\2\2\2\u02b6\u02b7\7U\2\2\u02b7\u02b8\7G\2\2\u02b8\u02b9"+ - "\7E\2\2\u02b9\u02ba\7Q\2\2\u02ba\u02bb\7P\2\2\u02bb\u02bc\7F\2\2\u02bc"+ - "\u02bd\7U\2\2\u02bd\u0094\3\2\2\2\u02be\u02bf\7U\2\2\u02bf\u02c0\7G\2"+ - "\2\u02c0\u02c1\7N\2\2\u02c1\u02c2\7G\2\2\u02c2\u02c3\7E\2\2\u02c3\u02c4"+ - "\7V\2\2\u02c4\u0096\3\2\2\2\u02c5\u02c6\7U\2\2\u02c6\u02c7\7J\2\2\u02c7"+ - "\u02c8\7Q\2\2\u02c8\u02c9\7Y\2\2\u02c9\u0098\3\2\2\2\u02ca\u02cb\7U\2"+ - "\2\u02cb\u02cc\7[\2\2\u02cc\u02cd\7U\2\2\u02cd\u009a\3\2\2\2\u02ce\u02cf"+ - "\7V\2\2\u02cf\u02d0\7C\2\2\u02d0\u02d1\7D\2\2\u02d1\u02d2\7N\2\2\u02d2"+ - "\u02d3\7G\2\2\u02d3\u009c\3\2\2\2\u02d4\u02d5\7V\2\2\u02d5\u02d6\7C\2"+ - "\2\u02d6\u02d7\7D\2\2\u02d7\u02d8\7N\2\2\u02d8\u02d9\7G\2\2\u02d9\u02da"+ - "\7U\2\2\u02da\u009e\3\2\2\2\u02db\u02dc\7V\2\2\u02dc\u02dd\7G\2\2\u02dd"+ - "\u02de\7Z\2\2\u02de\u02df\7V\2\2\u02df\u00a0\3\2\2\2\u02e0\u02e1\7V\2"+ - "\2\u02e1\u02e2\7T\2\2\u02e2\u02e3\7W\2\2\u02e3\u02e4\7G\2\2\u02e4\u00a2"+ - "\3\2\2\2\u02e5\u02e6\7V\2\2\u02e6\u02e7\7Q\2\2\u02e7\u00a4\3\2\2\2\u02e8"+ - "\u02e9\7V\2\2\u02e9\u02ea\7[\2\2\u02ea\u02eb\7R\2\2\u02eb\u02ec\7G\2\2"+ - "\u02ec\u00a6\3\2\2\2\u02ed\u02ee\7V\2\2\u02ee\u02ef\7[\2\2\u02ef\u02f0"+ - "\7R\2\2\u02f0\u02f1\7G\2\2\u02f1\u02f2\7U\2\2\u02f2\u00a8\3\2\2\2\u02f3"+ - "\u02f4\7W\2\2\u02f4\u02f5\7U\2\2\u02f5\u02f6\7K\2\2\u02f6\u02f7\7P\2\2"+ - "\u02f7\u02f8\7I\2\2\u02f8\u00aa\3\2\2\2\u02f9\u02fa\7X\2\2\u02fa\u02fb"+ - "\7G\2\2\u02fb\u02fc\7T\2\2\u02fc\u02fd\7K\2\2\u02fd\u02fe\7H\2\2\u02fe"+ - "\u02ff\7[\2\2\u02ff\u00ac\3\2\2\2\u0300\u0301\7Y\2\2\u0301\u0302\7J\2"+ - "\2\u0302\u0303\7G\2\2\u0303\u0304\7T\2\2\u0304\u0305\7G\2\2\u0305\u00ae"+ - "\3\2\2\2\u0306\u0307\7Y\2\2\u0307\u0308\7K\2\2\u0308\u0309\7V\2\2\u0309"+ - "\u030a\7J\2\2\u030a\u00b0\3\2\2\2\u030b\u030c\7[\2\2\u030c\u030d\7G\2"+ - "\2\u030d\u030e\7C\2\2\u030e\u030f\7T\2\2\u030f\u00b2\3\2\2\2\u0310\u0311"+ - "\7[\2\2\u0311\u0312\7G\2\2\u0312\u0313\7C\2\2\u0313\u0314\7T\2\2\u0314"+ - "\u0315\7U\2\2\u0315\u00b4\3\2\2\2\u0316\u0317\7}\2\2\u0317\u0318\7G\2"+ - "\2\u0318\u0319\7U\2\2\u0319\u031a\7E\2\2\u031a\u031b\7C\2\2\u031b\u031c"+ - "\7R\2\2\u031c\u031d\7G\2\2\u031d\u00b6\3\2\2\2\u031e\u031f\7}\2\2\u031f"+ - "\u0320\7H\2\2\u0320\u0321\7P\2\2\u0321\u00b8\3\2\2\2\u0322\u0323\7}\2"+ - "\2\u0323\u0324\7N\2\2\u0324\u0325\7K\2\2\u0325\u0326\7O\2\2\u0326\u0327"+ - "\7K\2\2\u0327\u0328\7V\2\2\u0328\u00ba\3\2\2\2\u0329\u032a\7}\2\2\u032a"+ - "\u032b\7F\2\2\u032b\u00bc\3\2\2\2\u032c\u032d\7}\2\2\u032d\u032e\7V\2"+ - "\2\u032e\u00be\3\2\2\2\u032f\u0330\7}\2\2\u0330\u0331\7V\2\2\u0331\u0332"+ - "\7U\2\2\u0332\u00c0\3\2\2\2\u0333\u0334\7}\2\2\u0334\u0335\7I\2\2\u0335"+ - "\u0336\7W\2\2\u0336\u0337\7K\2\2\u0337\u0338\7F\2\2\u0338\u00c2\3\2\2"+ - "\2\u0339\u033a\7\177\2\2\u033a\u00c4\3\2\2\2\u033b\u033c\7?\2\2\u033c"+ - "\u00c6\3\2\2\2\u033d\u033e\7>\2\2\u033e\u033f\7?\2\2\u033f\u0340\7@\2"+ - "\2\u0340\u00c8\3\2\2\2\u0341\u0342\7>\2\2\u0342\u0346\7@\2\2\u0343\u0344"+ - "\7#\2\2\u0344\u0346\7?\2\2\u0345\u0341\3\2\2\2\u0345\u0343\3\2\2\2\u0346"+ - "\u00ca\3\2\2\2\u0347\u0348\7>\2\2\u0348\u00cc\3\2\2\2\u0349\u034a\7>\2"+ - "\2\u034a\u034b\7?\2\2\u034b\u00ce\3\2\2\2\u034c\u034d\7@\2\2\u034d\u00d0"+ - "\3\2\2\2\u034e\u034f\7@\2\2\u034f\u0350\7?\2\2\u0350\u00d2\3\2\2\2\u0351"+ - "\u0352\7-\2\2\u0352\u00d4\3\2\2\2\u0353\u0354\7/\2\2\u0354\u00d6\3\2\2"+ - 
"\2\u0355\u0356\7,\2\2\u0356\u00d8\3\2\2\2\u0357\u0358\7\61\2\2\u0358\u00da"+ - "\3\2\2\2\u0359\u035a\7\'\2\2\u035a\u00dc\3\2\2\2\u035b\u035c\7~\2\2\u035c"+ - "\u035d\7~\2\2\u035d\u00de\3\2\2\2\u035e\u035f\7\60\2\2\u035f\u00e0\3\2"+ - "\2\2\u0360\u0361\7A\2\2\u0361\u00e2\3\2\2\2\u0362\u0368\7)\2\2\u0363\u0367"+ - "\n\2\2\2\u0364\u0365\7)\2\2\u0365\u0367\7)\2\2\u0366\u0363\3\2\2\2\u0366"+ - "\u0364\3\2\2\2\u0367\u036a\3\2\2\2\u0368\u0366\3\2\2\2\u0368\u0369\3\2"+ - "\2\2\u0369\u036b\3\2\2\2\u036a\u0368\3\2\2\2\u036b\u036c\7)\2\2\u036c"+ - "\u00e4\3\2\2\2\u036d\u036f\5\u00f5{\2\u036e\u036d\3\2\2\2\u036f\u0370"+ - "\3\2\2\2\u0370\u036e\3\2\2\2\u0370\u0371\3\2\2\2\u0371\u00e6\3\2\2\2\u0372"+ - "\u0374\5\u00f5{\2\u0373\u0372\3\2\2\2\u0374\u0375\3\2\2\2\u0375\u0373"+ - "\3\2\2\2\u0375\u0376\3\2\2\2\u0376\u0377\3\2\2\2\u0377\u037b\5\u00dfp"+ - "\2\u0378\u037a\5\u00f5{\2\u0379\u0378\3\2\2\2\u037a\u037d\3\2\2\2\u037b"+ - "\u0379\3\2\2\2\u037b\u037c\3\2\2\2\u037c\u039d\3\2\2\2\u037d\u037b\3\2"+ - "\2\2\u037e\u0380\5\u00dfp\2\u037f\u0381\5\u00f5{\2\u0380\u037f\3\2\2\2"+ - "\u0381\u0382\3\2\2\2\u0382\u0380\3\2\2\2\u0382\u0383\3\2\2\2\u0383\u039d"+ - "\3\2\2\2\u0384\u0386\5\u00f5{\2\u0385\u0384\3\2\2\2\u0386\u0387\3\2\2"+ - "\2\u0387\u0385\3\2\2\2\u0387\u0388\3\2\2\2\u0388\u0390\3\2\2\2\u0389\u038d"+ - "\5\u00dfp\2\u038a\u038c\5\u00f5{\2\u038b\u038a\3\2\2\2\u038c\u038f\3\2"+ - "\2\2\u038d\u038b\3\2\2\2\u038d\u038e\3\2\2\2\u038e\u0391\3\2\2\2\u038f"+ - "\u038d\3\2\2\2\u0390\u0389\3\2\2\2\u0390\u0391\3\2\2\2\u0391\u0392\3\2"+ - "\2\2\u0392\u0393\5\u00f3z\2\u0393\u039d\3\2\2\2\u0394\u0396\5\u00dfp\2"+ - "\u0395\u0397\5\u00f5{\2\u0396\u0395\3\2\2\2\u0397\u0398\3\2\2\2\u0398"+ - "\u0396\3\2\2\2\u0398\u0399\3\2\2\2\u0399\u039a\3\2\2\2\u039a\u039b\5\u00f3"+ - "z\2\u039b\u039d\3\2\2\2\u039c\u0373\3\2\2\2\u039c\u037e\3\2\2\2\u039c"+ - "\u0385\3\2\2\2\u039c\u0394\3\2\2\2\u039d\u00e8\3\2\2\2\u039e\u03a1\5\u00f7"+ - "|\2\u039f\u03a1\7a\2\2\u03a0\u039e\3\2\2\2\u03a0\u039f\3\2\2\2\u03a1\u03a7"+ - "\3\2\2\2\u03a2\u03a6\5\u00f7|\2\u03a3\u03a6\5\u00f5{\2\u03a4\u03a6\t\3"+ - "\2\2\u03a5\u03a2\3\2\2\2\u03a5\u03a3\3\2\2\2\u03a5\u03a4\3\2\2\2\u03a6"+ - "\u03a9\3\2\2\2\u03a7\u03a5\3\2\2\2\u03a7\u03a8\3\2\2\2\u03a8\u00ea\3\2"+ - "\2\2\u03a9\u03a7\3\2\2\2\u03aa\u03ae\5\u00f5{\2\u03ab\u03af\5\u00f7|\2"+ - "\u03ac\u03af\5\u00f5{\2\u03ad\u03af\t\4\2\2\u03ae\u03ab\3\2\2\2\u03ae"+ - "\u03ac\3\2\2\2\u03ae\u03ad\3\2\2\2\u03af\u03b0\3\2\2\2\u03b0\u03ae\3\2"+ - "\2\2\u03b0\u03b1\3\2\2\2\u03b1\u00ec\3\2\2\2\u03b2\u03b6\5\u00f7|\2\u03b3"+ - "\u03b6\5\u00f5{\2\u03b4\u03b6\7a\2\2\u03b5\u03b2\3\2\2\2\u03b5\u03b3\3"+ - "\2\2\2\u03b5\u03b4\3\2\2\2\u03b6\u03b7\3\2\2\2\u03b7\u03b5\3\2\2\2\u03b7"+ - "\u03b8\3\2\2\2\u03b8\u00ee\3\2\2\2\u03b9\u03bf\7$\2\2\u03ba\u03be\n\5"+ - "\2\2\u03bb\u03bc\7$\2\2\u03bc\u03be\7$\2\2\u03bd\u03ba\3\2\2\2\u03bd\u03bb"+ - "\3\2\2\2\u03be\u03c1\3\2\2\2\u03bf\u03bd\3\2\2\2\u03bf\u03c0\3\2\2\2\u03c0"+ - "\u03c2\3\2\2\2\u03c1\u03bf\3\2\2\2\u03c2\u03c3\7$\2\2\u03c3\u00f0\3\2"+ - "\2\2\u03c4\u03ca\7b\2\2\u03c5\u03c9\n\6\2\2\u03c6\u03c7\7b\2\2\u03c7\u03c9"+ - "\7b\2\2\u03c8\u03c5\3\2\2\2\u03c8\u03c6\3\2\2\2\u03c9\u03cc\3\2\2\2\u03ca"+ - "\u03c8\3\2\2\2\u03ca\u03cb\3\2\2\2\u03cb\u03cd\3\2\2\2\u03cc\u03ca\3\2"+ - "\2\2\u03cd\u03ce\7b\2\2\u03ce\u00f2\3\2\2\2\u03cf\u03d1\7G\2\2\u03d0\u03d2"+ - "\t\7\2\2\u03d1\u03d0\3\2\2\2\u03d1\u03d2\3\2\2\2\u03d2\u03d4\3\2\2\2\u03d3"+ - "\u03d5\5\u00f5{\2\u03d4\u03d3\3\2\2\2\u03d5\u03d6\3\2\2\2\u03d6\u03d4"+ - "\3\2\2\2\u03d6\u03d7\3\2\2\2\u03d7\u00f4\3\2\2\2\u03d8\u03d9\t\b\2\2\u03d9"+ - 
"\u00f6\3\2\2\2\u03da\u03db\t\t\2\2\u03db\u00f8\3\2\2\2\u03dc\u03dd\7/"+ - "\2\2\u03dd\u03de\7/\2\2\u03de\u03e2\3\2\2\2\u03df\u03e1\n\n\2\2\u03e0"+ - "\u03df\3\2\2\2\u03e1\u03e4\3\2\2\2\u03e2\u03e0\3\2\2\2\u03e2\u03e3\3\2"+ - "\2\2\u03e3\u03e6\3\2\2\2\u03e4\u03e2\3\2\2\2\u03e5\u03e7\7\17\2\2\u03e6"+ - "\u03e5\3\2\2\2\u03e6\u03e7\3\2\2\2\u03e7\u03e9\3\2\2\2\u03e8\u03ea\7\f"+ - "\2\2\u03e9\u03e8\3\2\2\2\u03e9\u03ea\3\2\2\2\u03ea\u03eb\3\2\2\2\u03eb"+ - "\u03ec\b}\2\2\u03ec\u00fa\3\2\2\2\u03ed\u03ee\7\61\2\2\u03ee\u03ef\7,"+ - "\2\2\u03ef\u03f4\3\2\2\2\u03f0\u03f3\5\u00fb~\2\u03f1\u03f3\13\2\2\2\u03f2"+ - "\u03f0\3\2\2\2\u03f2\u03f1\3\2\2\2\u03f3\u03f6\3\2\2\2\u03f4\u03f5\3\2"+ - "\2\2\u03f4\u03f2\3\2\2\2\u03f5\u03f7\3\2\2\2\u03f6\u03f4\3\2\2\2\u03f7"+ - "\u03f8\7,\2\2\u03f8\u03f9\7\61\2\2\u03f9\u03fa\3\2\2\2\u03fa\u03fb\b~"+ - "\2\2\u03fb\u00fc\3\2\2\2\u03fc\u03fe\t\13\2\2\u03fd\u03fc\3\2\2\2\u03fe"+ - "\u03ff\3\2\2\2\u03ff\u03fd\3\2\2\2\u03ff\u0400\3\2\2\2\u0400\u0401\3\2"+ - "\2\2\u0401\u0402\b\177\2\2\u0402\u00fe\3\2\2\2\u0403\u0404\13\2\2\2\u0404"+ - "\u0100\3\2\2\2\"\2\u0345\u0366\u0368\u0370\u0375\u037b\u0382\u0387\u038d"+ - "\u0390\u0398\u039c\u03a0\u03a5\u03a7\u03ae\u03b0\u03b5\u03b7\u03bd\u03bf"+ - "\u03c8\u03ca\u03d1\u03d6\u03e2\u03e6\u03e9\u03f2\u03f4\u03ff\3\2\3\2"; + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2\u0080\u0423\b\1\4"+ + "\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n"+ + "\4\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+ + "\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+ + "\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t"+ + " \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t"+ + "+\4,\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64"+ + "\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\4=\t"+ + "=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4"+ + "I\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\t"+ + "T\4U\tU\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4^\t^\4_\t_"+ + "\4`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4g\tg\4h\th\4i\ti\4j\tj\4k"+ + "\tk\4l\tl\4m\tm\4n\tn\4o\to\4p\tp\4q\tq\4r\tr\4s\ts\4t\tt\4u\tu\4v\tv"+ + "\4w\tw\4x\tx\4y\ty\4z\tz\4{\t{\4|\t|\4}\t}\4~\t~\4\177\t\177\4\u0080\t"+ + "\u0080\4\u0081\t\u0081\4\u0082\t\u0082\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5"+ + "\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3"+ + "\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\f\3\f"+ + "\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3\17"+ + "\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21\3\21"+ + "\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23"+ + "\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24"+ + "\3\24\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25"+ + "\3\25\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\27\3\27"+ + "\3\30\3\30\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32"+ + "\3\32\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33"+ + "\3\33\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\35\3\35"+ + "\3\35\3\35\3\35\3\35\3\35\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\37\3\37"+ + "\3\37\3\37\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3 \3!\3!\3!\3!\3!"+ + "\3!\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3#\3$\3$\3$\3$\3$\3%\3%"+ + 
"\3%\3%\3%\3&\3&\3&\3&\3&\3&\3&\3&\3&\3&\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3"+ + "\'\3\'\3(\3(\3(\3(\3(\3(\3)\3)\3)\3)\3)\3)\3)\3*\3*\3*\3*\3*\3+\3+\3+"+ + "\3+\3+\3+\3,\3,\3,\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3.\3.\3.\3.\3.\3/\3/"+ + "\3/\3\60\3\60\3\60\3\60\3\60\3\61\3\61\3\61\3\61\3\61\3\62\3\62\3\62\3"+ + "\62\3\62\3\63\3\63\3\63\3\63\3\63\3\64\3\64\3\64\3\64\3\64\3\64\3\65\3"+ + "\65\3\65\3\65\3\65\3\65\3\65\3\66\3\66\3\66\3\66\3\66\3\66\3\67\3\67\3"+ + "\67\3\67\3\67\3\67\3\67\38\38\38\38\38\38\38\38\39\39\39\39\39\39\3:\3"+ + ":\3:\3:\3:\3:\3:\3;\3;\3;\3;\3;\3;\3;\3;\3<\3<\3<\3<\3=\3=\3=\3=\3=\3"+ + ">\3>\3>\3>\3>\3>\3?\3?\3?\3@\3@\3@\3@\3@\3@\3@\3@\3@\3@\3A\3A\3A\3B\3"+ + "B\3B\3B\3B\3B\3C\3C\3C\3C\3C\3C\3D\3D\3D\3D\3D\3D\3D\3E\3E\3E\3E\3E\3"+ + "E\3E\3E\3E\3F\3F\3F\3F\3F\3G\3G\3G\3G\3G\3G\3H\3H\3H\3H\3H\3H\3I\3I\3"+ + "I\3I\3I\3I\3J\3J\3J\3J\3J\3J\3J\3J\3K\3K\3K\3K\3K\3K\3K\3L\3L\3L\3L\3"+ + "L\3L\3L\3L\3M\3M\3M\3M\3M\3M\3M\3N\3N\3N\3N\3N\3O\3O\3O\3O\3P\3P\3P\3"+ + "P\3P\3P\3Q\3Q\3Q\3Q\3Q\3Q\3Q\3R\3R\3R\3R\3R\3S\3S\3S\3S\3S\3T\3T\3T\3"+ + "U\3U\3U\3U\3U\3V\3V\3V\3V\3V\3V\3W\3W\3W\3W\3W\3W\3X\3X\3X\3X\3X\3X\3"+ + "X\3Y\3Y\3Y\3Y\3Y\3Y\3Z\3Z\3Z\3Z\3Z\3[\3[\3[\3[\3[\3\\\3\\\3\\\3\\\3\\"+ + "\3\\\3]\3]\3]\3]\3]\3]\3]\3]\3^\3^\3^\3^\3_\3_\3_\3_\3_\3_\3_\3`\3`\3"+ + "`\3a\3a\3a\3b\3b\3b\3b\3c\3c\3c\3c\3c\3c\3d\3d\3e\3e\3f\3f\3f\3f\3g\3"+ + "g\3g\3g\5g\u0364\ng\3h\3h\3i\3i\3i\3j\3j\3k\3k\3k\3l\3l\3m\3m\3n\3n\3"+ + "o\3o\3p\3p\3q\3q\3q\3r\3r\3s\3s\3t\3t\3t\3t\7t\u0385\nt\ft\16t\u0388\13"+ + "t\3t\3t\3u\6u\u038d\nu\ru\16u\u038e\3v\6v\u0392\nv\rv\16v\u0393\3v\3v"+ + "\7v\u0398\nv\fv\16v\u039b\13v\3v\3v\6v\u039f\nv\rv\16v\u03a0\3v\6v\u03a4"+ + "\nv\rv\16v\u03a5\3v\3v\7v\u03aa\nv\fv\16v\u03ad\13v\5v\u03af\nv\3v\3v"+ + "\3v\3v\6v\u03b5\nv\rv\16v\u03b6\3v\3v\5v\u03bb\nv\3w\3w\5w\u03bf\nw\3"+ + "w\3w\3w\7w\u03c4\nw\fw\16w\u03c7\13w\3x\3x\3x\3x\6x\u03cd\nx\rx\16x\u03ce"+ + "\3y\3y\3y\6y\u03d4\ny\ry\16y\u03d5\3z\3z\3z\3z\7z\u03dc\nz\fz\16z\u03df"+ + "\13z\3z\3z\3{\3{\3{\3{\7{\u03e7\n{\f{\16{\u03ea\13{\3{\3{\3|\3|\5|\u03f0"+ + "\n|\3|\6|\u03f3\n|\r|\16|\u03f4\3}\3}\3~\3~\3\177\3\177\3\177\3\177\7"+ + "\177\u03ff\n\177\f\177\16\177\u0402\13\177\3\177\5\177\u0405\n\177\3\177"+ + "\5\177\u0408\n\177\3\177\3\177\3\u0080\3\u0080\3\u0080\3\u0080\3\u0080"+ + "\7\u0080\u0411\n\u0080\f\u0080\16\u0080\u0414\13\u0080\3\u0080\3\u0080"+ + "\3\u0080\3\u0080\3\u0080\3\u0081\6\u0081\u041c\n\u0081\r\u0081\16\u0081"+ + "\u041d\3\u0081\3\u0081\3\u0082\3\u0082\3\u0412\2\u0083\3\3\5\4\7\5\t\6"+ + "\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24"+ + "\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K"+ + "\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s;u{?}@\177"+ + "A\u0081B\u0083C\u0085D\u0087E\u0089F\u008bG\u008dH\u008fI\u0091J\u0093"+ + "K\u0095L\u0097M\u0099N\u009bO\u009dP\u009fQ\u00a1R\u00a3S\u00a5T\u00a7"+ + "U\u00a9V\u00abW\u00adX\u00afY\u00b1Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb"+ + "_\u00bd`\u00bfa\u00c1b\u00c3c\u00c5d\u00c7e\u00c9f\u00cbg\u00cdh\u00cf"+ + "i\u00d1j\u00d3k\u00d5l\u00d7m\u00d9n\u00dbo\u00ddp\u00dfq\u00e1r\u00e3"+ + "s\u00e5t\u00e7u\u00e9v\u00ebw\u00edx\u00efy\u00f1z\u00f3{\u00f5|\u00f7"+ + "\2\u00f9\2\u00fb\2\u00fd}\u00ff~\u0101\177\u0103\u0080\3\2\f\3\2))\4\2"+ + "BBaa\5\2<\3\2\2\2\u01c3\u01c4\7G\2\2\u01c4\u01c5\7Z\2\2\u01c5\u01c6\7V\2"+ + "\2\u01c6\u01c7\7T\2\2\u01c7\u01c8\7C\2\2\u01c8\u01c9\7E\2\2\u01c9\u01ca"+ + "\7V\2\2\u01ca@\3\2\2\2\u01cb\u01cc\7H\2\2\u01cc\u01cd\7C\2\2\u01cd\u01ce"+ + 
"\7N\2\2\u01ce\u01cf\7U\2\2\u01cf\u01d0\7G\2\2\u01d0B\3\2\2\2\u01d1\u01d2"+ + "\7H\2\2\u01d2\u01d3\7K\2\2\u01d3\u01d4\7T\2\2\u01d4\u01d5\7U\2\2\u01d5"+ + "\u01d6\7V\2\2\u01d6D\3\2\2\2\u01d7\u01d8\7H\2\2\u01d8\u01d9\7Q\2\2\u01d9"+ + "\u01da\7T\2\2\u01da\u01db\7O\2\2\u01db\u01dc\7C\2\2\u01dc\u01dd\7V\2\2"+ + "\u01ddF\3\2\2\2\u01de\u01df\7H\2\2\u01df\u01e0\7T\2\2\u01e0\u01e1\7Q\2"+ + "\2\u01e1\u01e2\7O\2\2\u01e2H\3\2\2\2\u01e3\u01e4\7H\2\2\u01e4\u01e5\7"+ + "W\2\2\u01e5\u01e6\7N\2\2\u01e6\u01e7\7N\2\2\u01e7J\3\2\2\2\u01e8\u01e9"+ + "\7H\2\2\u01e9\u01ea\7W\2\2\u01ea\u01eb\7P\2\2\u01eb\u01ec\7E\2\2\u01ec"+ + "\u01ed\7V\2\2\u01ed\u01ee\7K\2\2\u01ee\u01ef\7Q\2\2\u01ef\u01f0\7P\2\2"+ + "\u01f0\u01f1\7U\2\2\u01f1L\3\2\2\2\u01f2\u01f3\7I\2\2\u01f3\u01f4\7T\2"+ + "\2\u01f4\u01f5\7C\2\2\u01f5\u01f6\7R\2\2\u01f6\u01f7\7J\2\2\u01f7\u01f8"+ + "\7X\2\2\u01f8\u01f9\7K\2\2\u01f9\u01fa\7\\\2\2\u01faN\3\2\2\2\u01fb\u01fc"+ + "\7I\2\2\u01fc\u01fd\7T\2\2\u01fd\u01fe\7Q\2\2\u01fe\u01ff\7W\2\2\u01ff"+ + "\u0200\7R\2\2\u0200P\3\2\2\2\u0201\u0202\7J\2\2\u0202\u0203\7C\2\2\u0203"+ + "\u0204\7X\2\2\u0204\u0205\7K\2\2\u0205\u0206\7P\2\2\u0206\u0207\7I\2\2"+ + "\u0207R\3\2\2\2\u0208\u0209\7J\2\2\u0209\u020a\7Q\2\2\u020a\u020b\7W\2"+ + "\2\u020b\u020c\7T\2\2\u020cT\3\2\2\2\u020d\u020e\7J\2\2\u020e\u020f\7"+ + "Q\2\2\u020f\u0210\7W\2\2\u0210\u0211\7T\2\2\u0211\u0212\7U\2\2\u0212V"+ + "\3\2\2\2\u0213\u0214\7K\2\2\u0214\u0215\7P\2\2\u0215X\3\2\2\2\u0216\u0217"+ + "\7K\2\2\u0217\u0218\7P\2\2\u0218\u0219\7P\2\2\u0219\u021a\7G\2\2\u021a"+ + "\u021b\7T\2\2\u021bZ\3\2\2\2\u021c\u021d\7K\2\2\u021d\u021e\7P\2\2\u021e"+ + "\u021f\7V\2\2\u021f\u0220\7G\2\2\u0220\u0221\7T\2\2\u0221\u0222\7X\2\2"+ + "\u0222\u0223\7C\2\2\u0223\u0224\7N\2\2\u0224\\\3\2\2\2\u0225\u0226\7K"+ + "\2\2\u0226\u0227\7U\2\2\u0227^\3\2\2\2\u0228\u0229\7L\2\2\u0229\u022a"+ + "\7Q\2\2\u022a\u022b\7K\2\2\u022b\u022c\7P\2\2\u022c`\3\2\2\2\u022d\u022e"+ + "\7N\2\2\u022e\u022f\7C\2\2\u022f\u0230\7U\2\2\u0230\u0231\7V\2\2\u0231"+ + "b\3\2\2\2\u0232\u0233\7N\2\2\u0233\u0234\7G\2\2\u0234\u0235\7H\2\2\u0235"+ + "\u0236\7V\2\2\u0236d\3\2\2\2\u0237\u0238\7N\2\2\u0238\u0239\7K\2\2\u0239"+ + "\u023a\7M\2\2\u023a\u023b\7G\2\2\u023bf\3\2\2\2\u023c\u023d\7N\2\2\u023d"+ + "\u023e\7K\2\2\u023e\u023f\7O\2\2\u023f\u0240\7K\2\2\u0240\u0241\7V\2\2"+ + "\u0241h\3\2\2\2\u0242\u0243\7O\2\2\u0243\u0244\7C\2\2\u0244\u0245\7R\2"+ + "\2\u0245\u0246\7R\2\2\u0246\u0247\7G\2\2\u0247\u0248\7F\2\2\u0248j\3\2"+ + "\2\2\u0249\u024a\7O\2\2\u024a\u024b\7C\2\2\u024b\u024c\7V\2\2\u024c\u024d"+ + "\7E\2\2\u024d\u024e\7J\2\2\u024el\3\2\2\2\u024f\u0250\7O\2\2\u0250\u0251"+ + "\7K\2\2\u0251\u0252\7P\2\2\u0252\u0253\7W\2\2\u0253\u0254\7V\2\2\u0254"+ + "\u0255\7G\2\2\u0255n\3\2\2\2\u0256\u0257\7O\2\2\u0257\u0258\7K\2\2\u0258"+ + "\u0259\7P\2\2\u0259\u025a\7W\2\2\u025a\u025b\7V\2\2\u025b\u025c\7G\2\2"+ + "\u025c\u025d\7U\2\2\u025dp\3\2\2\2\u025e\u025f\7O\2\2\u025f\u0260\7Q\2"+ + "\2\u0260\u0261\7P\2\2\u0261\u0262\7V\2\2\u0262\u0263\7J\2\2\u0263r\3\2"+ + "\2\2\u0264\u0265\7O\2\2\u0265\u0266\7Q\2\2\u0266\u0267\7P\2\2\u0267\u0268"+ + "\7V\2\2\u0268\u0269\7J\2\2\u0269\u026a\7U\2\2\u026at\3\2\2\2\u026b\u026c"+ + "\7P\2\2\u026c\u026d\7C\2\2\u026d\u026e\7V\2\2\u026e\u026f\7W\2\2\u026f"+ + "\u0270\7T\2\2\u0270\u0271\7C\2\2\u0271\u0272\7N\2\2\u0272v\3\2\2\2\u0273"+ + "\u0274\7P\2\2\u0274\u0275\7Q\2\2\u0275\u0276\7V\2\2\u0276x\3\2\2\2\u0277"+ + "\u0278\7P\2\2\u0278\u0279\7W\2\2\u0279\u027a\7N\2\2\u027a\u027b\7N\2\2"+ + "\u027bz\3\2\2\2\u027c\u027d\7P\2\2\u027d\u027e\7W\2\2\u027e\u027f\7N\2"+ + 
"\2\u027f\u0280\7N\2\2\u0280\u0281\7U\2\2\u0281|\3\2\2\2\u0282\u0283\7"+ + "Q\2\2\u0283\u0284\7P\2\2\u0284~\3\2\2\2\u0285\u0286\7Q\2\2\u0286\u0287"+ + "\7R\2\2\u0287\u0288\7V\2\2\u0288\u0289\7K\2\2\u0289\u028a\7O\2\2\u028a"+ + "\u028b\7K\2\2\u028b\u028c\7\\\2\2\u028c\u028d\7G\2\2\u028d\u028e\7F\2"+ + "\2\u028e\u0080\3\2\2\2\u028f\u0290\7Q\2\2\u0290\u0291\7T\2\2\u0291\u0082"+ + "\3\2\2\2\u0292\u0293\7Q\2\2\u0293\u0294\7T\2\2\u0294\u0295\7F\2\2\u0295"+ + "\u0296\7G\2\2\u0296\u0297\7T\2\2\u0297\u0084\3\2\2\2\u0298\u0299\7Q\2"+ + "\2\u0299\u029a\7W\2\2\u029a\u029b\7V\2\2\u029b\u029c\7G\2\2\u029c\u029d"+ + "\7T\2\2\u029d\u0086\3\2\2\2\u029e\u029f\7R\2\2\u029f\u02a0\7C\2\2\u02a0"+ + "\u02a1\7T\2\2\u02a1\u02a2\7U\2\2\u02a2\u02a3\7G\2\2\u02a3\u02a4\7F\2\2"+ + "\u02a4\u0088\3\2\2\2\u02a5\u02a6\7R\2\2\u02a6\u02a7\7J\2\2\u02a7\u02a8"+ + "\7[\2\2\u02a8\u02a9\7U\2\2\u02a9\u02aa\7K\2\2\u02aa\u02ab\7E\2\2\u02ab"+ + "\u02ac\7C\2\2\u02ac\u02ad\7N\2\2\u02ad\u008a\3\2\2\2\u02ae\u02af\7R\2"+ + "\2\u02af\u02b0\7N\2\2\u02b0\u02b1\7C\2\2\u02b1\u02b2\7P\2\2\u02b2\u008c"+ + "\3\2\2\2\u02b3\u02b4\7T\2\2\u02b4\u02b5\7K\2\2\u02b5\u02b6\7I\2\2\u02b6"+ + "\u02b7\7J\2\2\u02b7\u02b8\7V\2\2\u02b8\u008e\3\2\2\2\u02b9\u02ba\7T\2"+ + "\2\u02ba\u02bb\7N\2\2\u02bb\u02bc\7K\2\2\u02bc\u02bd\7M\2\2\u02bd\u02be"+ + "\7G\2\2\u02be\u0090\3\2\2\2\u02bf\u02c0\7S\2\2\u02c0\u02c1\7W\2\2\u02c1"+ + "\u02c2\7G\2\2\u02c2\u02c3\7T\2\2\u02c3\u02c4\7[\2\2\u02c4\u0092\3\2\2"+ + "\2\u02c5\u02c6\7U\2\2\u02c6\u02c7\7E\2\2\u02c7\u02c8\7J\2\2\u02c8\u02c9"+ + "\7G\2\2\u02c9\u02ca\7O\2\2\u02ca\u02cb\7C\2\2\u02cb\u02cc\7U\2\2\u02cc"+ + "\u0094\3\2\2\2\u02cd\u02ce\7U\2\2\u02ce\u02cf\7G\2\2\u02cf\u02d0\7E\2"+ + "\2\u02d0\u02d1\7Q\2\2\u02d1\u02d2\7P\2\2\u02d2\u02d3\7F\2\2\u02d3\u0096"+ + "\3\2\2\2\u02d4\u02d5\7U\2\2\u02d5\u02d6\7G\2\2\u02d6\u02d7\7E\2\2\u02d7"+ + "\u02d8\7Q\2\2\u02d8\u02d9\7P\2\2\u02d9\u02da\7F\2\2\u02da\u02db\7U\2\2"+ + "\u02db\u0098\3\2\2\2\u02dc\u02dd\7U\2\2\u02dd\u02de\7G\2\2\u02de\u02df"+ + "\7N\2\2\u02df\u02e0\7G\2\2\u02e0\u02e1\7E\2\2\u02e1\u02e2\7V\2\2\u02e2"+ + "\u009a\3\2\2\2\u02e3\u02e4\7U\2\2\u02e4\u02e5\7J\2\2\u02e5\u02e6\7Q\2"+ + "\2\u02e6\u02e7\7Y\2\2\u02e7\u009c\3\2\2\2\u02e8\u02e9\7U\2\2\u02e9\u02ea"+ + "\7[\2\2\u02ea\u02eb\7U\2\2\u02eb\u009e\3\2\2\2\u02ec\u02ed\7V\2\2\u02ed"+ + "\u02ee\7C\2\2\u02ee\u02ef\7D\2\2\u02ef\u02f0\7N\2\2\u02f0\u02f1\7G\2\2"+ + "\u02f1\u00a0\3\2\2\2\u02f2\u02f3\7V\2\2\u02f3\u02f4\7C\2\2\u02f4\u02f5"+ + "\7D\2\2\u02f5\u02f6\7N\2\2\u02f6\u02f7\7G\2\2\u02f7\u02f8\7U\2\2\u02f8"+ + "\u00a2\3\2\2\2\u02f9\u02fa\7V\2\2\u02fa\u02fb\7G\2\2\u02fb\u02fc\7Z\2"+ + "\2\u02fc\u02fd\7V\2\2\u02fd\u00a4\3\2\2\2\u02fe\u02ff\7V\2\2\u02ff\u0300"+ + "\7T\2\2\u0300\u0301\7W\2\2\u0301\u0302\7G\2\2\u0302\u00a6\3\2\2\2\u0303"+ + "\u0304\7V\2\2\u0304\u0305\7Q\2\2\u0305\u00a8\3\2\2\2\u0306\u0307\7V\2"+ + "\2\u0307\u0308\7[\2\2\u0308\u0309\7R\2\2\u0309\u030a\7G\2\2\u030a\u00aa"+ + "\3\2\2\2\u030b\u030c\7V\2\2\u030c\u030d\7[\2\2\u030d\u030e\7R\2\2\u030e"+ + "\u030f\7G\2\2\u030f\u0310\7U\2\2\u0310\u00ac\3\2\2\2\u0311\u0312\7W\2"+ + "\2\u0312\u0313\7U\2\2\u0313\u0314\7K\2\2\u0314\u0315\7P\2\2\u0315\u0316"+ + "\7I\2\2\u0316\u00ae\3\2\2\2\u0317\u0318\7X\2\2\u0318\u0319\7G\2\2\u0319"+ + "\u031a\7T\2\2\u031a\u031b\7K\2\2\u031b\u031c\7H\2\2\u031c\u031d\7[\2\2"+ + "\u031d\u00b0\3\2\2\2\u031e\u031f\7Y\2\2\u031f\u0320\7J\2\2\u0320\u0321"+ + "\7G\2\2\u0321\u0322\7T\2\2\u0322\u0323\7G\2\2\u0323\u00b2\3\2\2\2\u0324"+ + "\u0325\7Y\2\2\u0325\u0326\7K\2\2\u0326\u0327\7V\2\2\u0327\u0328\7J\2\2"+ + 
"\u0328\u00b4\3\2\2\2\u0329\u032a\7[\2\2\u032a\u032b\7G\2\2\u032b\u032c"+ + "\7C\2\2\u032c\u032d\7T\2\2\u032d\u00b6\3\2\2\2\u032e\u032f\7[\2\2\u032f"+ + "\u0330\7G\2\2\u0330\u0331\7C\2\2\u0331\u0332\7T\2\2\u0332\u0333\7U\2\2"+ + "\u0333\u00b8\3\2\2\2\u0334\u0335\7}\2\2\u0335\u0336\7G\2\2\u0336\u0337"+ + "\7U\2\2\u0337\u0338\7E\2\2\u0338\u0339\7C\2\2\u0339\u033a\7R\2\2\u033a"+ + "\u033b\7G\2\2\u033b\u00ba\3\2\2\2\u033c\u033d\7}\2\2\u033d\u033e\7H\2"+ + "\2\u033e\u033f\7P\2\2\u033f\u00bc\3\2\2\2\u0340\u0341\7}\2\2\u0341\u0342"+ + "\7N\2\2\u0342\u0343\7K\2\2\u0343\u0344\7O\2\2\u0344\u0345\7K\2\2\u0345"+ + "\u0346\7V\2\2\u0346\u00be\3\2\2\2\u0347\u0348\7}\2\2\u0348\u0349\7F\2"+ + "\2\u0349\u00c0\3\2\2\2\u034a\u034b\7}\2\2\u034b\u034c\7V\2\2\u034c\u00c2"+ + "\3\2\2\2\u034d\u034e\7}\2\2\u034e\u034f\7V\2\2\u034f\u0350\7U\2\2\u0350"+ + "\u00c4\3\2\2\2\u0351\u0352\7}\2\2\u0352\u0353\7I\2\2\u0353\u0354\7W\2"+ + "\2\u0354\u0355\7K\2\2\u0355\u0356\7F\2\2\u0356\u00c6\3\2\2\2\u0357\u0358"+ + "\7\177\2\2\u0358\u00c8\3\2\2\2\u0359\u035a\7?\2\2\u035a\u00ca\3\2\2\2"+ + "\u035b\u035c\7>\2\2\u035c\u035d\7?\2\2\u035d\u035e\7@\2\2\u035e\u00cc"+ + "\3\2\2\2\u035f\u0360\7>\2\2\u0360\u0364\7@\2\2\u0361\u0362\7#\2\2\u0362"+ + "\u0364\7?\2\2\u0363\u035f\3\2\2\2\u0363\u0361\3\2\2\2\u0364\u00ce\3\2"+ + "\2\2\u0365\u0366\7>\2\2\u0366\u00d0\3\2\2\2\u0367\u0368\7>\2\2\u0368\u0369"+ + "\7?\2\2\u0369\u00d2\3\2\2\2\u036a\u036b\7@\2\2\u036b\u00d4\3\2\2\2\u036c"+ + "\u036d\7@\2\2\u036d\u036e\7?\2\2\u036e\u00d6\3\2\2\2\u036f\u0370\7-\2"+ + "\2\u0370\u00d8\3\2\2\2\u0371\u0372\7/\2\2\u0372\u00da\3\2\2\2\u0373\u0374"+ + "\7,\2\2\u0374\u00dc\3\2\2\2\u0375\u0376\7\61\2\2\u0376\u00de\3\2\2\2\u0377"+ + "\u0378\7\'\2\2\u0378\u00e0\3\2\2\2\u0379\u037a\7~\2\2\u037a\u037b\7~\2"+ + "\2\u037b\u00e2\3\2\2\2\u037c\u037d\7\60\2\2\u037d\u00e4\3\2\2\2\u037e"+ + "\u037f\7A\2\2\u037f\u00e6\3\2\2\2\u0380\u0386\7)\2\2\u0381\u0385\n\2\2"+ + "\2\u0382\u0383\7)\2\2\u0383\u0385\7)\2\2\u0384\u0381\3\2\2\2\u0384\u0382"+ + "\3\2\2\2\u0385\u0388\3\2\2\2\u0386\u0384\3\2\2\2\u0386\u0387\3\2\2\2\u0387"+ + "\u0389\3\2\2\2\u0388\u0386\3\2\2\2\u0389\u038a\7)\2\2\u038a\u00e8\3\2"+ + "\2\2\u038b\u038d\5\u00f9}\2\u038c\u038b\3\2\2\2\u038d\u038e\3\2\2\2\u038e"+ + "\u038c\3\2\2\2\u038e\u038f\3\2\2\2\u038f\u00ea\3\2\2\2\u0390\u0392\5\u00f9"+ + "}\2\u0391\u0390\3\2\2\2\u0392\u0393\3\2\2\2\u0393\u0391\3\2\2\2\u0393"+ + "\u0394\3\2\2\2\u0394\u0395\3\2\2\2\u0395\u0399\5\u00e3r\2\u0396\u0398"+ + "\5\u00f9}\2\u0397\u0396\3\2\2\2\u0398\u039b\3\2\2\2\u0399\u0397\3\2\2"+ + "\2\u0399\u039a\3\2\2\2\u039a\u03bb\3\2\2\2\u039b\u0399\3\2\2\2\u039c\u039e"+ + "\5\u00e3r\2\u039d\u039f\5\u00f9}\2\u039e\u039d\3\2\2\2\u039f\u03a0\3\2"+ + "\2\2\u03a0\u039e\3\2\2\2\u03a0\u03a1\3\2\2\2\u03a1\u03bb\3\2\2\2\u03a2"+ + "\u03a4\5\u00f9}\2\u03a3\u03a2\3\2\2\2\u03a4\u03a5\3\2\2\2\u03a5\u03a3"+ + "\3\2\2\2\u03a5\u03a6\3\2\2\2\u03a6\u03ae\3\2\2\2\u03a7\u03ab\5\u00e3r"+ + "\2\u03a8\u03aa\5\u00f9}\2\u03a9\u03a8\3\2\2\2\u03aa\u03ad\3\2\2\2\u03ab"+ + "\u03a9\3\2\2\2\u03ab\u03ac\3\2\2\2\u03ac\u03af\3\2\2\2\u03ad\u03ab\3\2"+ + "\2\2\u03ae\u03a7\3\2\2\2\u03ae\u03af\3\2\2\2\u03af\u03b0\3\2\2\2\u03b0"+ + "\u03b1\5\u00f7|\2\u03b1\u03bb\3\2\2\2\u03b2\u03b4\5\u00e3r\2\u03b3\u03b5"+ + "\5\u00f9}\2\u03b4\u03b3\3\2\2\2\u03b5\u03b6\3\2\2\2\u03b6\u03b4\3\2\2"+ + "\2\u03b6\u03b7\3\2\2\2\u03b7\u03b8\3\2\2\2\u03b8\u03b9\5\u00f7|\2\u03b9"+ + "\u03bb\3\2\2\2\u03ba\u0391\3\2\2\2\u03ba\u039c\3\2\2\2\u03ba\u03a3\3\2"+ + "\2\2\u03ba\u03b2\3\2\2\2\u03bb\u00ec\3\2\2\2\u03bc\u03bf\5\u00fb~\2\u03bd"+ + 
"\u03bf\7a\2\2\u03be\u03bc\3\2\2\2\u03be\u03bd\3\2\2\2\u03bf\u03c5\3\2"+ + "\2\2\u03c0\u03c4\5\u00fb~\2\u03c1\u03c4\5\u00f9}\2\u03c2\u03c4\t\3\2\2"+ + "\u03c3\u03c0\3\2\2\2\u03c3\u03c1\3\2\2\2\u03c3\u03c2\3\2\2\2\u03c4\u03c7"+ + "\3\2\2\2\u03c5\u03c3\3\2\2\2\u03c5\u03c6\3\2\2\2\u03c6\u00ee\3\2\2\2\u03c7"+ + "\u03c5\3\2\2\2\u03c8\u03cc\5\u00f9}\2\u03c9\u03cd\5\u00fb~\2\u03ca\u03cd"+ + "\5\u00f9}\2\u03cb\u03cd\t\4\2\2\u03cc\u03c9\3\2\2\2\u03cc\u03ca\3\2\2"+ + "\2\u03cc\u03cb\3\2\2\2\u03cd\u03ce\3\2\2\2\u03ce\u03cc\3\2\2\2\u03ce\u03cf"+ + "\3\2\2\2\u03cf\u00f0\3\2\2\2\u03d0\u03d4\5\u00fb~\2\u03d1\u03d4\5\u00f9"+ + "}\2\u03d2\u03d4\7a\2\2\u03d3\u03d0\3\2\2\2\u03d3\u03d1\3\2\2\2\u03d3\u03d2"+ + "\3\2\2\2\u03d4\u03d5\3\2\2\2\u03d5\u03d3\3\2\2\2\u03d5\u03d6\3\2\2\2\u03d6"+ + "\u00f2\3\2\2\2\u03d7\u03dd\7$\2\2\u03d8\u03dc\n\5\2\2\u03d9\u03da\7$\2"+ + "\2\u03da\u03dc\7$\2\2\u03db\u03d8\3\2\2\2\u03db\u03d9\3\2\2\2\u03dc\u03df"+ + "\3\2\2\2\u03dd\u03db\3\2\2\2\u03dd\u03de\3\2\2\2\u03de\u03e0\3\2\2\2\u03df"+ + "\u03dd\3\2\2\2\u03e0\u03e1\7$\2\2\u03e1\u00f4\3\2\2\2\u03e2\u03e8\7b\2"+ + "\2\u03e3\u03e7\n\6\2\2\u03e4\u03e5\7b\2\2\u03e5\u03e7\7b\2\2\u03e6\u03e3"+ + "\3\2\2\2\u03e6\u03e4\3\2\2\2\u03e7\u03ea\3\2\2\2\u03e8\u03e6\3\2\2\2\u03e8"+ + "\u03e9\3\2\2\2\u03e9\u03eb\3\2\2\2\u03ea\u03e8\3\2\2\2\u03eb\u03ec\7b"+ + "\2\2\u03ec\u00f6\3\2\2\2\u03ed\u03ef\7G\2\2\u03ee\u03f0\t\7\2\2\u03ef"+ + "\u03ee\3\2\2\2\u03ef\u03f0\3\2\2\2\u03f0\u03f2\3\2\2\2\u03f1\u03f3\5\u00f9"+ + "}\2\u03f2\u03f1\3\2\2\2\u03f3\u03f4\3\2\2\2\u03f4\u03f2\3\2\2\2\u03f4"+ + "\u03f5\3\2\2\2\u03f5\u00f8\3\2\2\2\u03f6\u03f7\t\b\2\2\u03f7\u00fa\3\2"+ + "\2\2\u03f8\u03f9\t\t\2\2\u03f9\u00fc\3\2\2\2\u03fa\u03fb\7/\2\2\u03fb"+ + "\u03fc\7/\2\2\u03fc\u0400\3\2\2\2\u03fd\u03ff\n\n\2\2\u03fe\u03fd\3\2"+ + "\2\2\u03ff\u0402\3\2\2\2\u0400\u03fe\3\2\2\2\u0400\u0401\3\2\2\2\u0401"+ + "\u0404\3\2\2\2\u0402\u0400\3\2\2\2\u0403\u0405\7\17\2\2\u0404\u0403\3"+ + "\2\2\2\u0404\u0405\3\2\2\2\u0405\u0407\3\2\2\2\u0406\u0408\7\f\2\2\u0407"+ + "\u0406\3\2\2\2\u0407\u0408\3\2\2\2\u0408\u0409\3\2\2\2\u0409\u040a\b\177"+ + "\2\2\u040a\u00fe\3\2\2\2\u040b\u040c\7\61\2\2\u040c\u040d\7,\2\2\u040d"+ + "\u0412\3\2\2\2\u040e\u0411\5\u00ff\u0080\2\u040f\u0411\13\2\2\2\u0410"+ + "\u040e\3\2\2\2\u0410\u040f\3\2\2\2\u0411\u0414\3\2\2\2\u0412\u0413\3\2"+ + "\2\2\u0412\u0410\3\2\2\2\u0413\u0415\3\2\2\2\u0414\u0412\3\2\2\2\u0415"+ + "\u0416\7,\2\2\u0416\u0417\7\61\2\2\u0417\u0418\3\2\2\2\u0418\u0419\b\u0080"+ + "\2\2\u0419\u0100\3\2\2\2\u041a\u041c\t\13\2\2\u041b\u041a\3\2\2\2\u041c"+ + "\u041d\3\2\2\2\u041d\u041b\3\2\2\2\u041d\u041e\3\2\2\2\u041e\u041f\3\2"+ + "\2\2\u041f\u0420\b\u0081\2\2\u0420\u0102\3\2\2\2\u0421\u0422\13\2\2\2"+ + "\u0422\u0104\3\2\2\2\"\2\u0363\u0384\u0386\u038e\u0393\u0399\u03a0\u03a5"+ + "\u03ab\u03ae\u03b6\u03ba\u03be\u03c3\u03c5\u03cc\u03ce\u03d3\u03d5\u03db"+ + "\u03dd\u03e6\u03e8\u03ef\u03f4\u0400\u0404\u0407\u0410\u0412\u041d\3\2"+ + "\3\2"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java index e26aba5fde4..67180685c8c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java @@ -621,6 +621,18 @@ interface SqlBaseListener extends ParseTreeListener { * @param ctx the 
parse tree */ void exitExtract(SqlBaseParser.ExtractContext ctx); + /** + * Enter a parse tree produced by the {@code currentDateTimeFunction} + * labeled alternative in {@link SqlBaseParser#primaryExpression}. + * @param ctx the parse tree + */ + void enterCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx); + /** + * Exit a parse tree produced by the {@code currentDateTimeFunction} + * labeled alternative in {@link SqlBaseParser#primaryExpression}. + * @param ctx the parse tree + */ + void exitCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx); /** * Enter a parse tree produced by the {@code constantDefault} * labeled alternative in {@link SqlBaseParser#primaryExpression}. @@ -713,6 +725,16 @@ interface SqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitCastTemplate(SqlBaseParser.CastTemplateContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#builtinDateTimeFunction}. + * @param ctx the parse tree + */ + void enterBuiltinDateTimeFunction(SqlBaseParser.BuiltinDateTimeFunctionContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#builtinDateTimeFunction}. + * @param ctx the parse tree + */ + void exitBuiltinDateTimeFunction(SqlBaseParser.BuiltinDateTimeFunctionContext ctx); /** * Enter a parse tree produced by {@link SqlBaseParser#convertTemplate}. * @param ctx the parse tree diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java index 0239a8609b7..56996e4c4c2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java @@ -19,23 +19,23 @@ class SqlBaseParser extends Parser { public static final int T__0=1, T__1=2, T__2=3, T__3=4, ALL=5, ANALYZE=6, ANALYZED=7, AND=8, ANY=9, AS=10, ASC=11, BETWEEN=12, BY=13, CAST=14, CATALOG=15, CATALOGS=16, COLUMNS=17, - CONVERT=18, DAY=19, DAYS=20, DEBUG=21, DESC=22, DESCRIBE=23, DISTINCT=24, - ESCAPE=25, EXECUTABLE=26, EXISTS=27, EXPLAIN=28, EXTRACT=29, FALSE=30, - FIRST=31, FORMAT=32, FROM=33, FULL=34, FUNCTIONS=35, GRAPHVIZ=36, GROUP=37, - HAVING=38, HOUR=39, HOURS=40, IN=41, INNER=42, INTERVAL=43, IS=44, JOIN=45, - LAST=46, LEFT=47, LIKE=48, LIMIT=49, MAPPED=50, MATCH=51, MINUTE=52, MINUTES=53, - MONTH=54, MONTHS=55, NATURAL=56, NOT=57, NULL=58, NULLS=59, ON=60, OPTIMIZED=61, - OR=62, ORDER=63, OUTER=64, PARSED=65, PHYSICAL=66, PLAN=67, RIGHT=68, - RLIKE=69, QUERY=70, SCHEMAS=71, SECOND=72, SECONDS=73, SELECT=74, SHOW=75, - SYS=76, TABLE=77, TABLES=78, TEXT=79, TRUE=80, TO=81, TYPE=82, TYPES=83, - USING=84, VERIFY=85, WHERE=86, WITH=87, YEAR=88, YEARS=89, ESCAPE_ESC=90, - FUNCTION_ESC=91, LIMIT_ESC=92, DATE_ESC=93, TIME_ESC=94, TIMESTAMP_ESC=95, - GUID_ESC=96, ESC_END=97, EQ=98, NULLEQ=99, NEQ=100, LT=101, LTE=102, GT=103, - GTE=104, PLUS=105, MINUS=106, ASTERISK=107, SLASH=108, PERCENT=109, CONCAT=110, - DOT=111, PARAM=112, STRING=113, INTEGER_VALUE=114, DECIMAL_VALUE=115, - IDENTIFIER=116, DIGIT_IDENTIFIER=117, TABLE_IDENTIFIER=118, QUOTED_IDENTIFIER=119, - BACKQUOTED_IDENTIFIER=120, SIMPLE_COMMENT=121, BRACKETED_COMMENT=122, - WS=123, UNRECOGNIZED=124, DELIMITER=125; + CONVERT=18, CURRENT=19, CURRENT_TIMESTAMP=20, DAY=21, DAYS=22, DEBUG=23, + DESC=24, DESCRIBE=25, DISTINCT=26, ESCAPE=27, EXECUTABLE=28, EXISTS=29, + EXPLAIN=30, EXTRACT=31, FALSE=32, FIRST=33, FORMAT=34, 
FROM=35, FULL=36, + FUNCTIONS=37, GRAPHVIZ=38, GROUP=39, HAVING=40, HOUR=41, HOURS=42, IN=43, + INNER=44, INTERVAL=45, IS=46, JOIN=47, LAST=48, LEFT=49, LIKE=50, LIMIT=51, + MAPPED=52, MATCH=53, MINUTE=54, MINUTES=55, MONTH=56, MONTHS=57, NATURAL=58, + NOT=59, NULL=60, NULLS=61, ON=62, OPTIMIZED=63, OR=64, ORDER=65, OUTER=66, + PARSED=67, PHYSICAL=68, PLAN=69, RIGHT=70, RLIKE=71, QUERY=72, SCHEMAS=73, + SECOND=74, SECONDS=75, SELECT=76, SHOW=77, SYS=78, TABLE=79, TABLES=80, + TEXT=81, TRUE=82, TO=83, TYPE=84, TYPES=85, USING=86, VERIFY=87, WHERE=88, + WITH=89, YEAR=90, YEARS=91, ESCAPE_ESC=92, FUNCTION_ESC=93, LIMIT_ESC=94, + DATE_ESC=95, TIME_ESC=96, TIMESTAMP_ESC=97, GUID_ESC=98, ESC_END=99, EQ=100, + NULLEQ=101, NEQ=102, LT=103, LTE=104, GT=105, GTE=106, PLUS=107, MINUS=108, + ASTERISK=109, SLASH=110, PERCENT=111, CONCAT=112, DOT=113, PARAM=114, + STRING=115, INTEGER_VALUE=116, DECIMAL_VALUE=117, IDENTIFIER=118, DIGIT_IDENTIFIER=119, + TABLE_IDENTIFIER=120, QUOTED_IDENTIFIER=121, BACKQUOTED_IDENTIFIER=122, + SIMPLE_COMMENT=123, BRACKETED_COMMENT=124, WS=125, UNRECOGNIZED=126, DELIMITER=127; public static final int RULE_singleStatement = 0, RULE_singleExpression = 1, RULE_statement = 2, RULE_query = 3, RULE_queryNoWith = 4, RULE_limitClause = 5, RULE_queryTerm = 6, @@ -46,13 +46,13 @@ class SqlBaseParser extends Parser { RULE_expression = 21, RULE_booleanExpression = 22, RULE_matchQueryOptions = 23, RULE_predicated = 24, RULE_predicate = 25, RULE_likePattern = 26, RULE_pattern = 27, RULE_patternEscape = 28, RULE_valueExpression = 29, RULE_primaryExpression = 30, - RULE_castExpression = 31, RULE_castTemplate = 32, RULE_convertTemplate = 33, - RULE_extractExpression = 34, RULE_extractTemplate = 35, RULE_functionExpression = 36, - RULE_functionTemplate = 37, RULE_functionName = 38, RULE_constant = 39, - RULE_comparisonOperator = 40, RULE_booleanValue = 41, RULE_interval = 42, - RULE_intervalField = 43, RULE_dataType = 44, RULE_qualifiedName = 45, - RULE_identifier = 46, RULE_tableIdentifier = 47, RULE_quoteIdentifier = 48, - RULE_unquoteIdentifier = 49, RULE_number = 50, RULE_string = 51, RULE_nonReserved = 52; + RULE_castExpression = 31, RULE_castTemplate = 32, RULE_builtinDateTimeFunction = 33, + RULE_convertTemplate = 34, RULE_extractExpression = 35, RULE_extractTemplate = 36, + RULE_functionExpression = 37, RULE_functionTemplate = 38, RULE_functionName = 39, + RULE_constant = 40, RULE_comparisonOperator = 41, RULE_booleanValue = 42, + RULE_interval = 43, RULE_intervalField = 44, RULE_dataType = 45, RULE_qualifiedName = 46, + RULE_identifier = 47, RULE_tableIdentifier = 48, RULE_quoteIdentifier = 49, + RULE_unquoteIdentifier = 50, RULE_number = 51, RULE_string = 52, RULE_nonReserved = 53; public static final String[] ruleNames = { "singleStatement", "singleExpression", "statement", "query", "queryNoWith", "limitClause", "queryTerm", "orderBy", "querySpecification", "fromClause", @@ -61,49 +61,51 @@ class SqlBaseParser extends Parser { "relationPrimary", "expression", "booleanExpression", "matchQueryOptions", "predicated", "predicate", "likePattern", "pattern", "patternEscape", "valueExpression", "primaryExpression", "castExpression", "castTemplate", - "convertTemplate", "extractExpression", "extractTemplate", "functionExpression", - "functionTemplate", "functionName", "constant", "comparisonOperator", - "booleanValue", "interval", "intervalField", "dataType", "qualifiedName", - "identifier", "tableIdentifier", "quoteIdentifier", "unquoteIdentifier", + "builtinDateTimeFunction", 
"convertTemplate", "extractExpression", "extractTemplate", + "functionExpression", "functionTemplate", "functionName", "constant", + "comparisonOperator", "booleanValue", "interval", "intervalField", "dataType", + "qualifiedName", "identifier", "tableIdentifier", "quoteIdentifier", "unquoteIdentifier", "number", "string", "nonReserved" }; private static final String[] _LITERAL_NAMES = { null, "'('", "')'", "','", "':'", "'ALL'", "'ANALYZE'", "'ANALYZED'", "'AND'", "'ANY'", "'AS'", "'ASC'", "'BETWEEN'", "'BY'", "'CAST'", "'CATALOG'", - "'CATALOGS'", "'COLUMNS'", "'CONVERT'", "'DAY'", "'DAYS'", "'DEBUG'", - "'DESC'", "'DESCRIBE'", "'DISTINCT'", "'ESCAPE'", "'EXECUTABLE'", "'EXISTS'", - "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", "'FORMAT'", "'FROM'", - "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", "'GROUP'", "'HAVING'", "'HOUR'", - "'HOURS'", "'IN'", "'INNER'", "'INTERVAL'", "'IS'", "'JOIN'", "'LAST'", - "'LEFT'", "'LIKE'", "'LIMIT'", "'MAPPED'", "'MATCH'", "'MINUTE'", "'MINUTES'", - "'MONTH'", "'MONTHS'", "'NATURAL'", "'NOT'", "'NULL'", "'NULLS'", "'ON'", - "'OPTIMIZED'", "'OR'", "'ORDER'", "'OUTER'", "'PARSED'", "'PHYSICAL'", - "'PLAN'", "'RIGHT'", "'RLIKE'", "'QUERY'", "'SCHEMAS'", "'SECOND'", "'SECONDS'", - "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", "'TABLES'", "'TEXT'", "'TRUE'", - "'TO'", "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'", "'WHERE'", "'WITH'", - "'YEAR'", "'YEARS'", "'{ESCAPE'", "'{FN'", "'{LIMIT'", "'{D'", "'{T'", - "'{TS'", "'{GUID'", "'}'", "'='", "'<=>'", null, "'<'", "'<='", "'>'", - "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'||'", "'.'", "'?'" + "'CATALOGS'", "'COLUMNS'", "'CONVERT'", "'CURRENT'", "'CURRENT_TIMESTAMP'", + "'DAY'", "'DAYS'", "'DEBUG'", "'DESC'", "'DESCRIBE'", "'DISTINCT'", "'ESCAPE'", + "'EXECUTABLE'", "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", + "'FORMAT'", "'FROM'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", "'GROUP'", + "'HAVING'", "'HOUR'", "'HOURS'", "'IN'", "'INNER'", "'INTERVAL'", "'IS'", + "'JOIN'", "'LAST'", "'LEFT'", "'LIKE'", "'LIMIT'", "'MAPPED'", "'MATCH'", + "'MINUTE'", "'MINUTES'", "'MONTH'", "'MONTHS'", "'NATURAL'", "'NOT'", + "'NULL'", "'NULLS'", "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", "'OUTER'", + "'PARSED'", "'PHYSICAL'", "'PLAN'", "'RIGHT'", "'RLIKE'", "'QUERY'", "'SCHEMAS'", + "'SECOND'", "'SECONDS'", "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", "'TABLES'", + "'TEXT'", "'TRUE'", "'TO'", "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'", + "'WHERE'", "'WITH'", "'YEAR'", "'YEARS'", "'{ESCAPE'", "'{FN'", "'{LIMIT'", + "'{D'", "'{T'", "'{TS'", "'{GUID'", "'}'", "'='", "'<=>'", null, "'<'", + "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'||'", "'.'", + "'?'" }; private static final String[] _SYMBOLIC_NAMES = { null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", "AS", "ASC", "BETWEEN", "BY", "CAST", "CATALOG", "CATALOGS", "COLUMNS", - "CONVERT", "DAY", "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ESCAPE", - "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", - "FROM", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", "HOUR", "HOURS", - "IN", "INNER", "INTERVAL", "IS", "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", - "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", "MONTHS", "NATURAL", - "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", - "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", - "SELECT", "SHOW", "SYS", "TABLE", "TABLES", "TEXT", "TRUE", "TO", "TYPE", - "TYPES", "USING", "VERIFY", "WHERE", "WITH", "YEAR", "YEARS", 
"ESCAPE_ESC", - "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", - "GUID_ESC", "ESC_END", "EQ", "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", - "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", "DOT", "PARAM", - "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", - "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", - "BRACKETED_COMMENT", "WS", "UNRECOGNIZED", "DELIMITER" + "CONVERT", "CURRENT", "CURRENT_TIMESTAMP", "DAY", "DAYS", "DEBUG", "DESC", + "DESCRIBE", "DISTINCT", "ESCAPE", "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", + "FALSE", "FIRST", "FORMAT", "FROM", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", + "HAVING", "HOUR", "HOURS", "IN", "INNER", "INTERVAL", "IS", "JOIN", "LAST", + "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", + "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", + "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", + "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", + "TEXT", "TRUE", "TO", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", + "YEAR", "YEARS", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", + "TIME_ESC", "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", "NULLEQ", "NEQ", + "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", + "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", + "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", + "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", + "UNRECOGNIZED", "DELIMITER" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -184,9 +186,9 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(106); + setState(108); statement(); - setState(107); + setState(109); match(EOF); } } @@ -231,9 +233,9 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(109); + setState(111); expression(); - setState(110); + setState(112); match(EOF); } } @@ -633,14 +635,14 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 4, RULE_statement); int _la; try { - setState(218); + setState(220); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,19,_ctx) ) { case 1: _localctx = new StatementDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(112); + setState(114); query(); } break; @@ -648,27 +650,27 @@ class SqlBaseParser extends Parser { _localctx = new ExplainContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(113); + setState(115); match(EXPLAIN); - setState(127); + setState(129); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: { - setState(114); + setState(116); match(T__0); - setState(123); + setState(125); _errHandler.sync(this); _la = _input.LA(1); - while (((((_la - 32)) & ~0x3f) == 0 && ((1L << (_la - 32)) & ((1L << (FORMAT - 32)) | (1L << (PLAN - 32)) | (1L << (VERIFY - 32)))) != 0)) { + while (((((_la - 34)) & ~0x3f) == 0 && ((1L << (_la - 34)) & ((1L << (FORMAT - 34)) | (1L << (PLAN - 34)) | (1L << (VERIFY - 34)))) != 0)) { { - setState(121); + setState(123); switch (_input.LA(1)) { case PLAN: { - setState(115); + setState(117); match(PLAN); - setState(116); + setState(118); ((ExplainContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !(((((_la - 5)) & ~0x3f) == 0 && ((1L << (_la - 5)) & ((1L << (ALL - 5)) | (1L << (ANALYZED - 5)) | (1L << 
(EXECUTABLE - 5)) | (1L << (MAPPED - 5)) | (1L << (OPTIMIZED - 5)) | (1L << (PARSED - 5)))) != 0)) ) { @@ -680,9 +682,9 @@ class SqlBaseParser extends Parser { break; case FORMAT: { - setState(117); + setState(119); match(FORMAT); - setState(118); + setState(120); ((ExplainContext)_localctx).format = _input.LT(1); _la = _input.LA(1); if ( !(_la==GRAPHVIZ || _la==TEXT) ) { @@ -694,9 +696,9 @@ class SqlBaseParser extends Parser { break; case VERIFY: { - setState(119); + setState(121); match(VERIFY); - setState(120); + setState(122); ((ExplainContext)_localctx).verify = booleanValue(); } break; @@ -704,16 +706,16 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } } - setState(125); + setState(127); _errHandler.sync(this); _la = _input.LA(1); } - setState(126); + setState(128); match(T__1); } break; } - setState(129); + setState(131); statement(); } break; @@ -721,27 +723,27 @@ class SqlBaseParser extends Parser { _localctx = new DebugContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(130); + setState(132); match(DEBUG); - setState(142); + setState(144); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) { case 1: { - setState(131); + setState(133); match(T__0); - setState(138); + setState(140); _errHandler.sync(this); _la = _input.LA(1); while (_la==FORMAT || _la==PLAN) { { - setState(136); + setState(138); switch (_input.LA(1)) { case PLAN: { - setState(132); + setState(134); match(PLAN); - setState(133); + setState(135); ((DebugContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !(_la==ANALYZED || _la==OPTIMIZED) ) { @@ -753,9 +755,9 @@ class SqlBaseParser extends Parser { break; case FORMAT: { - setState(134); + setState(136); match(FORMAT); - setState(135); + setState(137); ((DebugContext)_localctx).format = _input.LT(1); _la = _input.LA(1); if ( !(_la==GRAPHVIZ || _la==TEXT) ) { @@ -769,16 +771,16 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } } - setState(140); + setState(142); _errHandler.sync(this); _la = _input.LA(1); } - setState(141); + setState(143); match(T__1); } break; } - setState(144); + setState(146); statement(); } break; @@ -786,15 +788,15 @@ class SqlBaseParser extends Parser { _localctx = new ShowTablesContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(145); + setState(147); match(SHOW); - setState(146); + setState(148); match(TABLES); - setState(149); + setState(151); switch (_input.LA(1)) { case LIKE: { - setState(147); + setState(149); ((ShowTablesContext)_localctx).tableLike = likePattern(); } break; @@ -802,6 +804,7 @@ class SqlBaseParser extends Parser { case ANALYZED: case CATALOGS: case COLUMNS: + case CURRENT: case DAY: case DEBUG: case EXECUTABLE: @@ -839,7 +842,7 @@ class SqlBaseParser extends Parser { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(148); + setState(150); ((ShowTablesContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -854,22 +857,22 @@ class SqlBaseParser extends Parser { _localctx = new ShowColumnsContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(151); - match(SHOW); - setState(152); - match(COLUMNS); setState(153); + match(SHOW); + setState(154); + match(COLUMNS); + setState(155); _la = _input.LA(1); if ( !(_la==FROM || _la==IN) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(156); + setState(158); switch (_input.LA(1)) { case LIKE: { - setState(154); + setState(156); ((ShowColumnsContext)_localctx).tableLike = likePattern(); } break; @@ -877,6 +880,7 
@@ class SqlBaseParser extends Parser { case ANALYZED: case CATALOGS: case COLUMNS: + case CURRENT: case DAY: case DEBUG: case EXECUTABLE: @@ -914,7 +918,7 @@ class SqlBaseParser extends Parser { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(155); + setState(157); ((ShowColumnsContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -927,18 +931,18 @@ class SqlBaseParser extends Parser { _localctx = new ShowColumnsContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(158); + setState(160); _la = _input.LA(1); if ( !(_la==DESC || _la==DESCRIBE) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(161); + setState(163); switch (_input.LA(1)) { case LIKE: { - setState(159); + setState(161); ((ShowColumnsContext)_localctx).tableLike = likePattern(); } break; @@ -946,6 +950,7 @@ class SqlBaseParser extends Parser { case ANALYZED: case CATALOGS: case COLUMNS: + case CURRENT: case DAY: case DEBUG: case EXECUTABLE: @@ -983,7 +988,7 @@ class SqlBaseParser extends Parser { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(160); + setState(162); ((ShowColumnsContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -996,15 +1001,15 @@ class SqlBaseParser extends Parser { _localctx = new ShowFunctionsContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(163); + setState(165); match(SHOW); - setState(164); - match(FUNCTIONS); setState(166); + match(FUNCTIONS); + setState(168); _la = _input.LA(1); if (_la==LIKE) { { - setState(165); + setState(167); likePattern(); } } @@ -1015,9 +1020,9 @@ class SqlBaseParser extends Parser { _localctx = new ShowSchemasContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(168); + setState(170); match(SHOW); - setState(169); + setState(171); match(SCHEMAS); } break; @@ -1025,9 +1030,9 @@ class SqlBaseParser extends Parser { _localctx = new SysCatalogsContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(170); + setState(172); match(SYS); - setState(171); + setState(173); match(CATALOGS); } break; @@ -1035,58 +1040,58 @@ class SqlBaseParser extends Parser { _localctx = new SysTablesContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(172); + setState(174); match(SYS); - setState(173); + setState(175); match(TABLES); - setState(176); + setState(178); _la = _input.LA(1); if (_la==CATALOG) { { - setState(174); + setState(176); match(CATALOG); - setState(175); + setState(177); ((SysTablesContext)_localctx).clusterLike = likePattern(); } } - setState(180); + setState(182); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: { - setState(178); + setState(180); ((SysTablesContext)_localctx).tableLike = likePattern(); } break; case 2: { - setState(179); + setState(181); ((SysTablesContext)_localctx).tableIdent = tableIdentifier(); } break; } - setState(191); + setState(193); _la = _input.LA(1); if (_la==TYPE) { { - setState(182); + setState(184); match(TYPE); - setState(183); + setState(185); string(); - setState(188); + setState(190); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(184); + setState(186); match(T__2); - setState(185); + setState(187); string(); } } - setState(190); + setState(192); _errHandler.sync(this); _la = _input.LA(1); } @@ -1099,28 +1104,28 @@ class SqlBaseParser extends Parser { _localctx = new SysColumnsContext(_localctx); enterOuterAlt(_localctx, 11); { - setState(193); + setState(195); match(SYS); - setState(194); + setState(196); match(COLUMNS); - setState(197); + 
setState(199); _la = _input.LA(1); if (_la==CATALOG) { { - setState(195); + setState(197); match(CATALOG); - setState(196); + setState(198); ((SysColumnsContext)_localctx).cluster = string(); } } - setState(202); + setState(204); switch (_input.LA(1)) { case TABLE: { - setState(199); + setState(201); match(TABLE); - setState(200); + setState(202); ((SysColumnsContext)_localctx).tableLike = likePattern(); } break; @@ -1128,6 +1133,7 @@ class SqlBaseParser extends Parser { case ANALYZED: case CATALOGS: case COLUMNS: + case CURRENT: case DAY: case DEBUG: case EXECUTABLE: @@ -1165,7 +1171,7 @@ class SqlBaseParser extends Parser { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(201); + setState(203); ((SysColumnsContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -1175,11 +1181,11 @@ class SqlBaseParser extends Parser { default: throw new NoViableAltException(this); } - setState(205); + setState(207); _la = _input.LA(1); if (_la==LIKE) { { - setState(204); + setState(206); ((SysColumnsContext)_localctx).columnPattern = likePattern(); } } @@ -1190,19 +1196,19 @@ class SqlBaseParser extends Parser { _localctx = new SysTypesContext(_localctx); enterOuterAlt(_localctx, 12); { - setState(207); + setState(209); match(SYS); - setState(208); + setState(210); match(TYPES); - setState(213); + setState(215); _la = _input.LA(1); - if (((((_la - 105)) & ~0x3f) == 0 && ((1L << (_la - 105)) & ((1L << (PLUS - 105)) | (1L << (MINUS - 105)) | (1L << (INTEGER_VALUE - 105)) | (1L << (DECIMAL_VALUE - 105)))) != 0)) { + if (((((_la - 107)) & ~0x3f) == 0 && ((1L << (_la - 107)) & ((1L << (PLUS - 107)) | (1L << (MINUS - 107)) | (1L << (INTEGER_VALUE - 107)) | (1L << (DECIMAL_VALUE - 107)))) != 0)) { { - setState(210); + setState(212); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(209); + setState(211); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -1212,7 +1218,7 @@ class SqlBaseParser extends Parser { } } - setState(212); + setState(214); ((SysTypesContext)_localctx).type = number(); } } @@ -1223,11 +1229,11 @@ class SqlBaseParser extends Parser { _localctx = new SysTableTypesContext(_localctx); enterOuterAlt(_localctx, 13); { - setState(215); - match(SYS); - setState(216); - match(TABLE); setState(217); + match(SYS); + setState(218); + match(TABLE); + setState(219); match(TYPES); } break; @@ -1281,34 +1287,34 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(229); + setState(231); _la = _input.LA(1); if (_la==WITH) { { - setState(220); + setState(222); match(WITH); - setState(221); + setState(223); namedQuery(); - setState(226); + setState(228); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(222); + setState(224); match(T__2); - setState(223); + setState(225); namedQuery(); } } - setState(228); + setState(230); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(231); + setState(233); queryNoWith(); } } @@ -1364,42 +1370,42 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(233); + setState(235); queryTerm(); - setState(244); + setState(246); _la = _input.LA(1); if (_la==ORDER) { { - setState(234); - match(ORDER); - setState(235); - match(BY); setState(236); + match(ORDER); + setState(237); + match(BY); + setState(238); orderBy(); - setState(241); + setState(243); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(237); + setState(239); match(T__2); - setState(238); + setState(240); 
orderBy(); } } - setState(243); + setState(245); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(247); + setState(249); _la = _input.LA(1); if (_la==LIMIT || _la==LIMIT_ESC) { { - setState(246); + setState(248); limitClause(); } } @@ -1448,14 +1454,14 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 10, RULE_limitClause); int _la; try { - setState(254); + setState(256); switch (_input.LA(1)) { case LIMIT: enterOuterAlt(_localctx, 1); { - setState(249); + setState(251); match(LIMIT); - setState(250); + setState(252); ((LimitClauseContext)_localctx).limit = _input.LT(1); _la = _input.LA(1); if ( !(_la==ALL || _la==INTEGER_VALUE) ) { @@ -1468,9 +1474,9 @@ class SqlBaseParser extends Parser { case LIMIT_ESC: enterOuterAlt(_localctx, 2); { - setState(251); + setState(253); match(LIMIT_ESC); - setState(252); + setState(254); ((LimitClauseContext)_localctx).limit = _input.LT(1); _la = _input.LA(1); if ( !(_la==ALL || _la==INTEGER_VALUE) ) { @@ -1478,7 +1484,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(253); + setState(255); match(ESC_END); } break; @@ -1551,13 +1557,13 @@ class SqlBaseParser extends Parser { QueryTermContext _localctx = new QueryTermContext(_ctx, getState()); enterRule(_localctx, 12, RULE_queryTerm); try { - setState(261); + setState(263); switch (_input.LA(1)) { case SELECT: _localctx = new QueryPrimaryDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(256); + setState(258); querySpecification(); } break; @@ -1565,11 +1571,11 @@ class SqlBaseParser extends Parser { _localctx = new SubqueryContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(257); - match(T__0); - setState(258); - queryNoWith(); setState(259); + match(T__0); + setState(260); + queryNoWith(); + setState(261); match(T__1); } break; @@ -1625,13 +1631,13 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(263); - expression(); setState(265); + expression(); + setState(267); _la = _input.LA(1); if (_la==ASC || _la==DESC) { { - setState(264); + setState(266); ((OrderByContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -1642,13 +1648,13 @@ class SqlBaseParser extends Parser { } } - setState(269); + setState(271); _la = _input.LA(1); if (_la==NULLS) { { - setState(267); + setState(269); match(NULLS); - setState(268); + setState(270); ((OrderByContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -1727,75 +1733,75 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(271); - match(SELECT); setState(273); + match(SELECT); + setState(275); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(272); + setState(274); setQuantifier(); } } - setState(275); + setState(277); selectItem(); - setState(280); + setState(282); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(276); + setState(278); match(T__2); - setState(277); + setState(279); selectItem(); } } - setState(282); + setState(284); _errHandler.sync(this); _la = _input.LA(1); } - setState(284); + setState(286); _la = _input.LA(1); if (_la==FROM) { { - setState(283); + setState(285); fromClause(); } } - setState(288); + setState(290); _la = _input.LA(1); if (_la==WHERE) { { - setState(286); + setState(288); match(WHERE); - setState(287); + setState(289); ((QuerySpecificationContext)_localctx).where = booleanExpression(0); } } - setState(293); + setState(295); _la = _input.LA(1); if 
(_la==GROUP) { { - setState(290); - match(GROUP); - setState(291); - match(BY); setState(292); + match(GROUP); + setState(293); + match(BY); + setState(294); groupBy(); } } - setState(297); + setState(299); _la = _input.LA(1); if (_la==HAVING) { { - setState(295); + setState(297); match(HAVING); - setState(296); + setState(298); ((QuerySpecificationContext)_localctx).having = booleanExpression(0); } } @@ -1847,23 +1853,23 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(299); + setState(301); match(FROM); - setState(300); + setState(302); relation(); - setState(305); + setState(307); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(301); + setState(303); match(T__2); - setState(302); + setState(304); relation(); } } - setState(307); + setState(309); _errHandler.sync(this); _la = _input.LA(1); } @@ -1916,30 +1922,30 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(309); + setState(311); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(308); + setState(310); setQuantifier(); } } - setState(311); + setState(313); groupingElement(); - setState(316); + setState(318); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(312); + setState(314); match(T__2); - setState(313); + setState(315); groupingElement(); } } - setState(318); + setState(320); _errHandler.sync(this); _la = _input.LA(1); } @@ -1994,7 +2000,7 @@ class SqlBaseParser extends Parser { _localctx = new SingleGroupingSetContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(319); + setState(321); groupingExpressions(); } } @@ -2040,47 +2046,47 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 24, RULE_groupingExpressions); int _la; try { - setState(334); + setState(336); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,40,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(321); + setState(323); match(T__0); - setState(330); + setState(332); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED))) != 0) || ((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & ((1L << (PARSED - 65)) | (1L << (PHYSICAL - 65)) | (1L << (PLAN - 65)) | (1L << (RIGHT - 65)) | (1L << (RLIKE - 65)) | (1L << (QUERY - 65)) | (1L << (SCHEMAS - 65)) | (1L << (SECOND - 65)) | (1L << (SHOW - 65)) | (1L << (SYS - 65)) | (1L << (TABLES - 65)) | (1L << (TEXT - 65)) | (1L << (TRUE - 65)) | (1L << (TYPE - 65)) | (1L << (TYPES - 65)) | (1L << (VERIFY - 65)) | (1L << (YEAR - 65)) | (1L << (FUNCTION_ESC - 65)) | (1L << (DATE_ESC - 65)) | (1L << (TIME_ESC - 65)) | (1L << (TIMESTAMP_ESC - 65)) | (1L << (GUID_ESC - 65)) | (1L << (PLUS - 65)) | (1L << (MINUS - 65)) | (1L << (ASTERISK - 65)) | (1L << (PARAM - 65)) | (1L << (STRING - 65)) | (1L << (INTEGER_VALUE - 65)) | (1L << (DECIMAL_VALUE - 65)) | (1L << (IDENTIFIER - 65)) | (1L << (DIGIT_IDENTIFIER - 65)) | (1L << (QUOTED_IDENTIFIER - 65)) | (1L << (BACKQUOTED_IDENTIFIER - 65)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && 
((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED))) != 0) || ((((_la - 67)) & ~0x3f) == 0 && ((1L << (_la - 67)) & ((1L << (PARSED - 67)) | (1L << (PHYSICAL - 67)) | (1L << (PLAN - 67)) | (1L << (RIGHT - 67)) | (1L << (RLIKE - 67)) | (1L << (QUERY - 67)) | (1L << (SCHEMAS - 67)) | (1L << (SECOND - 67)) | (1L << (SHOW - 67)) | (1L << (SYS - 67)) | (1L << (TABLES - 67)) | (1L << (TEXT - 67)) | (1L << (TRUE - 67)) | (1L << (TYPE - 67)) | (1L << (TYPES - 67)) | (1L << (VERIFY - 67)) | (1L << (YEAR - 67)) | (1L << (FUNCTION_ESC - 67)) | (1L << (DATE_ESC - 67)) | (1L << (TIME_ESC - 67)) | (1L << (TIMESTAMP_ESC - 67)) | (1L << (GUID_ESC - 67)) | (1L << (PLUS - 67)) | (1L << (MINUS - 67)) | (1L << (ASTERISK - 67)) | (1L << (PARAM - 67)) | (1L << (STRING - 67)) | (1L << (INTEGER_VALUE - 67)) | (1L << (DECIMAL_VALUE - 67)) | (1L << (IDENTIFIER - 67)) | (1L << (DIGIT_IDENTIFIER - 67)) | (1L << (QUOTED_IDENTIFIER - 67)) | (1L << (BACKQUOTED_IDENTIFIER - 67)))) != 0)) { { - setState(322); + setState(324); expression(); - setState(327); + setState(329); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(323); + setState(325); match(T__2); - setState(324); + setState(326); expression(); } } - setState(329); + setState(331); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(332); + setState(334); match(T__1); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(333); + setState(335); expression(); } break; @@ -2131,15 +2137,15 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(336); - ((NamedQueryContext)_localctx).name = identifier(); - setState(337); - match(AS); setState(338); - match(T__0); + ((NamedQueryContext)_localctx).name = identifier(); setState(339); - queryNoWith(); + match(AS); setState(340); + match(T__0); + setState(341); + queryNoWith(); + setState(342); match(T__1); } } @@ -2183,7 +2189,7 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(342); + setState(344); _la = _input.LA(1); if ( !(_la==ALL || _la==DISTINCT) ) { _errHandler.recoverInline(this); @@ -2246,23 +2252,23 @@ class SqlBaseParser extends Parser { _localctx = new SelectExpressionContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(344); + setState(346); expression(); - setState(349); + setState(351); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: { - setState(346); + setState(348); _la = _input.LA(1); if (_la==AS) { { - setState(345); + setState(347); match(AS); } } - setState(348); + setState(350); identifier(); } break; @@ -2316,19 +2322,19 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(351); + setState(353); relationPrimary(); - setState(355); + setState(357); _errHandler.sync(this); _la = _input.LA(1); - while (((((_la - 34)) & ~0x3f) == 0 && ((1L << (_la - 34)) & ((1L << (FULL - 34)) | (1L << (INNER - 34)) | (1L << (JOIN - 34)) | (1L << (LEFT - 34)) | (1L << (NATURAL - 34)) | (1L << (RIGHT - 34)))) 
!= 0)) { + while (((((_la - 36)) & ~0x3f) == 0 && ((1L << (_la - 36)) & ((1L << (FULL - 36)) | (1L << (INNER - 36)) | (1L << (JOIN - 36)) | (1L << (LEFT - 36)) | (1L << (NATURAL - 36)) | (1L << (RIGHT - 36)))) != 0)) { { { - setState(352); + setState(354); joinRelation(); } } - setState(357); + setState(359); _errHandler.sync(this); _la = _input.LA(1); } @@ -2382,7 +2388,7 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 34, RULE_joinRelation); int _la; try { - setState(369); + setState(371); switch (_input.LA(1)) { case FULL: case INNER: @@ -2392,18 +2398,18 @@ class SqlBaseParser extends Parser { enterOuterAlt(_localctx, 1); { { - setState(358); + setState(360); joinType(); } - setState(359); + setState(361); match(JOIN); - setState(360); - ((JoinRelationContext)_localctx).right = relationPrimary(); setState(362); + ((JoinRelationContext)_localctx).right = relationPrimary(); + setState(364); _la = _input.LA(1); if (_la==ON || _la==USING) { { - setState(361); + setState(363); joinCriteria(); } } @@ -2413,13 +2419,13 @@ class SqlBaseParser extends Parser { case NATURAL: enterOuterAlt(_localctx, 2); { - setState(364); - match(NATURAL); - setState(365); - joinType(); setState(366); - match(JOIN); + match(NATURAL); setState(367); + joinType(); + setState(368); + match(JOIN); + setState(369); ((JoinRelationContext)_localctx).right = relationPrimary(); } break; @@ -2468,17 +2474,17 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 36, RULE_joinType); int _la; try { - setState(386); + setState(388); switch (_input.LA(1)) { case INNER: case JOIN: enterOuterAlt(_localctx, 1); { - setState(372); + setState(374); _la = _input.LA(1); if (_la==INNER) { { - setState(371); + setState(373); match(INNER); } } @@ -2488,13 +2494,13 @@ class SqlBaseParser extends Parser { case LEFT: enterOuterAlt(_localctx, 2); { - setState(374); - match(LEFT); setState(376); + match(LEFT); + setState(378); _la = _input.LA(1); if (_la==OUTER) { { - setState(375); + setState(377); match(OUTER); } } @@ -2504,13 +2510,13 @@ class SqlBaseParser extends Parser { case RIGHT: enterOuterAlt(_localctx, 3); { - setState(378); - match(RIGHT); setState(380); + match(RIGHT); + setState(382); _la = _input.LA(1); if (_la==OUTER) { { - setState(379); + setState(381); match(OUTER); } } @@ -2520,13 +2526,13 @@ class SqlBaseParser extends Parser { case FULL: enterOuterAlt(_localctx, 4); { - setState(382); - match(FULL); setState(384); + match(FULL); + setState(386); _la = _input.LA(1); if (_la==OUTER) { { - setState(383); + setState(385); match(OUTER); } } @@ -2584,43 +2590,43 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 38, RULE_joinCriteria); int _la; try { - setState(402); + setState(404); switch (_input.LA(1)) { case ON: enterOuterAlt(_localctx, 1); { - setState(388); + setState(390); match(ON); - setState(389); + setState(391); booleanExpression(0); } break; case USING: enterOuterAlt(_localctx, 2); { - setState(390); - match(USING); - setState(391); - match(T__0); setState(392); + match(USING); + setState(393); + match(T__0); + setState(394); identifier(); - setState(397); + setState(399); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(393); + setState(395); match(T__2); - setState(394); + setState(396); identifier(); } } - setState(399); + setState(401); _errHandler.sync(this); _la = _input.LA(1); } - setState(400); + setState(402); match(T__1); } break; @@ -2725,30 +2731,30 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 40, RULE_relationPrimary); int 
_la; try { - setState(429); + setState(431); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,59,_ctx) ) { case 1: _localctx = new TableNameContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(404); + setState(406); tableIdentifier(); - setState(409); + setState(411); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,54,_ctx) ) { case 1: { - setState(406); + setState(408); _la = _input.LA(1); if (_la==AS) { { - setState(405); + setState(407); match(AS); } } - setState(408); + setState(410); qualifiedName(); } break; @@ -2759,27 +2765,27 @@ class SqlBaseParser extends Parser { _localctx = new AliasedQueryContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(411); - match(T__0); - setState(412); - queryNoWith(); setState(413); + match(T__0); + setState(414); + queryNoWith(); + setState(415); match(T__1); - setState(418); + setState(420); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { case 1: { - setState(415); + setState(417); _la = _input.LA(1); if (_la==AS) { { - setState(414); + setState(416); match(AS); } } - setState(417); + setState(419); qualifiedName(); } break; @@ -2790,27 +2796,27 @@ class SqlBaseParser extends Parser { _localctx = new AliasedRelationContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(420); - match(T__0); - setState(421); - relation(); setState(422); + match(T__0); + setState(423); + relation(); + setState(424); match(T__1); - setState(427); + setState(429); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { case 1: { - setState(424); + setState(426); _la = _input.LA(1); if (_la==AS) { { - setState(423); + setState(425); match(AS); } } - setState(426); + setState(428); qualifiedName(); } break; @@ -2859,7 +2865,7 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(431); + setState(433); booleanExpression(0); } } @@ -3067,7 +3073,7 @@ class SqlBaseParser extends Parser { int _alt; enterOuterAlt(_localctx, 1); { - setState(464); + setState(466); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,60,_ctx) ) { case 1: @@ -3076,9 +3082,9 @@ class SqlBaseParser extends Parser { _ctx = _localctx; _prevctx = _localctx; - setState(434); + setState(436); match(NOT); - setState(435); + setState(437); booleanExpression(8); } break; @@ -3087,13 +3093,13 @@ class SqlBaseParser extends Parser { _localctx = new ExistsContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(436); - match(EXISTS); - setState(437); - match(T__0); setState(438); - query(); + match(EXISTS); setState(439); + match(T__0); + setState(440); + query(); + setState(441); match(T__1); } break; @@ -3102,15 +3108,15 @@ class SqlBaseParser extends Parser { _localctx = new StringQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(441); - match(QUERY); - setState(442); - match(T__0); setState(443); - ((StringQueryContext)_localctx).queryString = string(); + match(QUERY); setState(444); - matchQueryOptions(); + match(T__0); setState(445); + ((StringQueryContext)_localctx).queryString = string(); + setState(446); + matchQueryOptions(); + setState(447); match(T__1); } break; @@ -3119,19 +3125,19 @@ class SqlBaseParser extends Parser { _localctx = new MatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(447); - match(MATCH); - setState(448); - match(T__0); setState(449); - ((MatchQueryContext)_localctx).singleField = qualifiedName(); + match(MATCH); 
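// The long conditionals a few hunks up, for example the join loop's
// `((((_la - 36)) & ~0x3f) == 0 && ((1L << (_la - 36)) & (... (FULL - 36) ...)) != 0`,
// are ANTLR's generated token-set membership tests: the lookahead token type is
// mapped into a 64-bit window and checked against a precomputed bitmask. The
// window bases all moved (34 -> 36, 65 -> 67, 105 -> 107, ...) because the two
// new tokens shifted every token type that sorts after them. A standalone sketch
// of the same test, with hypothetical token values:
final class TokenSetSketch {
    static final int BASE = 36;                         // first token type in this window
    static final int FULL = 36, INNER = 38, JOIN = 42;  // illustrative values only
    static final long MASK =
        (1L << (FULL - BASE)) | (1L << (INNER - BASE)) | (1L << (JOIN - BASE));

    static boolean inSet(int tokenType) {
        int offset = tokenType - BASE;
        // `& ~0x3f` rejects anything outside the 64-token window (including negatives)
        return (offset & ~0x3f) == 0 && ((1L << offset) & MASK) != 0;
    }
}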
setState(450); - match(T__2); + match(T__0); setState(451); - ((MatchQueryContext)_localctx).queryString = string(); + ((MatchQueryContext)_localctx).singleField = qualifiedName(); setState(452); - matchQueryOptions(); + match(T__2); setState(453); + ((MatchQueryContext)_localctx).queryString = string(); + setState(454); + matchQueryOptions(); + setState(455); match(T__1); } break; @@ -3140,19 +3146,19 @@ class SqlBaseParser extends Parser { _localctx = new MultiMatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(455); - match(MATCH); - setState(456); - match(T__0); setState(457); - ((MultiMatchQueryContext)_localctx).multiFields = string(); + match(MATCH); setState(458); - match(T__2); + match(T__0); setState(459); - ((MultiMatchQueryContext)_localctx).queryString = string(); + ((MultiMatchQueryContext)_localctx).multiFields = string(); setState(460); - matchQueryOptions(); + match(T__2); setState(461); + ((MultiMatchQueryContext)_localctx).queryString = string(); + setState(462); + matchQueryOptions(); + setState(463); match(T__1); } break; @@ -3161,13 +3167,13 @@ class SqlBaseParser extends Parser { _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(463); + setState(465); predicated(); } break; } _ctx.stop = _input.LT(-1); - setState(474); + setState(476); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,62,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -3175,7 +3181,7 @@ class SqlBaseParser extends Parser { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(472); + setState(474); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,61,_ctx) ) { case 1: @@ -3183,11 +3189,11 @@ class SqlBaseParser extends Parser { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(466); - if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(467); - ((LogicalBinaryContext)_localctx).operator = match(AND); setState(468); + if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); + setState(469); + ((LogicalBinaryContext)_localctx).operator = match(AND); + setState(470); ((LogicalBinaryContext)_localctx).right = booleanExpression(3); } break; @@ -3196,18 +3202,18 @@ class SqlBaseParser extends Parser { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(469); - if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(470); - ((LogicalBinaryContext)_localctx).operator = match(OR); setState(471); + if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); + setState(472); + ((LogicalBinaryContext)_localctx).operator = match(OR); + setState(473); ((LogicalBinaryContext)_localctx).right = booleanExpression(2); } break; } } } - setState(476); + setState(478); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,62,_ctx); } @@ -3257,19 +3263,19 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(481); + setState(483); _errHandler.sync(this); _la = _input.LA(1); while 
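// The booleanExpression logic above is ANTLR's rewrite of a left-recursive rule:
// precpred(_ctx, n) gates each binary alternative by precedence, so AND (guarded
// by precpred(_ctx, 2), right operand parsed at level 3) binds tighter than OR
// (guarded by precpred(_ctx, 1), right operand at level 2), while the enclosing
// loop keeps extending the left operand. The hand-written counterpart is
// precedence climbing; a toy sketch (token handling is an assumption, not the
// plugin's code):
final class PrecedenceClimbSketch {
    static int prec(String op) {
        return "AND".equals(op) ? 2 : "OR".equals(op) ? 1 : -1;
    }

    // parses identifiers joined by AND/OR, returning a fully parenthesized form
    static String parse(String[] tokens, int[] pos, int minPrec) {
        String left = tokens[pos[0]++];                      // operand
        while (pos[0] < tokens.length && prec(tokens[pos[0]]) >= minPrec) {
            String op = tokens[pos[0]++];
            String right = parse(tokens, pos, prec(op) + 1); // +1 keeps AND/OR left-associative
            left = "(" + left + " " + op + " " + right + ")";
        }
        return left;
    }
}
// parse(new String[] {"a", "OR", "b", "AND", "c"}, new int[] {0}, 0)
// yields "(a OR (b AND c))", matching the generated precedence.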
(_la==T__2) { { { - setState(477); + setState(479); match(T__2); - setState(478); + setState(480); string(); } } - setState(483); + setState(485); _errHandler.sync(this); _la = _input.LA(1); } @@ -3318,14 +3324,14 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(484); - valueExpression(0); setState(486); + valueExpression(0); + setState(488); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,64,_ctx) ) { case 1: { - setState(485); + setState(487); predicate(); } break; @@ -3401,142 +3407,142 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 50, RULE_predicate); int _la; try { - setState(534); + setState(536); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,72,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(489); + setState(491); _la = _input.LA(1); if (_la==NOT) { { - setState(488); + setState(490); match(NOT); } } - setState(491); - ((PredicateContext)_localctx).kind = match(BETWEEN); - setState(492); - ((PredicateContext)_localctx).lower = valueExpression(0); setState(493); - match(AND); + ((PredicateContext)_localctx).kind = match(BETWEEN); setState(494); + ((PredicateContext)_localctx).lower = valueExpression(0); + setState(495); + match(AND); + setState(496); ((PredicateContext)_localctx).upper = valueExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(497); + setState(499); _la = _input.LA(1); if (_la==NOT) { { - setState(496); + setState(498); match(NOT); } } - setState(499); - ((PredicateContext)_localctx).kind = match(IN); - setState(500); - match(T__0); setState(501); + ((PredicateContext)_localctx).kind = match(IN); + setState(502); + match(T__0); + setState(503); expression(); - setState(506); + setState(508); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(502); + setState(504); match(T__2); - setState(503); + setState(505); expression(); } } - setState(508); + setState(510); _errHandler.sync(this); _la = _input.LA(1); } - setState(509); + setState(511); match(T__1); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(512); + setState(514); _la = _input.LA(1); if (_la==NOT) { { - setState(511); + setState(513); match(NOT); } } - setState(514); - ((PredicateContext)_localctx).kind = match(IN); - setState(515); - match(T__0); setState(516); - query(); + ((PredicateContext)_localctx).kind = match(IN); setState(517); + match(T__0); + setState(518); + query(); + setState(519); match(T__1); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(520); + setState(522); _la = _input.LA(1); if (_la==NOT) { { - setState(519); + setState(521); match(NOT); } } - setState(522); + setState(524); ((PredicateContext)_localctx).kind = match(LIKE); - setState(523); + setState(525); pattern(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(525); + setState(527); _la = _input.LA(1); if (_la==NOT) { { - setState(524); + setState(526); match(NOT); } } - setState(527); + setState(529); ((PredicateContext)_localctx).kind = match(RLIKE); - setState(528); + setState(530); ((PredicateContext)_localctx).regex = string(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(529); - match(IS); setState(531); + match(IS); + setState(533); _la = _input.LA(1); if (_la==NOT) { { - setState(530); + setState(532); match(NOT); } } - setState(533); + setState(535); ((PredicateContext)_localctx).kind = match(NULL); } break; @@ -3583,9 +3589,9 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - 
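// The predicate rule decoded just above has six alternatives, most taking an
// optional NOT: [NOT] BETWEEN lower AND upper, [NOT] IN (expr, ...),
// [NOT] IN (subquery), [NOT] LIKE pattern, [NOT] RLIKE regex, and IS [NOT] NULL.
// A hypothetical value-class mirror of those shapes (names are illustrative, not
// the SQL plugin's real AST; InQuery and RLike would follow the same shape):
abstract class PredicateSketch {
    final boolean negated;                  // the optional leading NOT
    PredicateSketch(boolean negated) { this.negated = negated; }

    static final class Between extends PredicateSketch {
        final Object lower, upper;
        Between(boolean negated, Object lower, Object upper) {
            super(negated); this.lower = lower; this.upper = upper;
        }
    }
    static final class InList extends PredicateSketch {
        final java.util.List<Object> values;
        InList(boolean negated, java.util.List<Object> values) {
            super(negated); this.values = values;
        }
    }
    static final class Like extends PredicateSketch {
        final String pattern;
        Like(boolean negated, String pattern) { super(negated); this.pattern = pattern; }
    }
    static final class IsNull extends PredicateSketch {
        IsNull(boolean negated) { super(negated); }
    }
}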
setState(536); + setState(538); match(LIKE); - setState(537); + setState(539); pattern(); } } @@ -3633,14 +3639,14 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(539); - ((PatternContext)_localctx).value = string(); setState(541); + ((PatternContext)_localctx).value = string(); + setState(543); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,73,_ctx) ) { case 1: { - setState(540); + setState(542); patternEscape(); } break; @@ -3688,25 +3694,25 @@ class SqlBaseParser extends Parser { PatternEscapeContext _localctx = new PatternEscapeContext(_ctx, getState()); enterRule(_localctx, 56, RULE_patternEscape); try { - setState(549); + setState(551); switch (_input.LA(1)) { case ESCAPE: enterOuterAlt(_localctx, 1); { - setState(543); + setState(545); match(ESCAPE); - setState(544); + setState(546); ((PatternEscapeContext)_localctx).escape = string(); } break; case ESCAPE_ESC: enterOuterAlt(_localctx, 2); { - setState(545); - match(ESCAPE_ESC); - setState(546); - ((PatternEscapeContext)_localctx).escape = string(); setState(547); + match(ESCAPE_ESC); + setState(548); + ((PatternEscapeContext)_localctx).escape = string(); + setState(549); match(ESC_END); } break; @@ -3851,7 +3857,7 @@ class SqlBaseParser extends Parser { int _alt; enterOuterAlt(_localctx, 1); { - setState(555); + setState(557); switch (_input.LA(1)) { case T__0: case ANALYZE: @@ -3860,6 +3866,8 @@ class SqlBaseParser extends Parser { case CATALOGS: case COLUMNS: case CONVERT: + case CURRENT: + case CURRENT_TIMESTAMP: case DAY: case DEBUG: case EXECUTABLE: @@ -3916,7 +3924,7 @@ class SqlBaseParser extends Parser { _ctx = _localctx; _prevctx = _localctx; - setState(552); + setState(554); primaryExpression(); } break; @@ -3926,7 +3934,7 @@ class SqlBaseParser extends Parser { _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(553); + setState(555); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -3934,7 +3942,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(554); + setState(556); valueExpression(4); } break; @@ -3942,7 +3950,7 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } _ctx.stop = _input.LT(-1); - setState(569); + setState(571); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,77,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -3950,7 +3958,7 @@ class SqlBaseParser extends Parser { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(567); + setState(569); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,76,_ctx) ) { case 1: @@ -3958,17 +3966,17 @@ class SqlBaseParser extends Parser { _localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(557); + setState(559); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(558); + setState(560); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !(((((_la - 107)) & ~0x3f) == 0 && ((1L << (_la - 107)) & ((1L << (ASTERISK - 107)) | (1L << (SLASH - 107)) | (1L << (PERCENT - 107)))) != 0)) ) { + if ( !(((((_la - 109)) & ~0x3f) == 0 && ((1L << (_la - 109)) & ((1L << (ASTERISK - 
109)) | (1L << (SLASH - 109)) | (1L << (PERCENT - 109)))) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { consume(); } - setState(559); + setState(561); ((ArithmeticBinaryContext)_localctx).right = valueExpression(4); } break; @@ -3977,9 +3985,9 @@ class SqlBaseParser extends Parser { _localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(560); + setState(562); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(561); + setState(563); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -3987,7 +3995,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(562); + setState(564); ((ArithmeticBinaryContext)_localctx).right = valueExpression(3); } break; @@ -3996,18 +4004,18 @@ class SqlBaseParser extends Parser { _localctx = new ComparisonContext(new ValueExpressionContext(_parentctx, _parentState)); ((ComparisonContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(563); - if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(564); - comparisonOperator(); setState(565); + if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); + setState(566); + comparisonOperator(); + setState(567); ((ComparisonContext)_localctx).right = valueExpression(2); } break; } } } - setState(571); + setState(573); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,77,_ctx); } @@ -4170,6 +4178,25 @@ class SqlBaseParser extends Parser { else return visitor.visitChildren(this); } } + public static class CurrentDateTimeFunctionContext extends PrimaryExpressionContext { + public BuiltinDateTimeFunctionContext builtinDateTimeFunction() { + return getRuleContext(BuiltinDateTimeFunctionContext.class,0); + } + public CurrentDateTimeFunctionContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterCurrentDateTimeFunction(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitCurrentDateTimeFunction(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitCurrentDateTimeFunction(this); + else return visitor.visitChildren(this); + } + } public static class SubqueryExpressionContext extends PrimaryExpressionContext { public QueryContext query() { return getRuleContext(QueryContext.class,0); @@ -4195,14 +4222,14 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 60, RULE_primaryExpression); int _la; try { - setState(591); + setState(594); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,79,_ctx) ) { case 1: _localctx = new CastContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(572); + setState(574); castExpression(); } break; @@ -4210,74 +4237,82 @@ class SqlBaseParser extends Parser { _localctx = new ExtractContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(573); + setState(575); extractExpression(); } break; case 3: - 
_localctx = new ConstantDefaultContext(_localctx); + _localctx = new CurrentDateTimeFunctionContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(574); - constant(); + setState(576); + builtinDateTimeFunction(); } break; case 4: - _localctx = new StarContext(_localctx); + _localctx = new ConstantDefaultContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(578); + setState(577); + constant(); + } + break; + case 5: + _localctx = new StarContext(_localctx); + enterOuterAlt(_localctx, 5); + { + setState(581); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH) | (1L << OPTIMIZED))) != 0) || ((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & ((1L << (PARSED - 65)) | (1L << (PHYSICAL - 65)) | (1L << (PLAN - 65)) | (1L << (RLIKE - 65)) | (1L << (QUERY - 65)) | (1L << (SCHEMAS - 65)) | (1L << (SECOND - 65)) | (1L << (SHOW - 65)) | (1L << (SYS - 65)) | (1L << (TABLES - 65)) | (1L << (TEXT - 65)) | (1L << (TYPE - 65)) | (1L << (TYPES - 65)) | (1L << (VERIFY - 65)) | (1L << (YEAR - 65)) | (1L << (IDENTIFIER - 65)) | (1L << (DIGIT_IDENTIFIER - 65)) | (1L << (QUOTED_IDENTIFIER - 65)) | (1L << (BACKQUOTED_IDENTIFIER - 65)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH) | (1L << OPTIMIZED))) != 0) || ((((_la - 67)) & ~0x3f) == 0 && ((1L << (_la - 67)) & ((1L << (PARSED - 67)) | (1L << (PHYSICAL - 67)) | (1L << (PLAN - 67)) | (1L << (RLIKE - 67)) | (1L << (QUERY - 67)) | (1L << (SCHEMAS - 67)) | (1L << (SECOND - 67)) | (1L << (SHOW - 67)) | (1L << (SYS - 67)) | (1L << (TABLES - 67)) | (1L << (TEXT - 67)) | (1L << (TYPE - 67)) | (1L << (TYPES - 67)) | (1L << (VERIFY - 67)) | (1L << (YEAR - 67)) | (1L << (IDENTIFIER - 67)) | (1L << (DIGIT_IDENTIFIER - 67)) | (1L << (QUOTED_IDENTIFIER - 67)) | (1L << (BACKQUOTED_IDENTIFIER - 67)))) != 0)) { { - setState(575); + setState(578); qualifiedName(); - setState(576); + setState(579); match(DOT); } } - setState(580); + setState(583); match(ASTERISK); } break; - case 5: + case 6: _localctx = new FunctionContext(_localctx); - enterOuterAlt(_localctx, 5); + enterOuterAlt(_localctx, 6); { - setState(581); + setState(584); functionExpression(); } break; - case 6: + case 7: _localctx = new SubqueryExpressionContext(_localctx); - enterOuterAlt(_localctx, 6); + enterOuterAlt(_localctx, 7); { - setState(582); + setState(585); match(T__0); - setState(583); + setState(586); query(); - setState(584); + setState(587); match(T__1); } break; - case 7: + case 8: _localctx = new DereferenceContext(_localctx); - enterOuterAlt(_localctx, 7); + enterOuterAlt(_localctx, 8); { - setState(586); + setState(589); qualifiedName(); } break; - case 8: + case 9: _localctx = new ParenthesizedExpressionContext(_localctx); - enterOuterAlt(_localctx, 8); + enterOuterAlt(_localctx, 9); { - setState(587); + setState(590); match(T__0); - setState(588); + setState(591); expression(); - 
setState(589); + setState(592); match(T__1); } break; @@ -4326,42 +4361,42 @@ class SqlBaseParser extends Parser { CastExpressionContext _localctx = new CastExpressionContext(_ctx, getState()); enterRule(_localctx, 62, RULE_castExpression); try { - setState(603); + setState(606); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,80,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(593); + setState(596); castTemplate(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(594); + setState(597); match(FUNCTION_ESC); - setState(595); + setState(598); castTemplate(); - setState(596); + setState(599); match(ESC_END); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(598); + setState(601); convertTemplate(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(599); + setState(602); match(FUNCTION_ESC); - setState(600); + setState(603); convertTemplate(); - setState(601); + setState(604); match(ESC_END); } break; @@ -4412,17 +4447,17 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(605); - match(CAST); - setState(606); - match(T__0); - setState(607); - expression(); setState(608); - match(AS); + match(CAST); setState(609); - dataType(); + match(T__0); setState(610); + expression(); + setState(611); + match(AS); + setState(612); + dataType(); + setState(613); match(T__1); } } @@ -4437,6 +4472,73 @@ class SqlBaseParser extends Parser { return _localctx; } + public static class BuiltinDateTimeFunctionContext extends ParserRuleContext { + public Token name; + public Token precision; + public TerminalNode CURRENT_TIMESTAMP() { return getToken(SqlBaseParser.CURRENT_TIMESTAMP, 0); } + public TerminalNode INTEGER_VALUE() { return getToken(SqlBaseParser.INTEGER_VALUE, 0); } + public BuiltinDateTimeFunctionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_builtinDateTimeFunction; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterBuiltinDateTimeFunction(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitBuiltinDateTimeFunction(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitBuiltinDateTimeFunction(this); + else return visitor.visitChildren(this); + } + } + + public final BuiltinDateTimeFunctionContext builtinDateTimeFunction() throws RecognitionException { + BuiltinDateTimeFunctionContext _localctx = new BuiltinDateTimeFunctionContext(_ctx, getState()); + enterRule(_localctx, 66, RULE_builtinDateTimeFunction); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(615); + ((BuiltinDateTimeFunctionContext)_localctx).name = match(CURRENT_TIMESTAMP); + setState(621); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,82,_ctx) ) { + case 1: + { + setState(616); + match(T__0); + setState(618); + _la = _input.LA(1); + if (_la==INTEGER_VALUE) { + { + setState(617); + ((BuiltinDateTimeFunctionContext)_localctx).precision = match(INTEGER_VALUE); + } + } + + setState(620); + match(T__1); + } + break; + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + 
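// builtinDateTimeFunction() above is the newly generated rule behind this diff's
// state renumbering: CURRENT_TIMESTAMP followed by an optional parenthesized,
// optional integer precision, so CURRENT_TIMESTAMP, CURRENT_TIMESTAMP() and
// CURRENT_TIMESTAMP(3) all parse. It is wired into primaryExpression as the new
// CurrentDateTimeFunctionContext alternative, which is why the later alternatives
// renumbered from cases 3..8 to 4..9. A hand-written equivalent of the rule's
// logic over pre-split tokens (a sketch, not the generated ATN machinery):
final class CurrentTimestampSketch {
    /** Returns the precision, or -1 when none is given. */
    static int parse(java.util.List<String> tokens) {
        int i = 0;
        if (!"CURRENT_TIMESTAMP".equals(tokens.get(i++))) {
            throw new IllegalArgumentException("expected CURRENT_TIMESTAMP");
        }
        int precision = -1;
        if (i < tokens.size() && "(".equals(tokens.get(i))) {
            i++;                                      // consume '('
            if (tokens.get(i).matches("\\d+")) {      // optional INTEGER_VALUE
                precision = Integer.parseInt(tokens.get(i++));
            }
            if (!")".equals(tokens.get(i))) {
                throw new IllegalArgumentException("expected ')'");
            }
        }
        return precision;
    }
}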
public static class ConvertTemplateContext extends ParserRuleContext { public TerminalNode CONVERT() { return getToken(SqlBaseParser.CONVERT, 0); } public ExpressionContext expression() { @@ -4466,21 +4568,21 @@ class SqlBaseParser extends Parser { public final ConvertTemplateContext convertTemplate() throws RecognitionException { ConvertTemplateContext _localctx = new ConvertTemplateContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_convertTemplate); + enterRule(_localctx, 68, RULE_convertTemplate); try { enterOuterAlt(_localctx, 1); { - setState(612); + setState(623); match(CONVERT); - setState(613); + setState(624); match(T__0); - setState(614); + setState(625); expression(); - setState(615); + setState(626); match(T__2); - setState(616); + setState(627); dataType(); - setState(617); + setState(628); match(T__1); } } @@ -4522,25 +4624,25 @@ class SqlBaseParser extends Parser { public final ExtractExpressionContext extractExpression() throws RecognitionException { ExtractExpressionContext _localctx = new ExtractExpressionContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_extractExpression); + enterRule(_localctx, 70, RULE_extractExpression); try { - setState(624); + setState(635); switch (_input.LA(1)) { case EXTRACT: enterOuterAlt(_localctx, 1); { - setState(619); + setState(630); extractTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(620); + setState(631); match(FUNCTION_ESC); - setState(621); + setState(632); extractTemplate(); - setState(622); + setState(633); match(ESC_END); } break; @@ -4590,21 +4692,21 @@ class SqlBaseParser extends Parser { public final ExtractTemplateContext extractTemplate() throws RecognitionException { ExtractTemplateContext _localctx = new ExtractTemplateContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_extractTemplate); + enterRule(_localctx, 72, RULE_extractTemplate); try { enterOuterAlt(_localctx, 1); { - setState(626); + setState(637); match(EXTRACT); - setState(627); + setState(638); match(T__0); - setState(628); + setState(639); ((ExtractTemplateContext)_localctx).field = identifier(); - setState(629); + setState(640); match(FROM); - setState(630); + setState(641); valueExpression(0); - setState(631); + setState(642); match(T__1); } } @@ -4645,14 +4747,15 @@ class SqlBaseParser extends Parser { public final FunctionExpressionContext functionExpression() throws RecognitionException { FunctionExpressionContext _localctx = new FunctionExpressionContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_functionExpression); + enterRule(_localctx, 74, RULE_functionExpression); try { - setState(638); + setState(649); switch (_input.LA(1)) { case ANALYZE: case ANALYZED: case CATALOGS: case COLUMNS: + case CURRENT: case DAY: case DEBUG: case EXECUTABLE: @@ -4692,18 +4795,18 @@ class SqlBaseParser extends Parser { case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(633); + setState(644); functionTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(634); + setState(645); match(FUNCTION_ESC); - setState(635); + setState(646); functionTemplate(); - setState(636); + setState(647); match(ESC_END); } break; @@ -4756,50 +4859,50 @@ class SqlBaseParser extends Parser { public final FunctionTemplateContext functionTemplate() throws RecognitionException { FunctionTemplateContext _localctx = new FunctionTemplateContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_functionTemplate); + enterRule(_localctx, 76, RULE_functionTemplate); int _la; try { 
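// The templates decoded above pair each built-in with its ODBC/JDBC escape form:
// CONVERT(expr, dataType) and EXTRACT(field FROM expr) are also reachable as
// {fn CONVERT(...)} / {fn EXTRACT(...)} through FUNCTION_ESC ... ESC_END. An
// illustrative JDBC usage, assuming the escape and plain forms reach the same
// template rule (the URL, index and column names here are assumptions):
final class EscapeSyntaxSketch {
    static void demo() throws java.sql.SQLException {
        try (java.sql.Connection es =
                 java.sql.DriverManager.getConnection("jdbc:es://localhost:9200");
             java.sql.Statement st = es.createStatement()) {
            st.executeQuery("SELECT EXTRACT(YEAR FROM birth_date) FROM emp");
            st.executeQuery("SELECT {fn EXTRACT(YEAR FROM birth_date)} FROM emp"); // same parse
        }
    }
}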
enterOuterAlt(_localctx, 1); { - setState(640); + setState(651); functionName(); - setState(641); + setState(652); match(T__0); - setState(653); + setState(664); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << DAY) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED))) != 0) || ((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & ((1L << (PARSED - 65)) | (1L << (PHYSICAL - 65)) | (1L << (PLAN - 65)) | (1L << (RIGHT - 65)) | (1L << (RLIKE - 65)) | (1L << (QUERY - 65)) | (1L << (SCHEMAS - 65)) | (1L << (SECOND - 65)) | (1L << (SHOW - 65)) | (1L << (SYS - 65)) | (1L << (TABLES - 65)) | (1L << (TEXT - 65)) | (1L << (TRUE - 65)) | (1L << (TYPE - 65)) | (1L << (TYPES - 65)) | (1L << (VERIFY - 65)) | (1L << (YEAR - 65)) | (1L << (FUNCTION_ESC - 65)) | (1L << (DATE_ESC - 65)) | (1L << (TIME_ESC - 65)) | (1L << (TIMESTAMP_ESC - 65)) | (1L << (GUID_ESC - 65)) | (1L << (PLUS - 65)) | (1L << (MINUS - 65)) | (1L << (ASTERISK - 65)) | (1L << (PARAM - 65)) | (1L << (STRING - 65)) | (1L << (INTEGER_VALUE - 65)) | (1L << (DECIMAL_VALUE - 65)) | (1L << (IDENTIFIER - 65)) | (1L << (DIGIT_IDENTIFIER - 65)) | (1L << (QUOTED_IDENTIFIER - 65)) | (1L << (BACKQUOTED_IDENTIFIER - 65)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED))) != 0) || ((((_la - 67)) & ~0x3f) == 0 && ((1L << (_la - 67)) & ((1L << (PARSED - 67)) | (1L << (PHYSICAL - 67)) | (1L << (PLAN - 67)) | (1L << (RIGHT - 67)) | (1L << (RLIKE - 67)) | (1L << (QUERY - 67)) | (1L << (SCHEMAS - 67)) | (1L << (SECOND - 67)) | (1L << (SHOW - 67)) | (1L << (SYS - 67)) | (1L << (TABLES - 67)) | (1L << (TEXT - 67)) | (1L << (TRUE - 67)) | (1L << (TYPE - 67)) | (1L << (TYPES - 67)) | (1L << (VERIFY - 67)) | (1L << (YEAR - 67)) | (1L << (FUNCTION_ESC - 67)) | (1L << (DATE_ESC - 67)) | (1L << (TIME_ESC - 67)) | (1L << (TIMESTAMP_ESC - 67)) | (1L << (GUID_ESC - 67)) | (1L << (PLUS - 67)) | (1L << (MINUS - 67)) | (1L << (ASTERISK - 67)) | (1L << (PARAM - 67)) | (1L << (STRING - 67)) | (1L << (INTEGER_VALUE - 67)) | (1L << (DECIMAL_VALUE - 67)) | (1L << (IDENTIFIER - 67)) | (1L << (DIGIT_IDENTIFIER - 67)) | (1L << (QUOTED_IDENTIFIER - 67)) | (1L << (BACKQUOTED_IDENTIFIER - 67)))) != 0)) { { - setState(643); + setState(654); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(642); + setState(653); setQuantifier(); } } - setState(645); + setState(656); expression(); - setState(650); + setState(661); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - 
setState(646); + setState(657); match(T__2); - setState(647); + setState(658); expression(); } } - setState(652); + setState(663); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(655); + setState(666); match(T__1); } } @@ -4841,21 +4944,21 @@ class SqlBaseParser extends Parser { public final FunctionNameContext functionName() throws RecognitionException { FunctionNameContext _localctx = new FunctionNameContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_functionName); + enterRule(_localctx, 78, RULE_functionName); try { - setState(660); + setState(671); switch (_input.LA(1)) { case LEFT: enterOuterAlt(_localctx, 1); { - setState(657); + setState(668); match(LEFT); } break; case RIGHT: enterOuterAlt(_localctx, 2); { - setState(658); + setState(669); match(RIGHT); } break; @@ -4863,6 +4966,7 @@ class SqlBaseParser extends Parser { case ANALYZED: case CATALOGS: case COLUMNS: + case CURRENT: case DAY: case DEBUG: case EXECUTABLE: @@ -4900,7 +5004,7 @@ class SqlBaseParser extends Parser { case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 3); { - setState(659); + setState(670); identifier(); } break; @@ -5128,16 +5232,16 @@ class SqlBaseParser extends Parser { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_constant); + enterRule(_localctx, 80, RULE_constant); try { int _alt; - setState(688); + setState(699); switch (_input.LA(1)) { case NULL: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(662); + setState(673); match(NULL); } break; @@ -5145,7 +5249,7 @@ class SqlBaseParser extends Parser { _localctx = new IntervalLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(663); + setState(674); interval(); } break; @@ -5154,7 +5258,7 @@ class SqlBaseParser extends Parser { _localctx = new NumericLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(664); + setState(675); number(); } break; @@ -5163,7 +5267,7 @@ class SqlBaseParser extends Parser { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(665); + setState(676); booleanValue(); } break; @@ -5171,7 +5275,7 @@ class SqlBaseParser extends Parser { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(667); + setState(678); _errHandler.sync(this); _alt = 1; do { @@ -5179,7 +5283,7 @@ class SqlBaseParser extends Parser { case 1: { { - setState(666); + setState(677); match(STRING); } } @@ -5187,9 +5291,9 @@ class SqlBaseParser extends Parser { default: throw new NoViableAltException(this); } - setState(669); + setState(680); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,87,_ctx); + _alt = getInterpreter().adaptivePredict(_input,89,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); } break; @@ -5197,7 +5301,7 @@ class SqlBaseParser extends Parser { _localctx = new ParamLiteralContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(671); + setState(682); match(PARAM); } break; @@ -5205,11 +5309,11 @@ class SqlBaseParser extends Parser { _localctx = new DateEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(672); + setState(683); match(DATE_ESC); - setState(673); + setState(684); string(); - setState(674); + setState(685); match(ESC_END); } break; @@ -5217,11 +5321,11 @@ class SqlBaseParser extends Parser { _localctx = new TimeEscapedLiteralContext(_localctx); 
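// The escaped-literal alternatives of constant() around this point (DATE_ESC,
// TIME_ESC, TIMESTAMP_ESC, GUID_ESC) implement JDBC escaped literals, each
// shaped as <escape opener> string ESC_END, e.g. {d '2018-11-13'},
// {t '10:20:30'}, {ts '2018-11-13 10:20:30'} and the {guid '...'} form. A
// hypothetical mapping from opener to literal kind (lexemes assumed from the
// JDBC escape convention, not read from the lexer here):
final class EscapedLiteralSketch {
    static String kindOf(String opener) {
        switch (opener) {
            case "{d":    return "DATE";
            case "{t":    return "TIME";
            case "{ts":   return "TIMESTAMP";
            case "{guid": return "GUID";
            default:      return "UNKNOWN";
        }
    }
}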
enterOuterAlt(_localctx, 8); { - setState(676); + setState(687); match(TIME_ESC); - setState(677); + setState(688); string(); - setState(678); + setState(689); match(ESC_END); } break; @@ -5229,11 +5333,11 @@ class SqlBaseParser extends Parser { _localctx = new TimestampEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(680); + setState(691); match(TIMESTAMP_ESC); - setState(681); + setState(692); string(); - setState(682); + setState(693); match(ESC_END); } break; @@ -5241,11 +5345,11 @@ class SqlBaseParser extends Parser { _localctx = new GuidEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(684); + setState(695); match(GUID_ESC); - setState(685); + setState(696); string(); - setState(686); + setState(697); match(ESC_END); } break; @@ -5293,14 +5397,14 @@ class SqlBaseParser extends Parser { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_comparisonOperator); + enterRule(_localctx, 82, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(690); + setState(701); _la = _input.LA(1); - if ( !(((((_la - 98)) & ~0x3f) == 0 && ((1L << (_la - 98)) & ((1L << (EQ - 98)) | (1L << (NULLEQ - 98)) | (1L << (NEQ - 98)) | (1L << (LT - 98)) | (1L << (LTE - 98)) | (1L << (GT - 98)) | (1L << (GTE - 98)))) != 0)) ) { + if ( !(((((_la - 100)) & ~0x3f) == 0 && ((1L << (_la - 100)) & ((1L << (EQ - 100)) | (1L << (NULLEQ - 100)) | (1L << (NEQ - 100)) | (1L << (LT - 100)) | (1L << (LTE - 100)) | (1L << (GT - 100)) | (1L << (GTE - 100)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -5342,12 +5446,12 @@ class SqlBaseParser extends Parser { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_booleanValue); + enterRule(_localctx, 84, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(692); + setState(703); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -5410,18 +5514,18 @@ class SqlBaseParser extends Parser { public final IntervalContext interval() throws RecognitionException { IntervalContext _localctx = new IntervalContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_interval); + enterRule(_localctx, 86, RULE_interval); int _la; try { enterOuterAlt(_localctx, 1); { - setState(694); + setState(705); match(INTERVAL); - setState(696); + setState(707); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(695); + setState(706); ((IntervalContext)_localctx).sign = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -5432,35 +5536,35 @@ class SqlBaseParser extends Parser { } } - setState(700); + setState(711); switch (_input.LA(1)) { case INTEGER_VALUE: case DECIMAL_VALUE: { - setState(698); + setState(709); ((IntervalContext)_localctx).valueNumeric = number(); } break; case PARAM: case STRING: { - setState(699); + setState(710); ((IntervalContext)_localctx).valuePattern = string(); } break; default: throw new NoViableAltException(this); } - setState(702); + setState(713); ((IntervalContext)_localctx).leading = intervalField(); - setState(705); + setState(716); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,91,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,93,_ctx) ) { case 1: { - 
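// The interval rule being decoded here accepts INTERVAL [+|-] (number | string)
// unit [TO unit], with the units supplied by intervalField, the next rule
// (DAY(S), HOUR(S), MINUTE(S), MONTH(S), SECOND(S), YEAR(S)). Illustrative
// inputs this shape admits: INTERVAL 2 DAY, INTERVAL -3 HOURS,
// INTERVAL '2-3' YEAR TO MONTH. A rough regex approximation of the same surface
// syntax (a sketch only; the real rule is token-based, not regex-based):
final class IntervalShapeSketch {
    static final java.util.regex.Pattern SHAPE = java.util.regex.Pattern.compile(
        "INTERVAL\\s+[+-]?\\s*('[^']*'|\\d+(\\.\\d+)?)\\s+\\w+(\\s+TO\\s+\\w+)?",
        java.util.regex.Pattern.CASE_INSENSITIVE);

    static boolean looksLikeInterval(String sql) {
        return SHAPE.matcher(sql.trim()).matches();
    }
}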
setState(703); + setState(714); match(TO); - setState(704); + setState(715); ((IntervalContext)_localctx).trailing = intervalField(); } break; @@ -5512,14 +5616,14 @@ class SqlBaseParser extends Parser { public final IntervalFieldContext intervalField() throws RecognitionException { IntervalFieldContext _localctx = new IntervalFieldContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_intervalField); + enterRule(_localctx, 88, RULE_intervalField); int _la; try { enterOuterAlt(_localctx, 1); { - setState(707); + setState(718); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << DAY) | (1L << DAYS) | (1L << HOUR) | (1L << HOURS) | (1L << MINUTE) | (1L << MINUTES) | (1L << MONTH) | (1L << MONTHS))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (SECOND - 72)) | (1L << (SECONDS - 72)) | (1L << (YEAR - 72)) | (1L << (YEARS - 72)))) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << DAY) | (1L << DAYS) | (1L << HOUR) | (1L << HOURS) | (1L << MINUTE) | (1L << MINUTES) | (1L << MONTH) | (1L << MONTHS))) != 0) || ((((_la - 74)) & ~0x3f) == 0 && ((1L << (_la - 74)) & ((1L << (SECOND - 74)) | (1L << (SECONDS - 74)) | (1L << (YEAR - 74)) | (1L << (YEARS - 74)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -5570,12 +5674,12 @@ class SqlBaseParser extends Parser { public final DataTypeContext dataType() throws RecognitionException { DataTypeContext _localctx = new DataTypeContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_dataType); + enterRule(_localctx, 90, RULE_dataType); try { _localctx = new PrimitiveDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(709); + setState(720); identifier(); } } @@ -5622,30 +5726,30 @@ class SqlBaseParser extends Parser { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_qualifiedName); + enterRule(_localctx, 92, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(716); + setState(727); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,92,_ctx); + _alt = getInterpreter().adaptivePredict(_input,94,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(711); + setState(722); identifier(); - setState(712); + setState(723); match(DOT); } } } - setState(718); + setState(729); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,92,_ctx); + _alt = getInterpreter().adaptivePredict(_input,94,_ctx); } - setState(719); + setState(730); identifier(); } } @@ -5688,15 +5792,15 @@ class SqlBaseParser extends Parser { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_identifier); + enterRule(_localctx, 94, RULE_identifier); try { - setState(723); + setState(734); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(721); + setState(732); quoteIdentifier(); } break; @@ -5704,6 +5808,7 @@ class SqlBaseParser extends Parser { case ANALYZED: case CATALOGS: case COLUMNS: + case CURRENT: case DAY: case DEBUG: case EXECUTABLE: @@ -5739,7 +5844,7 @@ class SqlBaseParser extends Parser { case DIGIT_IDENTIFIER: enterOuterAlt(_localctx, 2); { - setState(722); + setState(733); unquoteIdentifier(); } break; @@ -5789,46 
+5894,46 @@ class SqlBaseParser extends Parser { public final TableIdentifierContext tableIdentifier() throws RecognitionException { TableIdentifierContext _localctx = new TableIdentifierContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_tableIdentifier); + enterRule(_localctx, 96, RULE_tableIdentifier); int _la; try { - setState(737); + setState(748); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,96,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,98,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(728); + setState(739); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH) | (1L << OPTIMIZED))) != 0) || ((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & ((1L << (PARSED - 65)) | (1L << (PHYSICAL - 65)) | (1L << (PLAN - 65)) | (1L << (RLIKE - 65)) | (1L << (QUERY - 65)) | (1L << (SCHEMAS - 65)) | (1L << (SECOND - 65)) | (1L << (SHOW - 65)) | (1L << (SYS - 65)) | (1L << (TABLES - 65)) | (1L << (TEXT - 65)) | (1L << (TYPE - 65)) | (1L << (TYPES - 65)) | (1L << (VERIFY - 65)) | (1L << (YEAR - 65)) | (1L << (IDENTIFIER - 65)) | (1L << (DIGIT_IDENTIFIER - 65)) | (1L << (QUOTED_IDENTIFIER - 65)) | (1L << (BACKQUOTED_IDENTIFIER - 65)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH) | (1L << OPTIMIZED))) != 0) || ((((_la - 67)) & ~0x3f) == 0 && ((1L << (_la - 67)) & ((1L << (PARSED - 67)) | (1L << (PHYSICAL - 67)) | (1L << (PLAN - 67)) | (1L << (RLIKE - 67)) | (1L << (QUERY - 67)) | (1L << (SCHEMAS - 67)) | (1L << (SECOND - 67)) | (1L << (SHOW - 67)) | (1L << (SYS - 67)) | (1L << (TABLES - 67)) | (1L << (TEXT - 67)) | (1L << (TYPE - 67)) | (1L << (TYPES - 67)) | (1L << (VERIFY - 67)) | (1L << (YEAR - 67)) | (1L << (IDENTIFIER - 67)) | (1L << (DIGIT_IDENTIFIER - 67)) | (1L << (QUOTED_IDENTIFIER - 67)) | (1L << (BACKQUOTED_IDENTIFIER - 67)))) != 0)) { { - setState(725); + setState(736); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(726); + setState(737); match(T__3); } } - setState(730); + setState(741); match(TABLE_IDENTIFIER); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(734); + setState(745); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,95,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,97,_ctx) ) { case 1: { - setState(731); + setState(742); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(732); + setState(743); match(T__3); } break; } - setState(736); + setState(747); ((TableIdentifierContext)_localctx).name = identifier(); } break; @@ -5893,15 +5998,15 @@ class SqlBaseParser extends Parser { public final QuoteIdentifierContext quoteIdentifier() throws RecognitionException { QuoteIdentifierContext _localctx = new QuoteIdentifierContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_quoteIdentifier); + enterRule(_localctx, 98, 
RULE_quoteIdentifier); try { - setState(741); + setState(752); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: _localctx = new QuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(739); + setState(750); match(QUOTED_IDENTIFIER); } break; @@ -5909,7 +6014,7 @@ class SqlBaseParser extends Parser { _localctx = new BackQuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(740); + setState(751); match(BACKQUOTED_IDENTIFIER); } break; @@ -5979,15 +6084,15 @@ class SqlBaseParser extends Parser { public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionException { UnquoteIdentifierContext _localctx = new UnquoteIdentifierContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_unquoteIdentifier); + enterRule(_localctx, 100, RULE_unquoteIdentifier); try { - setState(746); + setState(757); switch (_input.LA(1)) { case IDENTIFIER: _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(743); + setState(754); match(IDENTIFIER); } break; @@ -5995,6 +6100,7 @@ class SqlBaseParser extends Parser { case ANALYZED: case CATALOGS: case COLUMNS: + case CURRENT: case DAY: case DEBUG: case EXECUTABLE: @@ -6029,7 +6135,7 @@ class SqlBaseParser extends Parser { _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(744); + setState(755); nonReserved(); } break; @@ -6037,7 +6143,7 @@ class SqlBaseParser extends Parser { _localctx = new DigitIdentifierContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(745); + setState(756); match(DIGIT_IDENTIFIER); } break; @@ -6104,15 +6210,15 @@ class SqlBaseParser extends Parser { public final NumberContext number() throws RecognitionException { NumberContext _localctx = new NumberContext(_ctx, getState()); - enterRule(_localctx, 100, RULE_number); + enterRule(_localctx, 102, RULE_number); try { - setState(750); + setState(761); switch (_input.LA(1)) { case DECIMAL_VALUE: _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(748); + setState(759); match(DECIMAL_VALUE); } break; @@ -6120,7 +6226,7 @@ class SqlBaseParser extends Parser { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(749); + setState(760); match(INTEGER_VALUE); } break; @@ -6163,12 +6269,12 @@ class SqlBaseParser extends Parser { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 102, RULE_string); + enterRule(_localctx, 104, RULE_string); int _la; try { enterOuterAlt(_localctx, 1); { - setState(752); + setState(763); _la = _input.LA(1); if ( !(_la==PARAM || _la==STRING) ) { _errHandler.recoverInline(this); @@ -6193,6 +6299,7 @@ class SqlBaseParser extends Parser { public TerminalNode ANALYZED() { return getToken(SqlBaseParser.ANALYZED, 0); } public TerminalNode CATALOGS() { return getToken(SqlBaseParser.CATALOGS, 0); } public TerminalNode COLUMNS() { return getToken(SqlBaseParser.COLUMNS, 0); } + public TerminalNode CURRENT() { return getToken(SqlBaseParser.CURRENT, 0); } public TerminalNode DAY() { return getToken(SqlBaseParser.DAY, 0); } public TerminalNode DEBUG() { return getToken(SqlBaseParser.DEBUG, 0); } public TerminalNode EXECUTABLE() { return getToken(SqlBaseParser.EXECUTABLE, 0); } @@ -6245,14 +6352,14 @@ class SqlBaseParser extends Parser { public final NonReservedContext nonReserved() throws RecognitionException { NonReservedContext _localctx = new 
NonReservedContext(_ctx, getState()); - enterRule(_localctx, 104, RULE_nonReserved); + enterRule(_localctx, 106, RULE_nonReserved); int _la; try { enterOuterAlt(_localctx, 1); { - setState(754); + setState(765); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH) | (1L << OPTIMIZED))) != 0) || ((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & ((1L << (PARSED - 65)) | (1L << (PHYSICAL - 65)) | (1L << (PLAN - 65)) | (1L << (RLIKE - 65)) | (1L << (QUERY - 65)) | (1L << (SCHEMAS - 65)) | (1L << (SECOND - 65)) | (1L << (SHOW - 65)) | (1L << (SYS - 65)) | (1L << (TABLES - 65)) | (1L << (TEXT - 65)) | (1L << (TYPE - 65)) | (1L << (TYPES - 65)) | (1L << (VERIFY - 65)) | (1L << (YEAR - 65)))) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH) | (1L << OPTIMIZED))) != 0) || ((((_la - 67)) & ~0x3f) == 0 && ((1L << (_la - 67)) & ((1L << (PARSED - 67)) | (1L << (PHYSICAL - 67)) | (1L << (PLAN - 67)) | (1L << (RLIKE - 67)) | (1L << (QUERY - 67)) | (1L << (SCHEMAS - 67)) | (1L << (SECOND - 67)) | (1L << (SHOW - 67)) | (1L << (SYS - 67)) | (1L << (TABLES - 67)) | (1L << (TEXT - 67)) | (1L << (TYPE - 67)) | (1L << (TYPES - 67)) | (1L << (VERIFY - 67)) | (1L << (YEAR - 67)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -6301,306 +6408,312 @@ class SqlBaseParser extends Parser { } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\177\u02f7\4\2\t\2"+ - "\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\u0081\u0302\4\2\t"+ + "\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ "\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+ "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4"+ ",\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t"+ - "\64\4\65\t\65\4\66\t\66\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3"+ - "\4\3\4\3\4\3\4\7\4|\n\4\f\4\16\4\177\13\4\3\4\5\4\u0082\n\4\3\4\3\4\3"+ - "\4\3\4\3\4\3\4\3\4\7\4\u008b\n\4\f\4\16\4\u008e\13\4\3\4\5\4\u0091\n\4"+ - "\3\4\3\4\3\4\3\4\3\4\5\4\u0098\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u009f\n\4\3"+ - "\4\3\4\3\4\5\4\u00a4\n\4\3\4\3\4\3\4\5\4\u00a9\n\4\3\4\3\4\3\4\3\4\3\4"+ - "\3\4\3\4\3\4\5\4\u00b3\n\4\3\4\3\4\5\4\u00b7\n\4\3\4\3\4\3\4\3\4\7\4\u00bd"+ - "\n\4\f\4\16\4\u00c0\13\4\5\4\u00c2\n\4\3\4\3\4\3\4\3\4\5\4\u00c8\n\4\3"+ - "\4\3\4\3\4\5\4\u00cd\n\4\3\4\5\4\u00d0\n\4\3\4\3\4\3\4\5\4\u00d5\n\4\3"+ - "\4\5\4\u00d8\n\4\3\4\3\4\3\4\5\4\u00dd\n\4\3\5\3\5\3\5\3\5\7\5\u00e3\n"+ - "\5\f\5\16\5\u00e6\13\5\5\5\u00e8\n\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3\6\7"+ - 
"\6\u00f2\n\6\f\6\16\6\u00f5\13\6\5\6\u00f7\n\6\3\6\5\6\u00fa\n\6\3\7\3"+ - "\7\3\7\3\7\3\7\5\7\u0101\n\7\3\b\3\b\3\b\3\b\3\b\5\b\u0108\n\b\3\t\3\t"+ - "\5\t\u010c\n\t\3\t\3\t\5\t\u0110\n\t\3\n\3\n\5\n\u0114\n\n\3\n\3\n\3\n"+ - "\7\n\u0119\n\n\f\n\16\n\u011c\13\n\3\n\5\n\u011f\n\n\3\n\3\n\5\n\u0123"+ - "\n\n\3\n\3\n\3\n\5\n\u0128\n\n\3\n\3\n\5\n\u012c\n\n\3\13\3\13\3\13\3"+ - "\13\7\13\u0132\n\13\f\13\16\13\u0135\13\13\3\f\5\f\u0138\n\f\3\f\3\f\3"+ - "\f\7\f\u013d\n\f\f\f\16\f\u0140\13\f\3\r\3\r\3\16\3\16\3\16\3\16\7\16"+ - "\u0148\n\16\f\16\16\16\u014b\13\16\5\16\u014d\n\16\3\16\3\16\5\16\u0151"+ - "\n\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\21\3\21\5\21\u015d\n\21"+ - "\3\21\5\21\u0160\n\21\3\22\3\22\7\22\u0164\n\22\f\22\16\22\u0167\13\22"+ - "\3\23\3\23\3\23\3\23\5\23\u016d\n\23\3\23\3\23\3\23\3\23\3\23\5\23\u0174"+ - "\n\23\3\24\5\24\u0177\n\24\3\24\3\24\5\24\u017b\n\24\3\24\3\24\5\24\u017f"+ - "\n\24\3\24\3\24\5\24\u0183\n\24\5\24\u0185\n\24\3\25\3\25\3\25\3\25\3"+ - "\25\3\25\3\25\7\25\u018e\n\25\f\25\16\25\u0191\13\25\3\25\3\25\5\25\u0195"+ - "\n\25\3\26\3\26\5\26\u0199\n\26\3\26\5\26\u019c\n\26\3\26\3\26\3\26\3"+ - "\26\5\26\u01a2\n\26\3\26\5\26\u01a5\n\26\3\26\3\26\3\26\3\26\5\26\u01ab"+ - "\n\26\3\26\5\26\u01ae\n\26\5\26\u01b0\n\26\3\27\3\27\3\30\3\30\3\30\3"+ - "\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3"+ - "\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\5"+ - "\30\u01d3\n\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01db\n\30\f\30\16\30"+ - "\u01de\13\30\3\31\3\31\7\31\u01e2\n\31\f\31\16\31\u01e5\13\31\3\32\3\32"+ - "\5\32\u01e9\n\32\3\33\5\33\u01ec\n\33\3\33\3\33\3\33\3\33\3\33\3\33\5"+ - "\33\u01f4\n\33\3\33\3\33\3\33\3\33\3\33\7\33\u01fb\n\33\f\33\16\33\u01fe"+ - "\13\33\3\33\3\33\3\33\5\33\u0203\n\33\3\33\3\33\3\33\3\33\3\33\3\33\5"+ - "\33\u020b\n\33\3\33\3\33\3\33\5\33\u0210\n\33\3\33\3\33\3\33\3\33\5\33"+ - "\u0216\n\33\3\33\5\33\u0219\n\33\3\34\3\34\3\34\3\35\3\35\5\35\u0220\n"+ - "\35\3\36\3\36\3\36\3\36\3\36\3\36\5\36\u0228\n\36\3\37\3\37\3\37\3\37"+ - "\5\37\u022e\n\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\7\37"+ - "\u023a\n\37\f\37\16\37\u023d\13\37\3 \3 \3 \3 \3 \3 \5 \u0245\n \3 \3"+ - " \3 \3 \3 \3 \3 \3 \3 \3 \3 \5 \u0252\n \3!\3!\3!\3!\3!\3!\3!\3!\3!\3"+ - "!\5!\u025e\n!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3#\3$\3$\3"+ - "$\3$\3$\5$\u0273\n$\3%\3%\3%\3%\3%\3%\3%\3&\3&\3&\3&\3&\5&\u0281\n&\3"+ - "\'\3\'\3\'\5\'\u0286\n\'\3\'\3\'\3\'\7\'\u028b\n\'\f\'\16\'\u028e\13\'"+ - "\5\'\u0290\n\'\3\'\3\'\3(\3(\3(\5(\u0297\n(\3)\3)\3)\3)\3)\6)\u029e\n"+ - ")\r)\16)\u029f\3)\3)\3)\3)\3)\3)\3)\3)\3)\3)\3)\3)\3)\3)\3)\3)\3)\5)\u02b3"+ - "\n)\3*\3*\3+\3+\3,\3,\5,\u02bb\n,\3,\3,\5,\u02bf\n,\3,\3,\3,\5,\u02c4"+ - "\n,\3-\3-\3.\3.\3/\3/\3/\7/\u02cd\n/\f/\16/\u02d0\13/\3/\3/\3\60\3\60"+ - "\5\60\u02d6\n\60\3\61\3\61\3\61\5\61\u02db\n\61\3\61\3\61\3\61\3\61\5"+ - "\61\u02e1\n\61\3\61\5\61\u02e4\n\61\3\62\3\62\5\62\u02e8\n\62\3\63\3\63"+ - "\3\63\5\63\u02ed\n\63\3\64\3\64\5\64\u02f1\n\64\3\65\3\65\3\66\3\66\3"+ - "\66\2\4.<\67\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64"+ - "\668:<>@BDFHJLNPRTVXZ\\^`bdfhj\2\22\b\2\7\7\t\t\34\34\64\64??CC\4\2&&"+ - "QQ\4\2\t\t??\4\2##++\3\2\30\31\3\2kl\4\2\7\7tt\4\2\r\r\30\30\4\2!!\60"+ - "\60\4\2\7\7\32\32\3\2mo\3\2dj\4\2 RR\7\2\25\26)*\669JKZ[\3\2rs\30\2\b"+ - "\t\22\23\25\25\27\27\34\34\36\36!\"%&))--\60\60\63\64\66\6688??CEGJMN"+ - "PQTUWWZZ\u0352\2l\3\2\2\2\4o\3\2\2\2\6\u00dc\3\2\2\2\b\u00e7\3\2\2\2\n"+ - 
"\u00eb\3\2\2\2\f\u0100\3\2\2\2\16\u0107\3\2\2\2\20\u0109\3\2\2\2\22\u0111"+ - "\3\2\2\2\24\u012d\3\2\2\2\26\u0137\3\2\2\2\30\u0141\3\2\2\2\32\u0150\3"+ - "\2\2\2\34\u0152\3\2\2\2\36\u0158\3\2\2\2 \u015a\3\2\2\2\"\u0161\3\2\2"+ - "\2$\u0173\3\2\2\2&\u0184\3\2\2\2(\u0194\3\2\2\2*\u01af\3\2\2\2,\u01b1"+ - "\3\2\2\2.\u01d2\3\2\2\2\60\u01e3\3\2\2\2\62\u01e6\3\2\2\2\64\u0218\3\2"+ - "\2\2\66\u021a\3\2\2\28\u021d\3\2\2\2:\u0227\3\2\2\2<\u022d\3\2\2\2>\u0251"+ - "\3\2\2\2@\u025d\3\2\2\2B\u025f\3\2\2\2D\u0266\3\2\2\2F\u0272\3\2\2\2H"+ - "\u0274\3\2\2\2J\u0280\3\2\2\2L\u0282\3\2\2\2N\u0296\3\2\2\2P\u02b2\3\2"+ - "\2\2R\u02b4\3\2\2\2T\u02b6\3\2\2\2V\u02b8\3\2\2\2X\u02c5\3\2\2\2Z\u02c7"+ - "\3\2\2\2\\\u02ce\3\2\2\2^\u02d5\3\2\2\2`\u02e3\3\2\2\2b\u02e7\3\2\2\2"+ - "d\u02ec\3\2\2\2f\u02f0\3\2\2\2h\u02f2\3\2\2\2j\u02f4\3\2\2\2lm\5\6\4\2"+ - "mn\7\2\2\3n\3\3\2\2\2op\5,\27\2pq\7\2\2\3q\5\3\2\2\2r\u00dd\5\b\5\2s\u0081"+ - "\7\36\2\2t}\7\3\2\2uv\7E\2\2v|\t\2\2\2wx\7\"\2\2x|\t\3\2\2yz\7W\2\2z|"+ - "\5T+\2{u\3\2\2\2{w\3\2\2\2{y\3\2\2\2|\177\3\2\2\2}{\3\2\2\2}~\3\2\2\2"+ - "~\u0080\3\2\2\2\177}\3\2\2\2\u0080\u0082\7\4\2\2\u0081t\3\2\2\2\u0081"+ - "\u0082\3\2\2\2\u0082\u0083\3\2\2\2\u0083\u00dd\5\6\4\2\u0084\u0090\7\27"+ - "\2\2\u0085\u008c\7\3\2\2\u0086\u0087\7E\2\2\u0087\u008b\t\4\2\2\u0088"+ - "\u0089\7\"\2\2\u0089\u008b\t\3\2\2\u008a\u0086\3\2\2\2\u008a\u0088\3\2"+ - "\2\2\u008b\u008e\3\2\2\2\u008c\u008a\3\2\2\2\u008c\u008d\3\2\2\2\u008d"+ - "\u008f\3\2\2\2\u008e\u008c\3\2\2\2\u008f\u0091\7\4\2\2\u0090\u0085\3\2"+ - "\2\2\u0090\u0091\3\2\2\2\u0091\u0092\3\2\2\2\u0092\u00dd\5\6\4\2\u0093"+ - "\u0094\7M\2\2\u0094\u0097\7P\2\2\u0095\u0098\5\66\34\2\u0096\u0098\5`"+ - "\61\2\u0097\u0095\3\2\2\2\u0097\u0096\3\2\2\2\u0097\u0098\3\2\2\2\u0098"+ - "\u00dd\3\2\2\2\u0099\u009a\7M\2\2\u009a\u009b\7\23\2\2\u009b\u009e\t\5"+ - "\2\2\u009c\u009f\5\66\34\2\u009d\u009f\5`\61\2\u009e\u009c\3\2\2\2\u009e"+ - "\u009d\3\2\2\2\u009f\u00dd\3\2\2\2\u00a0\u00a3\t\6\2\2\u00a1\u00a4\5\66"+ - "\34\2\u00a2\u00a4\5`\61\2\u00a3\u00a1\3\2\2\2\u00a3\u00a2\3\2\2\2\u00a4"+ - "\u00dd\3\2\2\2\u00a5\u00a6\7M\2\2\u00a6\u00a8\7%\2\2\u00a7\u00a9\5\66"+ - "\34\2\u00a8\u00a7\3\2\2\2\u00a8\u00a9\3\2\2\2\u00a9\u00dd\3\2\2\2\u00aa"+ - "\u00ab\7M\2\2\u00ab\u00dd\7I\2\2\u00ac\u00ad\7N\2\2\u00ad\u00dd\7\22\2"+ - "\2\u00ae\u00af\7N\2\2\u00af\u00b2\7P\2\2\u00b0\u00b1\7\21\2\2\u00b1\u00b3"+ - "\5\66\34\2\u00b2\u00b0\3\2\2\2\u00b2\u00b3\3\2\2\2\u00b3\u00b6\3\2\2\2"+ - "\u00b4\u00b7\5\66\34\2\u00b5\u00b7\5`\61\2\u00b6\u00b4\3\2\2\2\u00b6\u00b5"+ - "\3\2\2\2\u00b6\u00b7\3\2\2\2\u00b7\u00c1\3\2\2\2\u00b8\u00b9\7T\2\2\u00b9"+ - "\u00be\5h\65\2\u00ba\u00bb\7\5\2\2\u00bb\u00bd\5h\65\2\u00bc\u00ba\3\2"+ - "\2\2\u00bd\u00c0\3\2\2\2\u00be\u00bc\3\2\2\2\u00be\u00bf\3\2\2\2\u00bf"+ - "\u00c2\3\2\2\2\u00c0\u00be\3\2\2\2\u00c1\u00b8\3\2\2\2\u00c1\u00c2\3\2"+ - "\2\2\u00c2\u00dd\3\2\2\2\u00c3\u00c4\7N\2\2\u00c4\u00c7\7\23\2\2\u00c5"+ - "\u00c6\7\21\2\2\u00c6\u00c8\5h\65\2\u00c7\u00c5\3\2\2\2\u00c7\u00c8\3"+ - "\2\2\2\u00c8\u00cc\3\2\2\2\u00c9\u00ca\7O\2\2\u00ca\u00cd\5\66\34\2\u00cb"+ - "\u00cd\5`\61\2\u00cc\u00c9\3\2\2\2\u00cc\u00cb\3\2\2\2\u00cc\u00cd\3\2"+ - "\2\2\u00cd\u00cf\3\2\2\2\u00ce\u00d0\5\66\34\2\u00cf\u00ce\3\2\2\2\u00cf"+ - "\u00d0\3\2\2\2\u00d0\u00dd\3\2\2\2\u00d1\u00d2\7N\2\2\u00d2\u00d7\7U\2"+ - "\2\u00d3\u00d5\t\7\2\2\u00d4\u00d3\3\2\2\2\u00d4\u00d5\3\2\2\2\u00d5\u00d6"+ - "\3\2\2\2\u00d6\u00d8\5f\64\2\u00d7\u00d4\3\2\2\2\u00d7\u00d8\3\2\2\2\u00d8"+ - "\u00dd\3\2\2\2\u00d9\u00da\7N\2\2\u00da\u00db\7O\2\2\u00db\u00dd\7U\2"+ - 
"\2\u00dcr\3\2\2\2\u00dcs\3\2\2\2\u00dc\u0084\3\2\2\2\u00dc\u0093\3\2\2"+ - "\2\u00dc\u0099\3\2\2\2\u00dc\u00a0\3\2\2\2\u00dc\u00a5\3\2\2\2\u00dc\u00aa"+ - "\3\2\2\2\u00dc\u00ac\3\2\2\2\u00dc\u00ae\3\2\2\2\u00dc\u00c3\3\2\2\2\u00dc"+ - "\u00d1\3\2\2\2\u00dc\u00d9\3\2\2\2\u00dd\7\3\2\2\2\u00de\u00df\7Y\2\2"+ - "\u00df\u00e4\5\34\17\2\u00e0\u00e1\7\5\2\2\u00e1\u00e3\5\34\17\2\u00e2"+ - "\u00e0\3\2\2\2\u00e3\u00e6\3\2\2\2\u00e4\u00e2\3\2\2\2\u00e4\u00e5\3\2"+ - "\2\2\u00e5\u00e8\3\2\2\2\u00e6\u00e4\3\2\2\2\u00e7\u00de\3\2\2\2\u00e7"+ - "\u00e8\3\2\2\2\u00e8\u00e9\3\2\2\2\u00e9\u00ea\5\n\6\2\u00ea\t\3\2\2\2"+ - "\u00eb\u00f6\5\16\b\2\u00ec\u00ed\7A\2\2\u00ed\u00ee\7\17\2\2\u00ee\u00f3"+ - "\5\20\t\2\u00ef\u00f0\7\5\2\2\u00f0\u00f2\5\20\t\2\u00f1\u00ef\3\2\2\2"+ - "\u00f2\u00f5\3\2\2\2\u00f3\u00f1\3\2\2\2\u00f3\u00f4\3\2\2\2\u00f4\u00f7"+ - "\3\2\2\2\u00f5\u00f3\3\2\2\2\u00f6\u00ec\3\2\2\2\u00f6\u00f7\3\2\2\2\u00f7"+ - "\u00f9\3\2\2\2\u00f8\u00fa\5\f\7\2\u00f9\u00f8\3\2\2\2\u00f9\u00fa\3\2"+ - "\2\2\u00fa\13\3\2\2\2\u00fb\u00fc\7\63\2\2\u00fc\u0101\t\b\2\2\u00fd\u00fe"+ - "\7^\2\2\u00fe\u00ff\t\b\2\2\u00ff\u0101\7c\2\2\u0100\u00fb\3\2\2\2\u0100"+ - "\u00fd\3\2\2\2\u0101\r\3\2\2\2\u0102\u0108\5\22\n\2\u0103\u0104\7\3\2"+ - "\2\u0104\u0105\5\n\6\2\u0105\u0106\7\4\2\2\u0106\u0108\3\2\2\2\u0107\u0102"+ - "\3\2\2\2\u0107\u0103\3\2\2\2\u0108\17\3\2\2\2\u0109\u010b\5,\27\2\u010a"+ - "\u010c\t\t\2\2\u010b\u010a\3\2\2\2\u010b\u010c\3\2\2\2\u010c\u010f\3\2"+ - "\2\2\u010d\u010e\7=\2\2\u010e\u0110\t\n\2\2\u010f\u010d\3\2\2\2\u010f"+ - "\u0110\3\2\2\2\u0110\21\3\2\2\2\u0111\u0113\7L\2\2\u0112\u0114\5\36\20"+ - "\2\u0113\u0112\3\2\2\2\u0113\u0114\3\2\2\2\u0114\u0115\3\2\2\2\u0115\u011a"+ - "\5 \21\2\u0116\u0117\7\5\2\2\u0117\u0119\5 \21\2\u0118\u0116\3\2\2\2\u0119"+ - "\u011c\3\2\2\2\u011a\u0118\3\2\2\2\u011a\u011b\3\2\2\2\u011b\u011e\3\2"+ - "\2\2\u011c\u011a\3\2\2\2\u011d\u011f\5\24\13\2\u011e\u011d\3\2\2\2\u011e"+ - "\u011f\3\2\2\2\u011f\u0122\3\2\2\2\u0120\u0121\7X\2\2\u0121\u0123\5.\30"+ - "\2\u0122\u0120\3\2\2\2\u0122\u0123\3\2\2\2\u0123\u0127\3\2\2\2\u0124\u0125"+ - "\7\'\2\2\u0125\u0126\7\17\2\2\u0126\u0128\5\26\f\2\u0127\u0124\3\2\2\2"+ - "\u0127\u0128\3\2\2\2\u0128\u012b\3\2\2\2\u0129\u012a\7(\2\2\u012a\u012c"+ - "\5.\30\2\u012b\u0129\3\2\2\2\u012b\u012c\3\2\2\2\u012c\23\3\2\2\2\u012d"+ - "\u012e\7#\2\2\u012e\u0133\5\"\22\2\u012f\u0130\7\5\2\2\u0130\u0132\5\""+ - "\22\2\u0131\u012f\3\2\2\2\u0132\u0135\3\2\2\2\u0133\u0131\3\2\2\2\u0133"+ - "\u0134\3\2\2\2\u0134\25\3\2\2\2\u0135\u0133\3\2\2\2\u0136\u0138\5\36\20"+ - "\2\u0137\u0136\3\2\2\2\u0137\u0138\3\2\2\2\u0138\u0139\3\2\2\2\u0139\u013e"+ - "\5\30\r\2\u013a\u013b\7\5\2\2\u013b\u013d\5\30\r\2\u013c\u013a\3\2\2\2"+ - "\u013d\u0140\3\2\2\2\u013e\u013c\3\2\2\2\u013e\u013f\3\2\2\2\u013f\27"+ - "\3\2\2\2\u0140\u013e\3\2\2\2\u0141\u0142\5\32\16\2\u0142\31\3\2\2\2\u0143"+ - "\u014c\7\3\2\2\u0144\u0149\5,\27\2\u0145\u0146\7\5\2\2\u0146\u0148\5,"+ - "\27\2\u0147\u0145\3\2\2\2\u0148\u014b\3\2\2\2\u0149\u0147\3\2\2\2\u0149"+ - "\u014a\3\2\2\2\u014a\u014d\3\2\2\2\u014b\u0149\3\2\2\2\u014c\u0144\3\2"+ - "\2\2\u014c\u014d\3\2\2\2\u014d\u014e\3\2\2\2\u014e\u0151\7\4\2\2\u014f"+ - "\u0151\5,\27\2\u0150\u0143\3\2\2\2\u0150\u014f\3\2\2\2\u0151\33\3\2\2"+ - "\2\u0152\u0153\5^\60\2\u0153\u0154\7\f\2\2\u0154\u0155\7\3\2\2\u0155\u0156"+ - "\5\n\6\2\u0156\u0157\7\4\2\2\u0157\35\3\2\2\2\u0158\u0159\t\13\2\2\u0159"+ - "\37\3\2\2\2\u015a\u015f\5,\27\2\u015b\u015d\7\f\2\2\u015c\u015b\3\2\2"+ - "\2\u015c\u015d\3\2\2\2\u015d\u015e\3\2\2\2\u015e\u0160\5^\60\2\u015f\u015c"+ - 
"\3\2\2\2\u015f\u0160\3\2\2\2\u0160!\3\2\2\2\u0161\u0165\5*\26\2\u0162"+ - "\u0164\5$\23\2\u0163\u0162\3\2\2\2\u0164\u0167\3\2\2\2\u0165\u0163\3\2"+ - "\2\2\u0165\u0166\3\2\2\2\u0166#\3\2\2\2\u0167\u0165\3\2\2\2\u0168\u0169"+ - "\5&\24\2\u0169\u016a\7/\2\2\u016a\u016c\5*\26\2\u016b\u016d\5(\25\2\u016c"+ - "\u016b\3\2\2\2\u016c\u016d\3\2\2\2\u016d\u0174\3\2\2\2\u016e\u016f\7:"+ - "\2\2\u016f\u0170\5&\24\2\u0170\u0171\7/\2\2\u0171\u0172\5*\26\2\u0172"+ - "\u0174\3\2\2\2\u0173\u0168\3\2\2\2\u0173\u016e\3\2\2\2\u0174%\3\2\2\2"+ - "\u0175\u0177\7,\2\2\u0176\u0175\3\2\2\2\u0176\u0177\3\2\2\2\u0177\u0185"+ - "\3\2\2\2\u0178\u017a\7\61\2\2\u0179\u017b\7B\2\2\u017a\u0179\3\2\2\2\u017a"+ - "\u017b\3\2\2\2\u017b\u0185\3\2\2\2\u017c\u017e\7F\2\2\u017d\u017f\7B\2"+ - "\2\u017e\u017d\3\2\2\2\u017e\u017f\3\2\2\2\u017f\u0185\3\2\2\2\u0180\u0182"+ - "\7$\2\2\u0181\u0183\7B\2\2\u0182\u0181\3\2\2\2\u0182\u0183\3\2\2\2\u0183"+ - "\u0185\3\2\2\2\u0184\u0176\3\2\2\2\u0184\u0178\3\2\2\2\u0184\u017c\3\2"+ - "\2\2\u0184\u0180\3\2\2\2\u0185\'\3\2\2\2\u0186\u0187\7>\2\2\u0187\u0195"+ - "\5.\30\2\u0188\u0189\7V\2\2\u0189\u018a\7\3\2\2\u018a\u018f\5^\60\2\u018b"+ - "\u018c\7\5\2\2\u018c\u018e\5^\60\2\u018d\u018b\3\2\2\2\u018e\u0191\3\2"+ - "\2\2\u018f\u018d\3\2\2\2\u018f\u0190\3\2\2\2\u0190\u0192\3\2\2\2\u0191"+ - "\u018f\3\2\2\2\u0192\u0193\7\4\2\2\u0193\u0195\3\2\2\2\u0194\u0186\3\2"+ - "\2\2\u0194\u0188\3\2\2\2\u0195)\3\2\2\2\u0196\u019b\5`\61\2\u0197\u0199"+ - "\7\f\2\2\u0198\u0197\3\2\2\2\u0198\u0199\3\2\2\2\u0199\u019a\3\2\2\2\u019a"+ - "\u019c\5\\/\2\u019b\u0198\3\2\2\2\u019b\u019c\3\2\2\2\u019c\u01b0\3\2"+ - "\2\2\u019d\u019e\7\3\2\2\u019e\u019f\5\n\6\2\u019f\u01a4\7\4\2\2\u01a0"+ - "\u01a2\7\f\2\2\u01a1\u01a0\3\2\2\2\u01a1\u01a2\3\2\2\2\u01a2\u01a3\3\2"+ - "\2\2\u01a3\u01a5\5\\/\2\u01a4\u01a1\3\2\2\2\u01a4\u01a5\3\2\2\2\u01a5"+ - "\u01b0\3\2\2\2\u01a6\u01a7\7\3\2\2\u01a7\u01a8\5\"\22\2\u01a8\u01ad\7"+ - "\4\2\2\u01a9\u01ab\7\f\2\2\u01aa\u01a9\3\2\2\2\u01aa\u01ab\3\2\2\2\u01ab"+ - "\u01ac\3\2\2\2\u01ac\u01ae\5\\/\2\u01ad\u01aa\3\2\2\2\u01ad\u01ae\3\2"+ - "\2\2\u01ae\u01b0\3\2\2\2\u01af\u0196\3\2\2\2\u01af\u019d\3\2\2\2\u01af"+ - "\u01a6\3\2\2\2\u01b0+\3\2\2\2\u01b1\u01b2\5.\30\2\u01b2-\3\2\2\2\u01b3"+ - "\u01b4\b\30\1\2\u01b4\u01b5\7;\2\2\u01b5\u01d3\5.\30\n\u01b6\u01b7\7\35"+ - "\2\2\u01b7\u01b8\7\3\2\2\u01b8\u01b9\5\b\5\2\u01b9\u01ba\7\4\2\2\u01ba"+ - "\u01d3\3\2\2\2\u01bb\u01bc\7H\2\2\u01bc\u01bd\7\3\2\2\u01bd\u01be\5h\65"+ - "\2\u01be\u01bf\5\60\31\2\u01bf\u01c0\7\4\2\2\u01c0\u01d3\3\2\2\2\u01c1"+ - "\u01c2\7\65\2\2\u01c2\u01c3\7\3\2\2\u01c3\u01c4\5\\/\2\u01c4\u01c5\7\5"+ - "\2\2\u01c5\u01c6\5h\65\2\u01c6\u01c7\5\60\31\2\u01c7\u01c8\7\4\2\2\u01c8"+ - "\u01d3\3\2\2\2\u01c9\u01ca\7\65\2\2\u01ca\u01cb\7\3\2\2\u01cb\u01cc\5"+ - "h\65\2\u01cc\u01cd\7\5\2\2\u01cd\u01ce\5h\65\2\u01ce\u01cf\5\60\31\2\u01cf"+ - "\u01d0\7\4\2\2\u01d0\u01d3\3\2\2\2\u01d1\u01d3\5\62\32\2\u01d2\u01b3\3"+ - "\2\2\2\u01d2\u01b6\3\2\2\2\u01d2\u01bb\3\2\2\2\u01d2\u01c1\3\2\2\2\u01d2"+ - "\u01c9\3\2\2\2\u01d2\u01d1\3\2\2\2\u01d3\u01dc\3\2\2\2\u01d4\u01d5\f\4"+ - "\2\2\u01d5\u01d6\7\n\2\2\u01d6\u01db\5.\30\5\u01d7\u01d8\f\3\2\2\u01d8"+ - "\u01d9\7@\2\2\u01d9\u01db\5.\30\4\u01da\u01d4\3\2\2\2\u01da\u01d7\3\2"+ - "\2\2\u01db\u01de\3\2\2\2\u01dc\u01da\3\2\2\2\u01dc\u01dd\3\2\2\2\u01dd"+ - "/\3\2\2\2\u01de\u01dc\3\2\2\2\u01df\u01e0\7\5\2\2\u01e0\u01e2\5h\65\2"+ - "\u01e1\u01df\3\2\2\2\u01e2\u01e5\3\2\2\2\u01e3\u01e1\3\2\2\2\u01e3\u01e4"+ - "\3\2\2\2\u01e4\61\3\2\2\2\u01e5\u01e3\3\2\2\2\u01e6\u01e8\5<\37\2\u01e7"+ - 
"\u01e9\5\64\33\2\u01e8\u01e7\3\2\2\2\u01e8\u01e9\3\2\2\2\u01e9\63\3\2"+ - "\2\2\u01ea\u01ec\7;\2\2\u01eb\u01ea\3\2\2\2\u01eb\u01ec\3\2\2\2\u01ec"+ - "\u01ed\3\2\2\2\u01ed\u01ee\7\16\2\2\u01ee\u01ef\5<\37\2\u01ef\u01f0\7"+ - "\n\2\2\u01f0\u01f1\5<\37\2\u01f1\u0219\3\2\2\2\u01f2\u01f4\7;\2\2\u01f3"+ - "\u01f2\3\2\2\2\u01f3\u01f4\3\2\2\2\u01f4\u01f5\3\2\2\2\u01f5\u01f6\7+"+ - "\2\2\u01f6\u01f7\7\3\2\2\u01f7\u01fc\5,\27\2\u01f8\u01f9\7\5\2\2\u01f9"+ - "\u01fb\5,\27\2\u01fa\u01f8\3\2\2\2\u01fb\u01fe\3\2\2\2\u01fc\u01fa\3\2"+ - "\2\2\u01fc\u01fd\3\2\2\2\u01fd\u01ff\3\2\2\2\u01fe\u01fc\3\2\2\2\u01ff"+ - "\u0200\7\4\2\2\u0200\u0219\3\2\2\2\u0201\u0203\7;\2\2\u0202\u0201\3\2"+ - "\2\2\u0202\u0203\3\2\2\2\u0203\u0204\3\2\2\2\u0204\u0205\7+\2\2\u0205"+ - "\u0206\7\3\2\2\u0206\u0207\5\b\5\2\u0207\u0208\7\4\2\2\u0208\u0219\3\2"+ - "\2\2\u0209\u020b\7;\2\2\u020a\u0209\3\2\2\2\u020a\u020b\3\2\2\2\u020b"+ - "\u020c\3\2\2\2\u020c\u020d\7\62\2\2\u020d\u0219\58\35\2\u020e\u0210\7"+ - ";\2\2\u020f\u020e\3\2\2\2\u020f\u0210\3\2\2\2\u0210\u0211\3\2\2\2\u0211"+ - "\u0212\7G\2\2\u0212\u0219\5h\65\2\u0213\u0215\7.\2\2\u0214\u0216\7;\2"+ - "\2\u0215\u0214\3\2\2\2\u0215\u0216\3\2\2\2\u0216\u0217\3\2\2\2\u0217\u0219"+ - "\7<\2\2\u0218\u01eb\3\2\2\2\u0218\u01f3\3\2\2\2\u0218\u0202\3\2\2\2\u0218"+ - "\u020a\3\2\2\2\u0218\u020f\3\2\2\2\u0218\u0213\3\2\2\2\u0219\65\3\2\2"+ - "\2\u021a\u021b\7\62\2\2\u021b\u021c\58\35\2\u021c\67\3\2\2\2\u021d\u021f"+ - "\5h\65\2\u021e\u0220\5:\36\2\u021f\u021e\3\2\2\2\u021f\u0220\3\2\2\2\u0220"+ - "9\3\2\2\2\u0221\u0222\7\33\2\2\u0222\u0228\5h\65\2\u0223\u0224\7\\\2\2"+ - "\u0224\u0225\5h\65\2\u0225\u0226\7c\2\2\u0226\u0228\3\2\2\2\u0227\u0221"+ - "\3\2\2\2\u0227\u0223\3\2\2\2\u0228;\3\2\2\2\u0229\u022a\b\37\1\2\u022a"+ - "\u022e\5> \2\u022b\u022c\t\7\2\2\u022c\u022e\5<\37\6\u022d\u0229\3\2\2"+ - "\2\u022d\u022b\3\2\2\2\u022e\u023b\3\2\2\2\u022f\u0230\f\5\2\2\u0230\u0231"+ - "\t\f\2\2\u0231\u023a\5<\37\6\u0232\u0233\f\4\2\2\u0233\u0234\t\7\2\2\u0234"+ - "\u023a\5<\37\5\u0235\u0236\f\3\2\2\u0236\u0237\5R*\2\u0237\u0238\5<\37"+ - "\4\u0238\u023a\3\2\2\2\u0239\u022f\3\2\2\2\u0239\u0232\3\2\2\2\u0239\u0235"+ - "\3\2\2\2\u023a\u023d\3\2\2\2\u023b\u0239\3\2\2\2\u023b\u023c\3\2\2\2\u023c"+ - "=\3\2\2\2\u023d\u023b\3\2\2\2\u023e\u0252\5@!\2\u023f\u0252\5F$\2\u0240"+ - "\u0252\5P)\2\u0241\u0242\5\\/\2\u0242\u0243\7q\2\2\u0243\u0245\3\2\2\2"+ - "\u0244\u0241\3\2\2\2\u0244\u0245\3\2\2\2\u0245\u0246\3\2\2\2\u0246\u0252"+ - "\7m\2\2\u0247\u0252\5J&\2\u0248\u0249\7\3\2\2\u0249\u024a\5\b\5\2\u024a"+ - "\u024b\7\4\2\2\u024b\u0252\3\2\2\2\u024c\u0252\5\\/\2\u024d\u024e\7\3"+ - "\2\2\u024e\u024f\5,\27\2\u024f\u0250\7\4\2\2\u0250\u0252\3\2\2\2\u0251"+ - "\u023e\3\2\2\2\u0251\u023f\3\2\2\2\u0251\u0240\3\2\2\2\u0251\u0244\3\2"+ - "\2\2\u0251\u0247\3\2\2\2\u0251\u0248\3\2\2\2\u0251\u024c\3\2\2\2\u0251"+ - "\u024d\3\2\2\2\u0252?\3\2\2\2\u0253\u025e\5B\"\2\u0254\u0255\7]\2\2\u0255"+ - "\u0256\5B\"\2\u0256\u0257\7c\2\2\u0257\u025e\3\2\2\2\u0258\u025e\5D#\2"+ - "\u0259\u025a\7]\2\2\u025a\u025b\5D#\2\u025b\u025c\7c\2\2\u025c\u025e\3"+ - "\2\2\2\u025d\u0253\3\2\2\2\u025d\u0254\3\2\2\2\u025d\u0258\3\2\2\2\u025d"+ - "\u0259\3\2\2\2\u025eA\3\2\2\2\u025f\u0260\7\20\2\2\u0260\u0261\7\3\2\2"+ - "\u0261\u0262\5,\27\2\u0262\u0263\7\f\2\2\u0263\u0264\5Z.\2\u0264\u0265"+ - "\7\4\2\2\u0265C\3\2\2\2\u0266\u0267\7\24\2\2\u0267\u0268\7\3\2\2\u0268"+ - "\u0269\5,\27\2\u0269\u026a\7\5\2\2\u026a\u026b\5Z.\2\u026b\u026c\7\4\2"+ - "\2\u026cE\3\2\2\2\u026d\u0273\5H%\2\u026e\u026f\7]\2\2\u026f\u0270\5H"+ - 
"%\2\u0270\u0271\7c\2\2\u0271\u0273\3\2\2\2\u0272\u026d\3\2\2\2\u0272\u026e"+ - "\3\2\2\2\u0273G\3\2\2\2\u0274\u0275\7\37\2\2\u0275\u0276\7\3\2\2\u0276"+ - "\u0277\5^\60\2\u0277\u0278\7#\2\2\u0278\u0279\5<\37\2\u0279\u027a\7\4"+ - "\2\2\u027aI\3\2\2\2\u027b\u0281\5L\'\2\u027c\u027d\7]\2\2\u027d\u027e"+ - "\5L\'\2\u027e\u027f\7c\2\2\u027f\u0281\3\2\2\2\u0280\u027b\3\2\2\2\u0280"+ - "\u027c\3\2\2\2\u0281K\3\2\2\2\u0282\u0283\5N(\2\u0283\u028f\7\3\2\2\u0284"+ - "\u0286\5\36\20\2\u0285\u0284\3\2\2\2\u0285\u0286\3\2\2\2\u0286\u0287\3"+ - "\2\2\2\u0287\u028c\5,\27\2\u0288\u0289\7\5\2\2\u0289\u028b\5,\27\2\u028a"+ - "\u0288\3\2\2\2\u028b\u028e\3\2\2\2\u028c\u028a\3\2\2\2\u028c\u028d\3\2"+ - "\2\2\u028d\u0290\3\2\2\2\u028e\u028c\3\2\2\2\u028f\u0285\3\2\2\2\u028f"+ - "\u0290\3\2\2\2\u0290\u0291\3\2\2\2\u0291\u0292\7\4\2\2\u0292M\3\2\2\2"+ - "\u0293\u0297\7\61\2\2\u0294\u0297\7F\2\2\u0295\u0297\5^\60\2\u0296\u0293"+ - "\3\2\2\2\u0296\u0294\3\2\2\2\u0296\u0295\3\2\2\2\u0297O\3\2\2\2\u0298"+ - "\u02b3\7<\2\2\u0299\u02b3\5V,\2\u029a\u02b3\5f\64\2\u029b\u02b3\5T+\2"+ - "\u029c\u029e\7s\2\2\u029d\u029c\3\2\2\2\u029e\u029f\3\2\2\2\u029f\u029d"+ - "\3\2\2\2\u029f\u02a0\3\2\2\2\u02a0\u02b3\3\2\2\2\u02a1\u02b3\7r\2\2\u02a2"+ - "\u02a3\7_\2\2\u02a3\u02a4\5h\65\2\u02a4\u02a5\7c\2\2\u02a5\u02b3\3\2\2"+ - "\2\u02a6\u02a7\7`\2\2\u02a7\u02a8\5h\65\2\u02a8\u02a9\7c\2\2\u02a9\u02b3"+ - "\3\2\2\2\u02aa\u02ab\7a\2\2\u02ab\u02ac\5h\65\2\u02ac\u02ad\7c\2\2\u02ad"+ - "\u02b3\3\2\2\2\u02ae\u02af\7b\2\2\u02af\u02b0\5h\65\2\u02b0\u02b1\7c\2"+ - "\2\u02b1\u02b3\3\2\2\2\u02b2\u0298\3\2\2\2\u02b2\u0299\3\2\2\2\u02b2\u029a"+ - "\3\2\2\2\u02b2\u029b\3\2\2\2\u02b2\u029d\3\2\2\2\u02b2\u02a1\3\2\2\2\u02b2"+ - "\u02a2\3\2\2\2\u02b2\u02a6\3\2\2\2\u02b2\u02aa\3\2\2\2\u02b2\u02ae\3\2"+ - "\2\2\u02b3Q\3\2\2\2\u02b4\u02b5\t\r\2\2\u02b5S\3\2\2\2\u02b6\u02b7\t\16"+ - "\2\2\u02b7U\3\2\2\2\u02b8\u02ba\7-\2\2\u02b9\u02bb\t\7\2\2\u02ba\u02b9"+ - "\3\2\2\2\u02ba\u02bb\3\2\2\2\u02bb\u02be\3\2\2\2\u02bc\u02bf\5f\64\2\u02bd"+ - "\u02bf\5h\65\2\u02be\u02bc\3\2\2\2\u02be\u02bd\3\2\2\2\u02bf\u02c0\3\2"+ - "\2\2\u02c0\u02c3\5X-\2\u02c1\u02c2\7S\2\2\u02c2\u02c4\5X-\2\u02c3\u02c1"+ - "\3\2\2\2\u02c3\u02c4\3\2\2\2\u02c4W\3\2\2\2\u02c5\u02c6\t\17\2\2\u02c6"+ - "Y\3\2\2\2\u02c7\u02c8\5^\60\2\u02c8[\3\2\2\2\u02c9\u02ca\5^\60\2\u02ca"+ - "\u02cb\7q\2\2\u02cb\u02cd\3\2\2\2\u02cc\u02c9\3\2\2\2\u02cd\u02d0\3\2"+ - "\2\2\u02ce\u02cc\3\2\2\2\u02ce\u02cf\3\2\2\2\u02cf\u02d1\3\2\2\2\u02d0"+ - "\u02ce\3\2\2\2\u02d1\u02d2\5^\60\2\u02d2]\3\2\2\2\u02d3\u02d6\5b\62\2"+ - "\u02d4\u02d6\5d\63\2\u02d5\u02d3\3\2\2\2\u02d5\u02d4\3\2\2\2\u02d6_\3"+ - "\2\2\2\u02d7\u02d8\5^\60\2\u02d8\u02d9\7\6\2\2\u02d9\u02db\3\2\2\2\u02da"+ - "\u02d7\3\2\2\2\u02da\u02db\3\2\2\2\u02db\u02dc\3\2\2\2\u02dc\u02e4\7x"+ - "\2\2\u02dd\u02de\5^\60\2\u02de\u02df\7\6\2\2\u02df\u02e1\3\2\2\2\u02e0"+ - "\u02dd\3\2\2\2\u02e0\u02e1\3\2\2\2\u02e1\u02e2\3\2\2\2\u02e2\u02e4\5^"+ - "\60\2\u02e3\u02da\3\2\2\2\u02e3\u02e0\3\2\2\2\u02e4a\3\2\2\2\u02e5\u02e8"+ - "\7y\2\2\u02e6\u02e8\7z\2\2\u02e7\u02e5\3\2\2\2\u02e7\u02e6\3\2\2\2\u02e8"+ - "c\3\2\2\2\u02e9\u02ed\7v\2\2\u02ea\u02ed\5j\66\2\u02eb\u02ed\7w\2\2\u02ec"+ - "\u02e9\3\2\2\2\u02ec\u02ea\3\2\2\2\u02ec\u02eb\3\2\2\2\u02ede\3\2\2\2"+ - "\u02ee\u02f1\7u\2\2\u02ef\u02f1\7t\2\2\u02f0\u02ee\3\2\2\2\u02f0\u02ef"+ - "\3\2\2\2\u02f1g\3\2\2\2\u02f2\u02f3\t\20\2\2\u02f3i\3\2\2\2\u02f4\u02f5"+ - "\t\21\2\2\u02f5k\3\2\2\2f{}\u0081\u008a\u008c\u0090\u0097\u009e\u00a3"+ - "\u00a8\u00b2\u00b6\u00be\u00c1\u00c7\u00cc\u00cf\u00d4\u00d7\u00dc\u00e4"+ - 
"\u00e7\u00f3\u00f6\u00f9\u0100\u0107\u010b\u010f\u0113\u011a\u011e\u0122"+ - "\u0127\u012b\u0133\u0137\u013e\u0149\u014c\u0150\u015c\u015f\u0165\u016c"+ - "\u0173\u0176\u017a\u017e\u0182\u0184\u018f\u0194\u0198\u019b\u01a1\u01a4"+ - "\u01aa\u01ad\u01af\u01d2\u01da\u01dc\u01e3\u01e8\u01eb\u01f3\u01fc\u0202"+ - "\u020a\u020f\u0215\u0218\u021f\u0227\u022d\u0239\u023b\u0244\u0251\u025d"+ - "\u0272\u0280\u0285\u028c\u028f\u0296\u029f\u02b2\u02ba\u02be\u02c3\u02ce"+ - "\u02d5\u02da\u02e0\u02e3\u02e7\u02ec\u02f0"; + "\64\4\65\t\65\4\66\t\66\4\67\t\67\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4"+ + "\3\4\3\4\3\4\3\4\3\4\3\4\7\4~\n\4\f\4\16\4\u0081\13\4\3\4\5\4\u0084\n"+ + "\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4\u008d\n\4\f\4\16\4\u0090\13\4\3\4\5"+ + "\4\u0093\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u009a\n\4\3\4\3\4\3\4\3\4\3\4\5\4"+ + "\u00a1\n\4\3\4\3\4\3\4\5\4\u00a6\n\4\3\4\3\4\3\4\5\4\u00ab\n\4\3\4\3\4"+ + "\3\4\3\4\3\4\3\4\3\4\3\4\5\4\u00b5\n\4\3\4\3\4\5\4\u00b9\n\4\3\4\3\4\3"+ + "\4\3\4\7\4\u00bf\n\4\f\4\16\4\u00c2\13\4\5\4\u00c4\n\4\3\4\3\4\3\4\3\4"+ + "\5\4\u00ca\n\4\3\4\3\4\3\4\5\4\u00cf\n\4\3\4\5\4\u00d2\n\4\3\4\3\4\3\4"+ + "\5\4\u00d7\n\4\3\4\5\4\u00da\n\4\3\4\3\4\3\4\5\4\u00df\n\4\3\5\3\5\3\5"+ + "\3\5\7\5\u00e5\n\5\f\5\16\5\u00e8\13\5\5\5\u00ea\n\5\3\5\3\5\3\6\3\6\3"+ + "\6\3\6\3\6\3\6\7\6\u00f4\n\6\f\6\16\6\u00f7\13\6\5\6\u00f9\n\6\3\6\5\6"+ + "\u00fc\n\6\3\7\3\7\3\7\3\7\3\7\5\7\u0103\n\7\3\b\3\b\3\b\3\b\3\b\5\b\u010a"+ + "\n\b\3\t\3\t\5\t\u010e\n\t\3\t\3\t\5\t\u0112\n\t\3\n\3\n\5\n\u0116\n\n"+ + "\3\n\3\n\3\n\7\n\u011b\n\n\f\n\16\n\u011e\13\n\3\n\5\n\u0121\n\n\3\n\3"+ + "\n\5\n\u0125\n\n\3\n\3\n\3\n\5\n\u012a\n\n\3\n\3\n\5\n\u012e\n\n\3\13"+ + "\3\13\3\13\3\13\7\13\u0134\n\13\f\13\16\13\u0137\13\13\3\f\5\f\u013a\n"+ + "\f\3\f\3\f\3\f\7\f\u013f\n\f\f\f\16\f\u0142\13\f\3\r\3\r\3\16\3\16\3\16"+ + "\3\16\7\16\u014a\n\16\f\16\16\16\u014d\13\16\5\16\u014f\n\16\3\16\3\16"+ + "\5\16\u0153\n\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\21\3\21\5\21"+ + "\u015f\n\21\3\21\5\21\u0162\n\21\3\22\3\22\7\22\u0166\n\22\f\22\16\22"+ + "\u0169\13\22\3\23\3\23\3\23\3\23\5\23\u016f\n\23\3\23\3\23\3\23\3\23\3"+ + "\23\5\23\u0176\n\23\3\24\5\24\u0179\n\24\3\24\3\24\5\24\u017d\n\24\3\24"+ + "\3\24\5\24\u0181\n\24\3\24\3\24\5\24\u0185\n\24\5\24\u0187\n\24\3\25\3"+ + "\25\3\25\3\25\3\25\3\25\3\25\7\25\u0190\n\25\f\25\16\25\u0193\13\25\3"+ + "\25\3\25\5\25\u0197\n\25\3\26\3\26\5\26\u019b\n\26\3\26\5\26\u019e\n\26"+ + "\3\26\3\26\3\26\3\26\5\26\u01a4\n\26\3\26\5\26\u01a7\n\26\3\26\3\26\3"+ + "\26\3\26\5\26\u01ad\n\26\3\26\5\26\u01b0\n\26\5\26\u01b2\n\26\3\27\3\27"+ + "\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30"+ + "\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30"+ + "\3\30\3\30\3\30\5\30\u01d5\n\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01dd"+ + "\n\30\f\30\16\30\u01e0\13\30\3\31\3\31\7\31\u01e4\n\31\f\31\16\31\u01e7"+ + "\13\31\3\32\3\32\5\32\u01eb\n\32\3\33\5\33\u01ee\n\33\3\33\3\33\3\33\3"+ + "\33\3\33\3\33\5\33\u01f6\n\33\3\33\3\33\3\33\3\33\3\33\7\33\u01fd\n\33"+ + "\f\33\16\33\u0200\13\33\3\33\3\33\3\33\5\33\u0205\n\33\3\33\3\33\3\33"+ + "\3\33\3\33\3\33\5\33\u020d\n\33\3\33\3\33\3\33\5\33\u0212\n\33\3\33\3"+ + "\33\3\33\3\33\5\33\u0218\n\33\3\33\5\33\u021b\n\33\3\34\3\34\3\34\3\35"+ + "\3\35\5\35\u0222\n\35\3\36\3\36\3\36\3\36\3\36\3\36\5\36\u022a\n\36\3"+ + "\37\3\37\3\37\3\37\5\37\u0230\n\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37"+ + "\3\37\3\37\3\37\7\37\u023c\n\37\f\37\16\37\u023f\13\37\3 \3 \3 \3 \3 "+ + "\3 \3 \5 \u0248\n \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \5 \u0255\n \3!\3!"+ 
+ "\3!\3!\3!\3!\3!\3!\3!\3!\5!\u0261\n!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3#\3"+ + "#\3#\5#\u026d\n#\3#\5#\u0270\n#\3$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\5"+ + "%\u027e\n%\3&\3&\3&\3&\3&\3&\3&\3\'\3\'\3\'\3\'\3\'\5\'\u028c\n\'\3(\3"+ + "(\3(\5(\u0291\n(\3(\3(\3(\7(\u0296\n(\f(\16(\u0299\13(\5(\u029b\n(\3("+ + "\3(\3)\3)\3)\5)\u02a2\n)\3*\3*\3*\3*\3*\6*\u02a9\n*\r*\16*\u02aa\3*\3"+ + "*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\5*\u02be\n*\3+\3+\3,\3"+ + ",\3-\3-\5-\u02c6\n-\3-\3-\5-\u02ca\n-\3-\3-\3-\5-\u02cf\n-\3.\3.\3/\3"+ + "/\3\60\3\60\3\60\7\60\u02d8\n\60\f\60\16\60\u02db\13\60\3\60\3\60\3\61"+ + "\3\61\5\61\u02e1\n\61\3\62\3\62\3\62\5\62\u02e6\n\62\3\62\3\62\3\62\3"+ + "\62\5\62\u02ec\n\62\3\62\5\62\u02ef\n\62\3\63\3\63\5\63\u02f3\n\63\3\64"+ + "\3\64\3\64\5\64\u02f8\n\64\3\65\3\65\5\65\u02fc\n\65\3\66\3\66\3\67\3"+ + "\67\3\67\2\4.<8\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62"+ + "\64\668:<>@BDFHJLNPRTVXZ\\^`bdfhjl\2\22\b\2\7\7\t\t\36\36\66\66AAEE\4"+ + "\2((SS\4\2\t\tAA\4\2%%--\3\2\32\33\3\2mn\4\2\7\7vv\4\2\r\r\32\32\4\2#"+ + "#\62\62\4\2\7\7\34\34\3\2oq\3\2fl\4\2\"\"TT\7\2\27\30+,8;LM\\]\3\2tu\31"+ + "\2\b\t\22\23\25\25\27\27\31\31\36\36 #$\'(++//\62\62\65\6688::AAEGIL"+ + "OPRSVWYY\\\\\u035f\2n\3\2\2\2\4q\3\2\2\2\6\u00de\3\2\2\2\b\u00e9\3\2\2"+ + "\2\n\u00ed\3\2\2\2\f\u0102\3\2\2\2\16\u0109\3\2\2\2\20\u010b\3\2\2\2\22"+ + "\u0113\3\2\2\2\24\u012f\3\2\2\2\26\u0139\3\2\2\2\30\u0143\3\2\2\2\32\u0152"+ + "\3\2\2\2\34\u0154\3\2\2\2\36\u015a\3\2\2\2 \u015c\3\2\2\2\"\u0163\3\2"+ + "\2\2$\u0175\3\2\2\2&\u0186\3\2\2\2(\u0196\3\2\2\2*\u01b1\3\2\2\2,\u01b3"+ + "\3\2\2\2.\u01d4\3\2\2\2\60\u01e5\3\2\2\2\62\u01e8\3\2\2\2\64\u021a\3\2"+ + "\2\2\66\u021c\3\2\2\28\u021f\3\2\2\2:\u0229\3\2\2\2<\u022f\3\2\2\2>\u0254"+ + "\3\2\2\2@\u0260\3\2\2\2B\u0262\3\2\2\2D\u0269\3\2\2\2F\u0271\3\2\2\2H"+ + "\u027d\3\2\2\2J\u027f\3\2\2\2L\u028b\3\2\2\2N\u028d\3\2\2\2P\u02a1\3\2"+ + "\2\2R\u02bd\3\2\2\2T\u02bf\3\2\2\2V\u02c1\3\2\2\2X\u02c3\3\2\2\2Z\u02d0"+ + "\3\2\2\2\\\u02d2\3\2\2\2^\u02d9\3\2\2\2`\u02e0\3\2\2\2b\u02ee\3\2\2\2"+ + "d\u02f2\3\2\2\2f\u02f7\3\2\2\2h\u02fb\3\2\2\2j\u02fd\3\2\2\2l\u02ff\3"+ + "\2\2\2no\5\6\4\2op\7\2\2\3p\3\3\2\2\2qr\5,\27\2rs\7\2\2\3s\5\3\2\2\2t"+ + "\u00df\5\b\5\2u\u0083\7 \2\2v\177\7\3\2\2wx\7G\2\2x~\t\2\2\2yz\7$\2\2"+ + "z~\t\3\2\2{|\7Y\2\2|~\5V,\2}w\3\2\2\2}y\3\2\2\2}{\3\2\2\2~\u0081\3\2\2"+ + "\2\177}\3\2\2\2\177\u0080\3\2\2\2\u0080\u0082\3\2\2\2\u0081\177\3\2\2"+ + "\2\u0082\u0084\7\4\2\2\u0083v\3\2\2\2\u0083\u0084\3\2\2\2\u0084\u0085"+ + "\3\2\2\2\u0085\u00df\5\6\4\2\u0086\u0092\7\31\2\2\u0087\u008e\7\3\2\2"+ + "\u0088\u0089\7G\2\2\u0089\u008d\t\4\2\2\u008a\u008b\7$\2\2\u008b\u008d"+ + "\t\3\2\2\u008c\u0088\3\2\2\2\u008c\u008a\3\2\2\2\u008d\u0090\3\2\2\2\u008e"+ + "\u008c\3\2\2\2\u008e\u008f\3\2\2\2\u008f\u0091\3\2\2\2\u0090\u008e\3\2"+ + "\2\2\u0091\u0093\7\4\2\2\u0092\u0087\3\2\2\2\u0092\u0093\3\2\2\2\u0093"+ + "\u0094\3\2\2\2\u0094\u00df\5\6\4\2\u0095\u0096\7O\2\2\u0096\u0099\7R\2"+ + "\2\u0097\u009a\5\66\34\2\u0098\u009a\5b\62\2\u0099\u0097\3\2\2\2\u0099"+ + "\u0098\3\2\2\2\u0099\u009a\3\2\2\2\u009a\u00df\3\2\2\2\u009b\u009c\7O"+ + "\2\2\u009c\u009d\7\23\2\2\u009d\u00a0\t\5\2\2\u009e\u00a1\5\66\34\2\u009f"+ + "\u00a1\5b\62\2\u00a0\u009e\3\2\2\2\u00a0\u009f\3\2\2\2\u00a1\u00df\3\2"+ + "\2\2\u00a2\u00a5\t\6\2\2\u00a3\u00a6\5\66\34\2\u00a4\u00a6\5b\62\2\u00a5"+ + "\u00a3\3\2\2\2\u00a5\u00a4\3\2\2\2\u00a6\u00df\3\2\2\2\u00a7\u00a8\7O"+ + "\2\2\u00a8\u00aa\7\'\2\2\u00a9\u00ab\5\66\34\2\u00aa\u00a9\3\2\2\2\u00aa"+ + 
"\u00ab\3\2\2\2\u00ab\u00df\3\2\2\2\u00ac\u00ad\7O\2\2\u00ad\u00df\7K\2"+ + "\2\u00ae\u00af\7P\2\2\u00af\u00df\7\22\2\2\u00b0\u00b1\7P\2\2\u00b1\u00b4"+ + "\7R\2\2\u00b2\u00b3\7\21\2\2\u00b3\u00b5\5\66\34\2\u00b4\u00b2\3\2\2\2"+ + "\u00b4\u00b5\3\2\2\2\u00b5\u00b8\3\2\2\2\u00b6\u00b9\5\66\34\2\u00b7\u00b9"+ + "\5b\62\2\u00b8\u00b6\3\2\2\2\u00b8\u00b7\3\2\2\2\u00b8\u00b9\3\2\2\2\u00b9"+ + "\u00c3\3\2\2\2\u00ba\u00bb\7V\2\2\u00bb\u00c0\5j\66\2\u00bc\u00bd\7\5"+ + "\2\2\u00bd\u00bf\5j\66\2\u00be\u00bc\3\2\2\2\u00bf\u00c2\3\2\2\2\u00c0"+ + "\u00be\3\2\2\2\u00c0\u00c1\3\2\2\2\u00c1\u00c4\3\2\2\2\u00c2\u00c0\3\2"+ + "\2\2\u00c3\u00ba\3\2\2\2\u00c3\u00c4\3\2\2\2\u00c4\u00df\3\2\2\2\u00c5"+ + "\u00c6\7P\2\2\u00c6\u00c9\7\23\2\2\u00c7\u00c8\7\21\2\2\u00c8\u00ca\5"+ + "j\66\2\u00c9\u00c7\3\2\2\2\u00c9\u00ca\3\2\2\2\u00ca\u00ce\3\2\2\2\u00cb"+ + "\u00cc\7Q\2\2\u00cc\u00cf\5\66\34\2\u00cd\u00cf\5b\62\2\u00ce\u00cb\3"+ + "\2\2\2\u00ce\u00cd\3\2\2\2\u00ce\u00cf\3\2\2\2\u00cf\u00d1\3\2\2\2\u00d0"+ + "\u00d2\5\66\34\2\u00d1\u00d0\3\2\2\2\u00d1\u00d2\3\2\2\2\u00d2\u00df\3"+ + "\2\2\2\u00d3\u00d4\7P\2\2\u00d4\u00d9\7W\2\2\u00d5\u00d7\t\7\2\2\u00d6"+ + "\u00d5\3\2\2\2\u00d6\u00d7\3\2\2\2\u00d7\u00d8\3\2\2\2\u00d8\u00da\5h"+ + "\65\2\u00d9\u00d6\3\2\2\2\u00d9\u00da\3\2\2\2\u00da\u00df\3\2\2\2\u00db"+ + "\u00dc\7P\2\2\u00dc\u00dd\7Q\2\2\u00dd\u00df\7W\2\2\u00det\3\2\2\2\u00de"+ + "u\3\2\2\2\u00de\u0086\3\2\2\2\u00de\u0095\3\2\2\2\u00de\u009b\3\2\2\2"+ + "\u00de\u00a2\3\2\2\2\u00de\u00a7\3\2\2\2\u00de\u00ac\3\2\2\2\u00de\u00ae"+ + "\3\2\2\2\u00de\u00b0\3\2\2\2\u00de\u00c5\3\2\2\2\u00de\u00d3\3\2\2\2\u00de"+ + "\u00db\3\2\2\2\u00df\7\3\2\2\2\u00e0\u00e1\7[\2\2\u00e1\u00e6\5\34\17"+ + "\2\u00e2\u00e3\7\5\2\2\u00e3\u00e5\5\34\17\2\u00e4\u00e2\3\2\2\2\u00e5"+ + "\u00e8\3\2\2\2\u00e6\u00e4\3\2\2\2\u00e6\u00e7\3\2\2\2\u00e7\u00ea\3\2"+ + "\2\2\u00e8\u00e6\3\2\2\2\u00e9\u00e0\3\2\2\2\u00e9\u00ea\3\2\2\2\u00ea"+ + "\u00eb\3\2\2\2\u00eb\u00ec\5\n\6\2\u00ec\t\3\2\2\2\u00ed\u00f8\5\16\b"+ + "\2\u00ee\u00ef\7C\2\2\u00ef\u00f0\7\17\2\2\u00f0\u00f5\5\20\t\2\u00f1"+ + "\u00f2\7\5\2\2\u00f2\u00f4\5\20\t\2\u00f3\u00f1\3\2\2\2\u00f4\u00f7\3"+ + "\2\2\2\u00f5\u00f3\3\2\2\2\u00f5\u00f6\3\2\2\2\u00f6\u00f9\3\2\2\2\u00f7"+ + "\u00f5\3\2\2\2\u00f8\u00ee\3\2\2\2\u00f8\u00f9\3\2\2\2\u00f9\u00fb\3\2"+ + "\2\2\u00fa\u00fc\5\f\7\2\u00fb\u00fa\3\2\2\2\u00fb\u00fc\3\2\2\2\u00fc"+ + "\13\3\2\2\2\u00fd\u00fe\7\65\2\2\u00fe\u0103\t\b\2\2\u00ff\u0100\7`\2"+ + "\2\u0100\u0101\t\b\2\2\u0101\u0103\7e\2\2\u0102\u00fd\3\2\2\2\u0102\u00ff"+ + "\3\2\2\2\u0103\r\3\2\2\2\u0104\u010a\5\22\n\2\u0105\u0106\7\3\2\2\u0106"+ + "\u0107\5\n\6\2\u0107\u0108\7\4\2\2\u0108\u010a\3\2\2\2\u0109\u0104\3\2"+ + "\2\2\u0109\u0105\3\2\2\2\u010a\17\3\2\2\2\u010b\u010d\5,\27\2\u010c\u010e"+ + "\t\t\2\2\u010d\u010c\3\2\2\2\u010d\u010e\3\2\2\2\u010e\u0111\3\2\2\2\u010f"+ + "\u0110\7?\2\2\u0110\u0112\t\n\2\2\u0111\u010f\3\2\2\2\u0111\u0112\3\2"+ + "\2\2\u0112\21\3\2\2\2\u0113\u0115\7N\2\2\u0114\u0116\5\36\20\2\u0115\u0114"+ + "\3\2\2\2\u0115\u0116\3\2\2\2\u0116\u0117\3\2\2\2\u0117\u011c\5 \21\2\u0118"+ + "\u0119\7\5\2\2\u0119\u011b\5 \21\2\u011a\u0118\3\2\2\2\u011b\u011e\3\2"+ + "\2\2\u011c\u011a\3\2\2\2\u011c\u011d\3\2\2\2\u011d\u0120\3\2\2\2\u011e"+ + "\u011c\3\2\2\2\u011f\u0121\5\24\13\2\u0120\u011f\3\2\2\2\u0120\u0121\3"+ + "\2\2\2\u0121\u0124\3\2\2\2\u0122\u0123\7Z\2\2\u0123\u0125\5.\30\2\u0124"+ + "\u0122\3\2\2\2\u0124\u0125\3\2\2\2\u0125\u0129\3\2\2\2\u0126\u0127\7)"+ + "\2\2\u0127\u0128\7\17\2\2\u0128\u012a\5\26\f\2\u0129\u0126\3\2\2\2\u0129"+ + 
"\u012a\3\2\2\2\u012a\u012d\3\2\2\2\u012b\u012c\7*\2\2\u012c\u012e\5.\30"+ + "\2\u012d\u012b\3\2\2\2\u012d\u012e\3\2\2\2\u012e\23\3\2\2\2\u012f\u0130"+ + "\7%\2\2\u0130\u0135\5\"\22\2\u0131\u0132\7\5\2\2\u0132\u0134\5\"\22\2"+ + "\u0133\u0131\3\2\2\2\u0134\u0137\3\2\2\2\u0135\u0133\3\2\2\2\u0135\u0136"+ + "\3\2\2\2\u0136\25\3\2\2\2\u0137\u0135\3\2\2\2\u0138\u013a\5\36\20\2\u0139"+ + "\u0138\3\2\2\2\u0139\u013a\3\2\2\2\u013a\u013b\3\2\2\2\u013b\u0140\5\30"+ + "\r\2\u013c\u013d\7\5\2\2\u013d\u013f\5\30\r\2\u013e\u013c\3\2\2\2\u013f"+ + "\u0142\3\2\2\2\u0140\u013e\3\2\2\2\u0140\u0141\3\2\2\2\u0141\27\3\2\2"+ + "\2\u0142\u0140\3\2\2\2\u0143\u0144\5\32\16\2\u0144\31\3\2\2\2\u0145\u014e"+ + "\7\3\2\2\u0146\u014b\5,\27\2\u0147\u0148\7\5\2\2\u0148\u014a\5,\27\2\u0149"+ + "\u0147\3\2\2\2\u014a\u014d\3\2\2\2\u014b\u0149\3\2\2\2\u014b\u014c\3\2"+ + "\2\2\u014c\u014f\3\2\2\2\u014d\u014b\3\2\2\2\u014e\u0146\3\2\2\2\u014e"+ + "\u014f\3\2\2\2\u014f\u0150\3\2\2\2\u0150\u0153\7\4\2\2\u0151\u0153\5,"+ + "\27\2\u0152\u0145\3\2\2\2\u0152\u0151\3\2\2\2\u0153\33\3\2\2\2\u0154\u0155"+ + "\5`\61\2\u0155\u0156\7\f\2\2\u0156\u0157\7\3\2\2\u0157\u0158\5\n\6\2\u0158"+ + "\u0159\7\4\2\2\u0159\35\3\2\2\2\u015a\u015b\t\13\2\2\u015b\37\3\2\2\2"+ + "\u015c\u0161\5,\27\2\u015d\u015f\7\f\2\2\u015e\u015d\3\2\2\2\u015e\u015f"+ + "\3\2\2\2\u015f\u0160\3\2\2\2\u0160\u0162\5`\61\2\u0161\u015e\3\2\2\2\u0161"+ + "\u0162\3\2\2\2\u0162!\3\2\2\2\u0163\u0167\5*\26\2\u0164\u0166\5$\23\2"+ + "\u0165\u0164\3\2\2\2\u0166\u0169\3\2\2\2\u0167\u0165\3\2\2\2\u0167\u0168"+ + "\3\2\2\2\u0168#\3\2\2\2\u0169\u0167\3\2\2\2\u016a\u016b\5&\24\2\u016b"+ + "\u016c\7\61\2\2\u016c\u016e\5*\26\2\u016d\u016f\5(\25\2\u016e\u016d\3"+ + "\2\2\2\u016e\u016f\3\2\2\2\u016f\u0176\3\2\2\2\u0170\u0171\7<\2\2\u0171"+ + "\u0172\5&\24\2\u0172\u0173\7\61\2\2\u0173\u0174\5*\26\2\u0174\u0176\3"+ + "\2\2\2\u0175\u016a\3\2\2\2\u0175\u0170\3\2\2\2\u0176%\3\2\2\2\u0177\u0179"+ + "\7.\2\2\u0178\u0177\3\2\2\2\u0178\u0179\3\2\2\2\u0179\u0187\3\2\2\2\u017a"+ + "\u017c\7\63\2\2\u017b\u017d\7D\2\2\u017c\u017b\3\2\2\2\u017c\u017d\3\2"+ + "\2\2\u017d\u0187\3\2\2\2\u017e\u0180\7H\2\2\u017f\u0181\7D\2\2\u0180\u017f"+ + "\3\2\2\2\u0180\u0181\3\2\2\2\u0181\u0187\3\2\2\2\u0182\u0184\7&\2\2\u0183"+ + "\u0185\7D\2\2\u0184\u0183\3\2\2\2\u0184\u0185\3\2\2\2\u0185\u0187\3\2"+ + "\2\2\u0186\u0178\3\2\2\2\u0186\u017a\3\2\2\2\u0186\u017e\3\2\2\2\u0186"+ + "\u0182\3\2\2\2\u0187\'\3\2\2\2\u0188\u0189\7@\2\2\u0189\u0197\5.\30\2"+ + "\u018a\u018b\7X\2\2\u018b\u018c\7\3\2\2\u018c\u0191\5`\61\2\u018d\u018e"+ + "\7\5\2\2\u018e\u0190\5`\61\2\u018f\u018d\3\2\2\2\u0190\u0193\3\2\2\2\u0191"+ + "\u018f\3\2\2\2\u0191\u0192\3\2\2\2\u0192\u0194\3\2\2\2\u0193\u0191\3\2"+ + "\2\2\u0194\u0195\7\4\2\2\u0195\u0197\3\2\2\2\u0196\u0188\3\2\2\2\u0196"+ + "\u018a\3\2\2\2\u0197)\3\2\2\2\u0198\u019d\5b\62\2\u0199\u019b\7\f\2\2"+ + "\u019a\u0199\3\2\2\2\u019a\u019b\3\2\2\2\u019b\u019c\3\2\2\2\u019c\u019e"+ + "\5^\60\2\u019d\u019a\3\2\2\2\u019d\u019e\3\2\2\2\u019e\u01b2\3\2\2\2\u019f"+ + "\u01a0\7\3\2\2\u01a0\u01a1\5\n\6\2\u01a1\u01a6\7\4\2\2\u01a2\u01a4\7\f"+ + "\2\2\u01a3\u01a2\3\2\2\2\u01a3\u01a4\3\2\2\2\u01a4\u01a5\3\2\2\2\u01a5"+ + "\u01a7\5^\60\2\u01a6\u01a3\3\2\2\2\u01a6\u01a7\3\2\2\2\u01a7\u01b2\3\2"+ + "\2\2\u01a8\u01a9\7\3\2\2\u01a9\u01aa\5\"\22\2\u01aa\u01af\7\4\2\2\u01ab"+ + "\u01ad\7\f\2\2\u01ac\u01ab\3\2\2\2\u01ac\u01ad\3\2\2\2\u01ad\u01ae\3\2"+ + "\2\2\u01ae\u01b0\5^\60\2\u01af\u01ac\3\2\2\2\u01af\u01b0\3\2\2\2\u01b0"+ + "\u01b2\3\2\2\2\u01b1\u0198\3\2\2\2\u01b1\u019f\3\2\2\2\u01b1\u01a8\3\2"+ + 
"\2\2\u01b2+\3\2\2\2\u01b3\u01b4\5.\30\2\u01b4-\3\2\2\2\u01b5\u01b6\b\30"+ + "\1\2\u01b6\u01b7\7=\2\2\u01b7\u01d5\5.\30\n\u01b8\u01b9\7\37\2\2\u01b9"+ + "\u01ba\7\3\2\2\u01ba\u01bb\5\b\5\2\u01bb\u01bc\7\4\2\2\u01bc\u01d5\3\2"+ + "\2\2\u01bd\u01be\7J\2\2\u01be\u01bf\7\3\2\2\u01bf\u01c0\5j\66\2\u01c0"+ + "\u01c1\5\60\31\2\u01c1\u01c2\7\4\2\2\u01c2\u01d5\3\2\2\2\u01c3\u01c4\7"+ + "\67\2\2\u01c4\u01c5\7\3\2\2\u01c5\u01c6\5^\60\2\u01c6\u01c7\7\5\2\2\u01c7"+ + "\u01c8\5j\66\2\u01c8\u01c9\5\60\31\2\u01c9\u01ca\7\4\2\2\u01ca\u01d5\3"+ + "\2\2\2\u01cb\u01cc\7\67\2\2\u01cc\u01cd\7\3\2\2\u01cd\u01ce\5j\66\2\u01ce"+ + "\u01cf\7\5\2\2\u01cf\u01d0\5j\66\2\u01d0\u01d1\5\60\31\2\u01d1\u01d2\7"+ + "\4\2\2\u01d2\u01d5\3\2\2\2\u01d3\u01d5\5\62\32\2\u01d4\u01b5\3\2\2\2\u01d4"+ + "\u01b8\3\2\2\2\u01d4\u01bd\3\2\2\2\u01d4\u01c3\3\2\2\2\u01d4\u01cb\3\2"+ + "\2\2\u01d4\u01d3\3\2\2\2\u01d5\u01de\3\2\2\2\u01d6\u01d7\f\4\2\2\u01d7"+ + "\u01d8\7\n\2\2\u01d8\u01dd\5.\30\5\u01d9\u01da\f\3\2\2\u01da\u01db\7B"+ + "\2\2\u01db\u01dd\5.\30\4\u01dc\u01d6\3\2\2\2\u01dc\u01d9\3\2\2\2\u01dd"+ + "\u01e0\3\2\2\2\u01de\u01dc\3\2\2\2\u01de\u01df\3\2\2\2\u01df/\3\2\2\2"+ + "\u01e0\u01de\3\2\2\2\u01e1\u01e2\7\5\2\2\u01e2\u01e4\5j\66\2\u01e3\u01e1"+ + "\3\2\2\2\u01e4\u01e7\3\2\2\2\u01e5\u01e3\3\2\2\2\u01e5\u01e6\3\2\2\2\u01e6"+ + "\61\3\2\2\2\u01e7\u01e5\3\2\2\2\u01e8\u01ea\5<\37\2\u01e9\u01eb\5\64\33"+ + "\2\u01ea\u01e9\3\2\2\2\u01ea\u01eb\3\2\2\2\u01eb\63\3\2\2\2\u01ec\u01ee"+ + "\7=\2\2\u01ed\u01ec\3\2\2\2\u01ed\u01ee\3\2\2\2\u01ee\u01ef\3\2\2\2\u01ef"+ + "\u01f0\7\16\2\2\u01f0\u01f1\5<\37\2\u01f1\u01f2\7\n\2\2\u01f2\u01f3\5"+ + "<\37\2\u01f3\u021b\3\2\2\2\u01f4\u01f6\7=\2\2\u01f5\u01f4\3\2\2\2\u01f5"+ + "\u01f6\3\2\2\2\u01f6\u01f7\3\2\2\2\u01f7\u01f8\7-\2\2\u01f8\u01f9\7\3"+ + "\2\2\u01f9\u01fe\5,\27\2\u01fa\u01fb\7\5\2\2\u01fb\u01fd\5,\27\2\u01fc"+ + "\u01fa\3\2\2\2\u01fd\u0200\3\2\2\2\u01fe\u01fc\3\2\2\2\u01fe\u01ff\3\2"+ + "\2\2\u01ff\u0201\3\2\2\2\u0200\u01fe\3\2\2\2\u0201\u0202\7\4\2\2\u0202"+ + "\u021b\3\2\2\2\u0203\u0205\7=\2\2\u0204\u0203\3\2\2\2\u0204\u0205\3\2"+ + "\2\2\u0205\u0206\3\2\2\2\u0206\u0207\7-\2\2\u0207\u0208\7\3\2\2\u0208"+ + "\u0209\5\b\5\2\u0209\u020a\7\4\2\2\u020a\u021b\3\2\2\2\u020b\u020d\7="+ + "\2\2\u020c\u020b\3\2\2\2\u020c\u020d\3\2\2\2\u020d\u020e\3\2\2\2\u020e"+ + "\u020f\7\64\2\2\u020f\u021b\58\35\2\u0210\u0212\7=\2\2\u0211\u0210\3\2"+ + "\2\2\u0211\u0212\3\2\2\2\u0212\u0213\3\2\2\2\u0213\u0214\7I\2\2\u0214"+ + "\u021b\5j\66\2\u0215\u0217\7\60\2\2\u0216\u0218\7=\2\2\u0217\u0216\3\2"+ + "\2\2\u0217\u0218\3\2\2\2\u0218\u0219\3\2\2\2\u0219\u021b\7>\2\2\u021a"+ + "\u01ed\3\2\2\2\u021a\u01f5\3\2\2\2\u021a\u0204\3\2\2\2\u021a\u020c\3\2"+ + "\2\2\u021a\u0211\3\2\2\2\u021a\u0215\3\2\2\2\u021b\65\3\2\2\2\u021c\u021d"+ + "\7\64\2\2\u021d\u021e\58\35\2\u021e\67\3\2\2\2\u021f\u0221\5j\66\2\u0220"+ + "\u0222\5:\36\2\u0221\u0220\3\2\2\2\u0221\u0222\3\2\2\2\u02229\3\2\2\2"+ + "\u0223\u0224\7\35\2\2\u0224\u022a\5j\66\2\u0225\u0226\7^\2\2\u0226\u0227"+ + "\5j\66\2\u0227\u0228\7e\2\2\u0228\u022a\3\2\2\2\u0229\u0223\3\2\2\2\u0229"+ + "\u0225\3\2\2\2\u022a;\3\2\2\2\u022b\u022c\b\37\1\2\u022c\u0230\5> \2\u022d"+ + "\u022e\t\7\2\2\u022e\u0230\5<\37\6\u022f\u022b\3\2\2\2\u022f\u022d\3\2"+ + "\2\2\u0230\u023d\3\2\2\2\u0231\u0232\f\5\2\2\u0232\u0233\t\f\2\2\u0233"+ + "\u023c\5<\37\6\u0234\u0235\f\4\2\2\u0235\u0236\t\7\2\2\u0236\u023c\5<"+ + "\37\5\u0237\u0238\f\3\2\2\u0238\u0239\5T+\2\u0239\u023a\5<\37\4\u023a"+ + "\u023c\3\2\2\2\u023b\u0231\3\2\2\2\u023b\u0234\3\2\2\2\u023b\u0237\3\2"+ + 
"\2\2\u023c\u023f\3\2\2\2\u023d\u023b\3\2\2\2\u023d\u023e\3\2\2\2\u023e"+ + "=\3\2\2\2\u023f\u023d\3\2\2\2\u0240\u0255\5@!\2\u0241\u0255\5H%\2\u0242"+ + "\u0255\5D#\2\u0243\u0255\5R*\2\u0244\u0245\5^\60\2\u0245\u0246\7s\2\2"+ + "\u0246\u0248\3\2\2\2\u0247\u0244\3\2\2\2\u0247\u0248\3\2\2\2\u0248\u0249"+ + "\3\2\2\2\u0249\u0255\7o\2\2\u024a\u0255\5L\'\2\u024b\u024c\7\3\2\2\u024c"+ + "\u024d\5\b\5\2\u024d\u024e\7\4\2\2\u024e\u0255\3\2\2\2\u024f\u0255\5^"+ + "\60\2\u0250\u0251\7\3\2\2\u0251\u0252\5,\27\2\u0252\u0253\7\4\2\2\u0253"+ + "\u0255\3\2\2\2\u0254\u0240\3\2\2\2\u0254\u0241\3\2\2\2\u0254\u0242\3\2"+ + "\2\2\u0254\u0243\3\2\2\2\u0254\u0247\3\2\2\2\u0254\u024a\3\2\2\2\u0254"+ + "\u024b\3\2\2\2\u0254\u024f\3\2\2\2\u0254\u0250\3\2\2\2\u0255?\3\2\2\2"+ + "\u0256\u0261\5B\"\2\u0257\u0258\7_\2\2\u0258\u0259\5B\"\2\u0259\u025a"+ + "\7e\2\2\u025a\u0261\3\2\2\2\u025b\u0261\5F$\2\u025c\u025d\7_\2\2\u025d"+ + "\u025e\5F$\2\u025e\u025f\7e\2\2\u025f\u0261\3\2\2\2\u0260\u0256\3\2\2"+ + "\2\u0260\u0257\3\2\2\2\u0260\u025b\3\2\2\2\u0260\u025c\3\2\2\2\u0261A"+ + "\3\2\2\2\u0262\u0263\7\20\2\2\u0263\u0264\7\3\2\2\u0264\u0265\5,\27\2"+ + "\u0265\u0266\7\f\2\2\u0266\u0267\5\\/\2\u0267\u0268\7\4\2\2\u0268C\3\2"+ + "\2\2\u0269\u026f\7\26\2\2\u026a\u026c\7\3\2\2\u026b\u026d\7v\2\2\u026c"+ + "\u026b\3\2\2\2\u026c\u026d\3\2\2\2\u026d\u026e\3\2\2\2\u026e\u0270\7\4"+ + "\2\2\u026f\u026a\3\2\2\2\u026f\u0270\3\2\2\2\u0270E\3\2\2\2\u0271\u0272"+ + "\7\24\2\2\u0272\u0273\7\3\2\2\u0273\u0274\5,\27\2\u0274\u0275\7\5\2\2"+ + "\u0275\u0276\5\\/\2\u0276\u0277\7\4\2\2\u0277G\3\2\2\2\u0278\u027e\5J"+ + "&\2\u0279\u027a\7_\2\2\u027a\u027b\5J&\2\u027b\u027c\7e\2\2\u027c\u027e"+ + "\3\2\2\2\u027d\u0278\3\2\2\2\u027d\u0279\3\2\2\2\u027eI\3\2\2\2\u027f"+ + "\u0280\7!\2\2\u0280\u0281\7\3\2\2\u0281\u0282\5`\61\2\u0282\u0283\7%\2"+ + "\2\u0283\u0284\5<\37\2\u0284\u0285\7\4\2\2\u0285K\3\2\2\2\u0286\u028c"+ + "\5N(\2\u0287\u0288\7_\2\2\u0288\u0289\5N(\2\u0289\u028a\7e\2\2\u028a\u028c"+ + "\3\2\2\2\u028b\u0286\3\2\2\2\u028b\u0287\3\2\2\2\u028cM\3\2\2\2\u028d"+ + "\u028e\5P)\2\u028e\u029a\7\3\2\2\u028f\u0291\5\36\20\2\u0290\u028f\3\2"+ + "\2\2\u0290\u0291\3\2\2\2\u0291\u0292\3\2\2\2\u0292\u0297\5,\27\2\u0293"+ + "\u0294\7\5\2\2\u0294\u0296\5,\27\2\u0295\u0293\3\2\2\2\u0296\u0299\3\2"+ + "\2\2\u0297\u0295\3\2\2\2\u0297\u0298\3\2\2\2\u0298\u029b\3\2\2\2\u0299"+ + "\u0297\3\2\2\2\u029a\u0290\3\2\2\2\u029a\u029b\3\2\2\2\u029b\u029c\3\2"+ + "\2\2\u029c\u029d\7\4\2\2\u029dO\3\2\2\2\u029e\u02a2\7\63\2\2\u029f\u02a2"+ + "\7H\2\2\u02a0\u02a2\5`\61\2\u02a1\u029e\3\2\2\2\u02a1\u029f\3\2\2\2\u02a1"+ + "\u02a0\3\2\2\2\u02a2Q\3\2\2\2\u02a3\u02be\7>\2\2\u02a4\u02be\5X-\2\u02a5"+ + "\u02be\5h\65\2\u02a6\u02be\5V,\2\u02a7\u02a9\7u\2\2\u02a8\u02a7\3\2\2"+ + "\2\u02a9\u02aa\3\2\2\2\u02aa\u02a8\3\2\2\2\u02aa\u02ab\3\2\2\2\u02ab\u02be"+ + "\3\2\2\2\u02ac\u02be\7t\2\2\u02ad\u02ae\7a\2\2\u02ae\u02af\5j\66\2\u02af"+ + "\u02b0\7e\2\2\u02b0\u02be\3\2\2\2\u02b1\u02b2\7b\2\2\u02b2\u02b3\5j\66"+ + "\2\u02b3\u02b4\7e\2\2\u02b4\u02be\3\2\2\2\u02b5\u02b6\7c\2\2\u02b6\u02b7"+ + "\5j\66\2\u02b7\u02b8\7e\2\2\u02b8\u02be\3\2\2\2\u02b9\u02ba\7d\2\2\u02ba"+ + "\u02bb\5j\66\2\u02bb\u02bc\7e\2\2\u02bc\u02be\3\2\2\2\u02bd\u02a3\3\2"+ + "\2\2\u02bd\u02a4\3\2\2\2\u02bd\u02a5\3\2\2\2\u02bd\u02a6\3\2\2\2\u02bd"+ + "\u02a8\3\2\2\2\u02bd\u02ac\3\2\2\2\u02bd\u02ad\3\2\2\2\u02bd\u02b1\3\2"+ + "\2\2\u02bd\u02b5\3\2\2\2\u02bd\u02b9\3\2\2\2\u02beS\3\2\2\2\u02bf\u02c0"+ + "\t\r\2\2\u02c0U\3\2\2\2\u02c1\u02c2\t\16\2\2\u02c2W\3\2\2\2\u02c3\u02c5"+ + 
"\7/\2\2\u02c4\u02c6\t\7\2\2\u02c5\u02c4\3\2\2\2\u02c5\u02c6\3\2\2\2\u02c6"+ + "\u02c9\3\2\2\2\u02c7\u02ca\5h\65\2\u02c8\u02ca\5j\66\2\u02c9\u02c7\3\2"+ + "\2\2\u02c9\u02c8\3\2\2\2\u02ca\u02cb\3\2\2\2\u02cb\u02ce\5Z.\2\u02cc\u02cd"+ + "\7U\2\2\u02cd\u02cf\5Z.\2\u02ce\u02cc\3\2\2\2\u02ce\u02cf\3\2\2\2\u02cf"+ + "Y\3\2\2\2\u02d0\u02d1\t\17\2\2\u02d1[\3\2\2\2\u02d2\u02d3\5`\61\2\u02d3"+ + "]\3\2\2\2\u02d4\u02d5\5`\61\2\u02d5\u02d6\7s\2\2\u02d6\u02d8\3\2\2\2\u02d7"+ + "\u02d4\3\2\2\2\u02d8\u02db\3\2\2\2\u02d9\u02d7\3\2\2\2\u02d9\u02da\3\2"+ + "\2\2\u02da\u02dc\3\2\2\2\u02db\u02d9\3\2\2\2\u02dc\u02dd\5`\61\2\u02dd"+ + "_\3\2\2\2\u02de\u02e1\5d\63\2\u02df\u02e1\5f\64\2\u02e0\u02de\3\2\2\2"+ + "\u02e0\u02df\3\2\2\2\u02e1a\3\2\2\2\u02e2\u02e3\5`\61\2\u02e3\u02e4\7"+ + "\6\2\2\u02e4\u02e6\3\2\2\2\u02e5\u02e2\3\2\2\2\u02e5\u02e6\3\2\2\2\u02e6"+ + "\u02e7\3\2\2\2\u02e7\u02ef\7z\2\2\u02e8\u02e9\5`\61\2\u02e9\u02ea\7\6"+ + "\2\2\u02ea\u02ec\3\2\2\2\u02eb\u02e8\3\2\2\2\u02eb\u02ec\3\2\2\2\u02ec"+ + "\u02ed\3\2\2\2\u02ed\u02ef\5`\61\2\u02ee\u02e5\3\2\2\2\u02ee\u02eb\3\2"+ + "\2\2\u02efc\3\2\2\2\u02f0\u02f3\7{\2\2\u02f1\u02f3\7|\2\2\u02f2\u02f0"+ + "\3\2\2\2\u02f2\u02f1\3\2\2\2\u02f3e\3\2\2\2\u02f4\u02f8\7x\2\2\u02f5\u02f8"+ + "\5l\67\2\u02f6\u02f8\7y\2\2\u02f7\u02f4\3\2\2\2\u02f7\u02f5\3\2\2\2\u02f7"+ + "\u02f6\3\2\2\2\u02f8g\3\2\2\2\u02f9\u02fc\7w\2\2\u02fa\u02fc\7v\2\2\u02fb"+ + "\u02f9\3\2\2\2\u02fb\u02fa\3\2\2\2\u02fci\3\2\2\2\u02fd\u02fe\t\20\2\2"+ + "\u02fek\3\2\2\2\u02ff\u0300\t\21\2\2\u0300m\3\2\2\2h}\177\u0083\u008c"+ + "\u008e\u0092\u0099\u00a0\u00a5\u00aa\u00b4\u00b8\u00c0\u00c3\u00c9\u00ce"+ + "\u00d1\u00d6\u00d9\u00de\u00e6\u00e9\u00f5\u00f8\u00fb\u0102\u0109\u010d"+ + "\u0111\u0115\u011c\u0120\u0124\u0129\u012d\u0135\u0139\u0140\u014b\u014e"+ + "\u0152\u015e\u0161\u0167\u016e\u0175\u0178\u017c\u0180\u0184\u0186\u0191"+ + "\u0196\u019a\u019d\u01a3\u01a6\u01ac\u01af\u01b1\u01d4\u01dc\u01de\u01e5"+ + "\u01ea\u01ed\u01f5\u01fe\u0204\u020c\u0211\u0217\u021a\u0221\u0229\u022f"+ + "\u023b\u023d\u0247\u0254\u0260\u026c\u026f\u027d\u028b\u0290\u0297\u029a"+ + "\u02a1\u02aa\u02bd\u02c5\u02c9\u02ce\u02d9\u02e0\u02e5\u02eb\u02ee\u02f2"+ + "\u02f7\u02fb"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java index c7e3e31df01..1575e310c14 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java @@ -372,6 +372,13 @@ interface SqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitExtract(SqlBaseParser.ExtractContext ctx); + /** + * Visit a parse tree produced by the {@code currentDateTimeFunction} + * labeled alternative in {@link SqlBaseParser#primaryExpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx); /** * Visit a parse tree produced by the {@code constantDefault} * labeled alternative in {@link SqlBaseParser#primaryExpression}. @@ -426,6 +433,12 @@ interface SqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitCastTemplate(SqlBaseParser.CastTemplateContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#builtinDateTimeFunction}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitBuiltinDateTimeFunction(SqlBaseParser.BuiltinDateTimeFunctionContext ctx); /** * Visit a parse tree produced by {@link SqlBaseParser#convertTemplate}. * @param ctx the parse tree * @return the visitor result diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java index 57c2d4156af..35ba50ab75a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFuncti import org.elasticsearch.xpack.sql.expression.function.aggregate.CompoundNumericAggregate; import org.elasticsearch.xpack.sql.expression.function.aggregate.Count; import org.elasticsearch.xpack.sql.expression.function.aggregate.InnerAggregate; +import org.elasticsearch.xpack.sql.expression.function.grouping.GroupingFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction; @@ -336,6 +337,11 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> { TimeZone dt = DataType.DATE == child.dataType() ? UTC : null; queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, dt)); } + // handle histogram + else if (child instanceof GroupingFunction) { + queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, null)); + } + + // fallback to regular agg functions else { // the only thing left is agg function Check.isTrue(Functions.isAggregate(child), diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java index 2c1f5a1e449..23352af790d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.ExpressionId; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.FieldAttribute; +import org.elasticsearch.xpack.sql.expression.Foldables; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.NamedExpression; import org.elasticsearch.xpack.sql.expression.function.Function; @@ -27,9 +28,12 @@ import org.elasticsearch.xpack.sql.expression.function.aggregate.PercentileRanks import org.elasticsearch.xpack.sql.expression.function.aggregate.Percentiles; import org.elasticsearch.xpack.sql.expression.function.aggregate.Stats; import org.elasticsearch.xpack.sql.expression.function.aggregate.Sum; +import org.elasticsearch.xpack.sql.expression.function.grouping.GroupingFunction; +import org.elasticsearch.xpack.sql.expression.function.grouping.Histogram; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeHistogramFunction; +import org.elasticsearch.xpack.sql.expression.literal.Intervals; import
org.elasticsearch.xpack.sql.expression.predicate.Range; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MatchQueryPredicate; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MultiMatchQueryPredicate; @@ -57,10 +61,10 @@ import org.elasticsearch.xpack.sql.querydsl.agg.AndAggFilter; import org.elasticsearch.xpack.sql.querydsl.agg.AvgAgg; import org.elasticsearch.xpack.sql.querydsl.agg.CardinalityAgg; import org.elasticsearch.xpack.sql.querydsl.agg.ExtendedStatsAgg; -import org.elasticsearch.xpack.sql.querydsl.agg.GroupByColumnKey; -import org.elasticsearch.xpack.sql.querydsl.agg.GroupByDateKey; +import org.elasticsearch.xpack.sql.querydsl.agg.GroupByDateHistogram; import org.elasticsearch.xpack.sql.querydsl.agg.GroupByKey; -import org.elasticsearch.xpack.sql.querydsl.agg.GroupByScriptKey; +import org.elasticsearch.xpack.sql.querydsl.agg.GroupByNumericHistogram; +import org.elasticsearch.xpack.sql.querydsl.agg.GroupByValue; import org.elasticsearch.xpack.sql.querydsl.agg.LeafAgg; import org.elasticsearch.xpack.sql.querydsl.agg.MatrixStatsAgg; import org.elasticsearch.xpack.sql.querydsl.agg.MaxAgg; @@ -85,6 +89,7 @@ import org.elasticsearch.xpack.sql.querydsl.query.TermQuery; import org.elasticsearch.xpack.sql.querydsl.query.TermsQuery; import org.elasticsearch.xpack.sql.querydsl.query.WildcardQuery; import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.Check; import org.elasticsearch.xpack.sql.util.ReflectionUtils; @@ -231,10 +236,16 @@ final class QueryTranslator { Map<ExpressionId, GroupByKey> aggMap = new LinkedHashMap<>(); for (Expression exp : groupings) { + GroupByKey key = null; + ExpressionId id; String aggId; + if (exp instanceof NamedExpression) { NamedExpression ne = (NamedExpression) exp; + id = ne.id(); + aggId = id.toString(); + // change analyzed to non-analyzed attributes if (exp instanceof FieldAttribute) { FieldAttribute fa = (FieldAttribute) exp; @@ -242,21 +253,51 @@ final class QueryTranslator { ne = fa.exactAttribute(); } } - aggId = ne.id().toString(); - - GroupByKey key; // handle functions differently if (exp instanceof Function) { // dates are handled differently because of date histograms if (exp instanceof DateTimeHistogramFunction) { DateTimeHistogramFunction dthf = (DateTimeHistogramFunction) exp; - key = new GroupByDateKey(aggId, nameOf(exp), dthf.interval(), dthf.timeZone()); + key = new GroupByDateHistogram(aggId, nameOf(exp), dthf.interval(), dthf.timeZone()); } // all other scalar functions become a script else if (exp instanceof ScalarFunction) { ScalarFunction sf = (ScalarFunction) exp; - key = new GroupByScriptKey(aggId, nameOf(exp), sf.asScript()); + key = new GroupByValue(aggId, sf.asScript()); + } + // histogram + else if (exp instanceof GroupingFunction) { + if (exp instanceof Histogram) { + Histogram h = (Histogram) exp; + Expression field = h.field(); + + // date histogram + if (h.dataType() == DataType.DATE) { + long intervalAsMillis = Intervals.inMillis(h.interval()); + // TODO: set timezone + if (field instanceof FieldAttribute || field instanceof DateTimeHistogramFunction) { + key = new GroupByDateHistogram(aggId, nameOf(field), intervalAsMillis, h.timeZone()); + } else if (field instanceof Function) { + key = new GroupByDateHistogram(aggId, ((Function) field).asScript(), intervalAsMillis, h.timeZone()); + } + } + // numeric histogram + else { + if (field instanceof FieldAttribute || field instanceof DateTimeHistogramFunction) { + key = new
GroupByNumericHistogram(aggId, nameOf(field), Foldables.doubleValueOf(h.interval())); + } else if (field instanceof Function) { + key = new GroupByNumericHistogram(aggId, ((Function) field).asScript(), + Foldables.doubleValueOf(h.interval())); + } + } + if (key == null) { + throw new SqlIllegalArgumentException("Unsupported histogram field {}", field); + } + } + else { + throw new SqlIllegalArgumentException("Unsupported grouping function {}", exp); + } } // bumped into an invalid function (which should be caught by the verifier) else { @@ -264,14 +305,14 @@ final class QueryTranslator { } } else { - key = new GroupByColumnKey(aggId, ne.name()); + key = new GroupByValue(aggId, ne.name()); } - - aggMap.put(ne.id(), key); } else { throw new SqlIllegalArgumentException("Don't know how to group on {}", exp.nodeString()); } + + aggMap.put(id, key); } return new GroupingContext(aggMap); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java index 00e316ffabe..cf4a66131cf 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java @@ -27,7 +27,7 @@ public class RestSqlClearCursorAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestSqlClearCursorAction.class)); - RestSqlClearCursorAction(Settings settings, RestController controller) { + public RestSqlClearCursorAction(Settings settings, RestController controller) { super(settings); // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java index 2158d0a0037..bd03715ee22 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java @@ -38,7 +38,7 @@ public class RestSqlQueryAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestSqlQueryAction.class)); - RestSqlQueryAction(Settings settings, RestController controller) { + public RestSqlQueryAction(Settings settings, RestController controller) { super(settings); // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( @@ -56,8 +56,8 @@ public class RestSqlQueryAction extends BaseRestHandler { SqlQueryRequest sqlRequest; try (XContentParser parser = request.contentOrSourceParamParser()) { sqlRequest = SqlQueryRequest.fromXContent(parser); - } - + } + /* * Since we support {@link TextFormat} and * {@link XContent} outputs we can't use {@link RestToXContentListener} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java index 39872b4da7c..989f94672df 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java @@ -15,9 
+15,11 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.sql.action.SqlClearCursorRequest; import org.elasticsearch.xpack.sql.action.SqlClearCursorResponse; import org.elasticsearch.xpack.sql.execution.PlanExecutor; +import org.elasticsearch.xpack.sql.proto.Protocol; import org.elasticsearch.xpack.sql.session.Configuration; import org.elasticsearch.xpack.sql.session.Cursor; import org.elasticsearch.xpack.sql.session.Cursors; +import org.elasticsearch.xpack.sql.util.DateUtils; import static org.elasticsearch.xpack.sql.action.SqlClearCursorAction.NAME; @@ -42,7 +44,10 @@ public class TransportSqlClearCursorAction extends HandledTransportAction listener) { Cursor cursor = Cursors.decodeFromString(request.getCursor()); - planExecutor.cleanCursor(Configuration.DEFAULT, cursor, ActionListener.wrap( + planExecutor.cleanCursor( + new Configuration(DateUtils.UTC_TZ, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, + request.mode(), "", ""), + cursor, ActionListener.wrap( success -> listener.onResponse(new SqlClearCursorResponse(success)), listener::onFailure)); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/Agg.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/Agg.java index 09c92a77ac3..10448eae511 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/Agg.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/Agg.java @@ -27,7 +27,7 @@ public abstract class Agg { return id; } - public String fieldName() { + protected String fieldName() { return fieldName; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/Aggs.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/Aggs.java index b8faedec718..c7ab17670a2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/Aggs.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/Aggs.java @@ -10,6 +10,7 @@ import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregati import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregationBuilder; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction; import java.util.ArrayList; @@ -39,15 +40,15 @@ public class Aggs { public static final String ROOT_GROUP_NAME = "groupby"; - public static final GroupByKey IMPLICIT_GROUP_KEY = new GroupByKey(ROOT_GROUP_NAME, EMPTY, null) { + public static final GroupByKey IMPLICIT_GROUP_KEY = new GroupByKey(ROOT_GROUP_NAME, EMPTY, null, null) { @Override - public CompositeValuesSourceBuilder asValueSource() { + public CompositeValuesSourceBuilder createSourceBuilder() { throw new SqlIllegalArgumentException("Default group does not translate to an aggregation"); } @Override - protected GroupByKey copy(String id, String fieldName, Direction direction) { + protected GroupByKey copy(String id, String fieldName, ScriptTemplate script, Direction direction) { return this; } }; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByColumnKey.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByColumnKey.java deleted file mode 100644 
index 931eaee6464..00000000000 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByColumnKey.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.querydsl.agg; - -import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; -import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction; - -/** - * GROUP BY key for regular fields. - */ -public class GroupByColumnKey extends GroupByKey { - - public GroupByColumnKey(String id, String fieldName) { - this(id, fieldName, null); - } - - public GroupByColumnKey(String id, String fieldName, Direction direction) { - super(id, fieldName, direction); - } - - @Override - public TermsValuesSourceBuilder asValueSource() { - return new TermsValuesSourceBuilder(id()) - .field(fieldName()) - .order(direction().asOrder()) - .missingBucket(true); - } - - @Override - protected GroupByKey copy(String id, String fieldName, Direction direction) { - return new GroupByColumnKey(id, fieldName, direction); - } -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateHistogram.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateHistogram.java new file mode 100644 index 00000000000..714d5238bb7 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateHistogram.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.querydsl.agg; + +import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction; +import org.joda.time.DateTimeZone; + +import java.util.Objects; +import java.util.TimeZone; + +/** + * GROUP BY key based on histograms on date fields. 
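+ * An illustrative example (based on the QueryTranslator change in this patch): {@code HISTOGRAM(date, INTERVAL 1 YEAR)} is folded via {@code Intervals.inMillis} into a fixed interval in milliseconds and rendered here as a {@code date_histogram} composite value source.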
+ */ +public class GroupByDateHistogram extends GroupByKey { + + private final long interval; + private final TimeZone timeZone; + + public GroupByDateHistogram(String id, String fieldName, long interval, TimeZone timeZone) { + this(id, fieldName, null, null, interval, timeZone); + } + + public GroupByDateHistogram(String id, ScriptTemplate script, long interval, TimeZone timeZone) { + this(id, null, script, null, interval, timeZone); + } + + private GroupByDateHistogram(String id, String fieldName, ScriptTemplate script, Direction direction, long interval, + TimeZone timeZone) { + super(id, fieldName, script, direction); + this.interval = interval; + this.timeZone = timeZone; + + } + + @Override + protected CompositeValuesSourceBuilder createSourceBuilder() { + return new DateHistogramValuesSourceBuilder(id()) + .interval(interval) + .timeZone(DateTimeZone.forTimeZone(timeZone)); + } + + @Override + protected GroupByKey copy(String id, String fieldName, ScriptTemplate script, Direction direction) { + return new GroupByDateHistogram(id, fieldName, script, direction, interval, timeZone); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), interval, timeZone); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj)) { + GroupByDateHistogram other = (GroupByDateHistogram) obj; + return Objects.equals(interval, other.interval) + && Objects.equals(timeZone, other.timeZone); + } + return false; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateKey.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateKey.java deleted file mode 100644 index 61c00c706ee..00000000000 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateKey.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.querydsl.agg; - -import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder; -import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction; -import org.joda.time.DateTimeZone; - -import java.util.Objects; -import java.util.TimeZone; - -/** - * GROUP BY key specific for date fields. 
- */ -public class GroupByDateKey extends GroupByKey { - - private final String interval; - private final TimeZone timeZone; - - public GroupByDateKey(String id, String fieldName, String interval, TimeZone timeZone) { - this(id, fieldName, null, interval, timeZone); - } - - public GroupByDateKey(String id, String fieldName, Direction direction, String interval, TimeZone timeZone) { - super(id, fieldName, direction); - this.interval = interval; - this.timeZone = timeZone; - } - - public String interval() { - return interval; - } - - public TimeZone timeZone() { - return timeZone; - } - - @Override - public DateHistogramValuesSourceBuilder asValueSource() { - return new DateHistogramValuesSourceBuilder(id()) - .field(fieldName()) - .dateHistogramInterval(new DateHistogramInterval(interval)) - .timeZone(DateTimeZone.forTimeZone(timeZone)) - .missingBucket(true); - } - - @Override - protected GroupByKey copy(String id, String fieldName, Direction direction) { - return new GroupByDateKey(id, fieldName, direction, interval, timeZone); - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), interval, timeZone); - } - - @Override - public boolean equals(Object obj) { - if (super.equals(obj)) { - GroupByDateKey other = (GroupByDateKey) obj; - return Objects.equals(interval, other.interval) - && Objects.equals(timeZone, other.timeZone); - } - return false; - } -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java index fd2bd5799df..7d74c1c3330 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java @@ -6,7 +6,10 @@ package org.elasticsearch.xpack.sql.querydsl.agg; import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; +import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction; +import org.elasticsearch.xpack.sql.type.DataType; import java.util.Objects; @@ -15,33 +18,64 @@ import java.util.Objects; */ public abstract class GroupByKey extends Agg { - private final Direction direction; + protected final Direction direction; + private final ScriptTemplate script; - GroupByKey(String id, String fieldName, Direction direction) { + protected GroupByKey(String id, String fieldName, ScriptTemplate script, Direction direction) { super(id, fieldName); // ASC is the default order of CompositeValueSource this.direction = direction == null ? 
Direction.ASC : direction; + this.script = script; } - public Direction direction() { - return direction; + public final CompositeValuesSourceBuilder asValueSource() { + CompositeValuesSourceBuilder builder = createSourceBuilder(); + + if (script != null) { + builder.script(script.toPainless()); + if (script.outputType().isInteger()) { + builder.valueType(ValueType.LONG); + } else if (script.outputType().isRational()) { + builder.valueType(ValueType.DOUBLE); + } else if (script.outputType().isString()) { + builder.valueType(ValueType.STRING); + } else if (script.outputType() == DataType.DATE) { + builder.valueType(ValueType.DATE); + } else if (script.outputType() == DataType.BOOLEAN) { + builder.valueType(ValueType.BOOLEAN); + } else if (script.outputType() == DataType.IP) { + builder.valueType(ValueType.IP); + } + } + // field based + else { + builder.field(fieldName()); + } + return builder.order(direction.asOrder()) + .missingBucket(true); } - public abstract CompositeValuesSourceBuilder asValueSource(); + protected abstract CompositeValuesSourceBuilder createSourceBuilder(); - protected abstract GroupByKey copy(String id, String fieldName, Direction direction); + protected abstract GroupByKey copy(String id, String fieldName, ScriptTemplate script, Direction direction); public GroupByKey with(Direction direction) { - return this.direction == direction ? this : copy(id(), fieldName(), direction); + return this.direction == direction ? this : copy(id(), fieldName(), script, direction); + } + + public ScriptTemplate script() { + return script; } @Override public int hashCode() { - return Objects.hash(id(), fieldName(), direction); + return Objects.hash(id(), fieldName(), script, direction); } @Override public boolean equals(Object obj) { - return super.equals(obj) && Objects.equals(direction, ((GroupByKey) obj).direction); + return super.equals(obj) + && Objects.equals(script, ((GroupByKey) obj).script) + && Objects.equals(direction, ((GroupByKey) obj).direction); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByNumericHistogram.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByNumericHistogram.java new file mode 100644 index 00000000000..a03faede75f --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByNumericHistogram.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.querydsl.agg; + +import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.HistogramValuesSourceBuilder; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction; + +import java.util.Objects; + +/** + * GROUP BY key based on histograms on numeric fields. 
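+ * An illustrative example (based on the QueryTranslator change in this patch): {@code HISTOGRAM(salary, 5000)} has its interval folded to a double via {@code Foldables.doubleValueOf} and is rendered as a {@code histogram} composite value source bucketing values at multiples of 5000.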
+ */ +public class GroupByNumericHistogram extends GroupByKey { + + private final double interval; + + public GroupByNumericHistogram(String id, String fieldName, double interval) { + this(id, fieldName, null, null, interval); + } + + public GroupByNumericHistogram(String id, ScriptTemplate script, double interval) { + this(id, null, script, null, interval); + } + + private GroupByNumericHistogram(String id, String fieldName, ScriptTemplate script, Direction direction, double interval) { + super(id, fieldName, script, direction); + this.interval = interval; + } + + @Override + protected CompositeValuesSourceBuilder createSourceBuilder() { + return new HistogramValuesSourceBuilder(id()) + .interval(interval); + } + + @Override + protected GroupByKey copy(String id, String fieldName, ScriptTemplate script, Direction direction) { + return new GroupByNumericHistogram(id, fieldName, script, direction, interval); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), interval); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj)) { + GroupByNumericHistogram other = (GroupByNumericHistogram) obj; + return interval == other.interval; + } + return false; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByScriptKey.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByScriptKey.java deleted file mode 100644 index 9c907be38da..00000000000 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByScriptKey.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.querydsl.agg; - -import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; -import org.elasticsearch.search.aggregations.support.ValueType; -import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction; -import org.elasticsearch.xpack.sql.type.DataType; - -import java.util.Objects; - -/** - * GROUP BY key for scripts (typically caused by functions). 
- */ -public class GroupByScriptKey extends GroupByKey { - - private final ScriptTemplate script; - - public GroupByScriptKey(String id, String fieldName, ScriptTemplate script) { - this(id, fieldName, null, script); - } - - public GroupByScriptKey(String id, String fieldName, Direction direction, ScriptTemplate script) { - super(id, fieldName, direction); - this.script = script; - } - - public ScriptTemplate script() { - return script; - } - - @Override - public TermsValuesSourceBuilder asValueSource() { - TermsValuesSourceBuilder builder = new TermsValuesSourceBuilder(id()) - .script(script.toPainless()) - .order(direction().asOrder()) - .missingBucket(true); - - if (script.outputType().isInteger()) { - builder.valueType(ValueType.LONG); - } else if (script.outputType().isRational()) { - builder.valueType(ValueType.DOUBLE); - } else if (script.outputType().isString()) { - builder.valueType(ValueType.STRING); - } else if (script.outputType() == DataType.DATE) { - builder.valueType(ValueType.DATE); - } else if (script.outputType() == DataType.BOOLEAN) { - builder.valueType(ValueType.BOOLEAN); - } else if (script.outputType() == DataType.IP) { - builder.valueType(ValueType.IP); - } - - return builder; - } - - @Override - protected GroupByKey copy(String id, String fieldName, Direction direction) { - return new GroupByScriptKey(id, fieldName, direction, script); - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), script); - } - - @Override - public boolean equals(Object obj) { - return super.equals(obj) && Objects.equals(script, ((GroupByScriptKey) obj).script); - } -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByValue.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByValue.java new file mode 100644 index 00000000000..12e09dbc52d --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByValue.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.querydsl.agg; + +import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction; + +/** + * GROUP BY key for fields or scripts. 
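+ * Replaces both GroupByColumnKey and GroupByScriptKey: only the terms source is built here, while the base {@code GroupByKey#asValueSource()} wires in either the field name or the script plus its value type.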
+ */ +public class GroupByValue extends GroupByKey { + + public GroupByValue(String id, String fieldName) { + this(id, fieldName, null, null); + } + + public GroupByValue(String id, ScriptTemplate script) { + this(id, null, script, null); + } + + private GroupByValue(String id, String fieldName, ScriptTemplate script, Direction direction) { + super(id, fieldName, script, direction); + } + + @Override + protected CompositeValuesSourceBuilder createSourceBuilder() { + return new TermsValuesSourceBuilder(id()); + } + + @Override + protected GroupByKey copy(String id, String fieldName, ScriptTemplate script, Direction direction) { + return new GroupByValue(id, fieldName, script, direction); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/MultiMatchQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/MultiMatchQuery.java index 81c990f85bd..30def4db3da 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/MultiMatchQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/MultiMatchQuery.java @@ -6,10 +6,10 @@ package org.elasticsearch.xpack.sql.querydsl.query; import org.elasticsearch.common.Booleans; -import org.elasticsearch.index.query.Operator; -import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.MultiMatchQueryBuilder; +import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MultiMatchQueryPredicate; import org.elasticsearch.xpack.sql.tree.Location; @@ -32,7 +32,6 @@ public class MultiMatchQuery extends LeafQuery { appliers.put("lenient", (qb, s) -> qb.lenient(Booleans.parseBoolean(s))); appliers.put("cutoff_frequency", (qb, s) -> qb.cutoffFrequency(Float.valueOf(s))); appliers.put("tie_breaker", (qb, s) -> qb.tieBreaker(Float.valueOf(s))); - appliers.put("use_dis_max", (qb, s) -> qb.useDisMax(Booleans.parseBoolean(s))); appliers.put("fuzzy_rewrite", (qb, s) -> qb.fuzzyRewrite(s)); appliers.put("minimum_should_match", (qb, s) -> qb.minimumShouldMatch(s)); appliers.put("operator", (qb, s) -> qb.operator(Operator.fromString(s))); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/rule/RuleExecutor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/rule/RuleExecutor.java index de4b6dac478..2ed68def135 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/rule/RuleExecutor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/rule/RuleExecutor.java @@ -152,6 +152,9 @@ public abstract class RuleExecutor> { batchRuns++; for (Rule rule : batch.rules) { + if (log.isTraceEnabled()) { + log.trace("About to apply rule {}", rule); + } Transformation tf = new Transformation(currentPlan, rule); tfs.add(tf); currentPlan = tf.after; @@ -192,4 +195,4 @@ public abstract class RuleExecutor> { return new ExecutionInfo(plan, currentPlan, transformations); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java index 376976dfb3b..4e2965809f2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java 
@@ -9,15 +9,12 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.sql.proto.Mode; -import org.elasticsearch.xpack.sql.proto.Protocol; +import java.time.ZonedDateTime; import java.util.TimeZone; -// Typed object holding properties for a given action +// Typed object holding properties for a given query public class Configuration { - public static final Configuration DEFAULT = new Configuration(TimeZone.getTimeZone("UTC"), - Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, Mode.PLAIN, null, null); - private final TimeZone timeZone; private final int pageSize; private final TimeValue requestTimeout; @@ -25,6 +22,7 @@ public class Configuration { private final Mode mode; private final String username; private final String clusterName; + private final ZonedDateTime now; @Nullable private QueryBuilder filter; @@ -39,6 +37,7 @@ public class Configuration { this.mode = mode == null ? Mode.PLAIN : mode; this.username = username; this.clusterName = clusterName; + this.now = ZonedDateTime.now(timeZone.toZoneId().normalized()); } public TimeZone timeZone() { @@ -71,4 +70,8 @@ public class Configuration { public String clusterName() { return clusterName; } + + public ZonedDateTime now() { + return now; + } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java index 814f8427a17..8e774bf6a4f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java @@ -16,13 +16,15 @@ import java.time.LocalDateTime; import java.time.ZoneId; import java.time.ZoneOffset; import java.time.ZonedDateTime; +import java.util.TimeZone; public class DateUtils { // TODO: do we have a java.time based parser we can use instead? private static final DateTimeFormatter UTC_DATE_FORMATTER = ISODateTimeFormat.dateOptionalTimeParser().withZoneUTC(); - public static ZoneId UTC = ZoneId.of("UTC"); + public static TimeZone UTC_TZ = TimeZone.getTimeZone("UTC"); + public static ZoneId UTC_ZI = ZoneId.of("Z"); private DateUtils() {} @@ -31,7 +33,7 @@ public class DateUtils { * Creates a date from the millis since epoch (thus the time-zone is UTC). 
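* For example, {@code of(0L)} returns {@code 1970-01-01T00:00Z}.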
*/ public static ZonedDateTime of(long millis) { - return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), UTC); + return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), UTC_ZI); } /** diff --git a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt index 3d0e8ed0fab..b5b19004eee 100644 --- a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt +++ b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt @@ -46,10 +46,10 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalS # # Null # - Object coalesce(java.util.List) - Object greatest(java.util.List) - Object least(java.util.List) - Object nullif(Object, Object) + def coalesce(java.util.List) + def greatest(java.util.List) + def least(java.util.List) + def nullif(Object, Object) # # Regex diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/TestUtils.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/TestUtils.java new file mode 100644 index 00000000000..d0c1c06239d --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/TestUtils.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql; + +import org.elasticsearch.xpack.sql.proto.Mode; +import org.elasticsearch.xpack.sql.proto.Protocol; +import org.elasticsearch.xpack.sql.session.Configuration; +import org.elasticsearch.xpack.sql.util.DateUtils; + +public class TestUtils { + + private TestUtils() {} + + public static final Configuration TEST_CFG = new Configuration(DateUtils.UTC_TZ, Protocol.FETCH_SIZE, + Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, Mode.PLAIN, null, null); + +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java index c364a6a1acb..607810efc66 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.analysis.analyzer; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.TestUtils; import org.elasticsearch.xpack.sql.analysis.index.EsIndex; import org.elasticsearch.xpack.sql.analysis.index.IndexResolution; import org.elasticsearch.xpack.sql.analysis.index.MappingException; @@ -17,7 +18,6 @@ import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.sql.parser.SqlParser; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.Project; -import org.elasticsearch.xpack.sql.session.Configuration; import org.elasticsearch.xpack.sql.stats.Metrics; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.EsField; @@ -52,7 +52,7 @@ public class FieldAttributeTests extends ESTestCase { EsIndex test = new EsIndex("test", mapping); getIndexResult = IndexResolution.valid(test); - analyzer = new 
Analyzer(Configuration.DEFAULT, functionRegistry, getIndexResult, verifier); + analyzer = new Analyzer(TestUtils.TEST_CFG, functionRegistry, getIndexResult, verifier); } private LogicalPlan plan(String sql) { @@ -169,7 +169,7 @@ public class FieldAttributeTests extends ESTestCase { EsIndex index = new EsIndex("test", mapping); getIndexResult = IndexResolution.valid(index); - analyzer = new Analyzer(Configuration.DEFAULT, functionRegistry, getIndexResult, verifier); + analyzer = new Analyzer(TestUtils.TEST_CFG, functionRegistry, getIndexResult, verifier); VerificationException ex = expectThrows(VerificationException.class, () -> plan("SELECT test.bar FROM test")); assertEquals( diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index 12064523ae8..18f544767fa 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.analysis.analyzer; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.TestUtils; import org.elasticsearch.xpack.sql.analysis.AnalysisException; import org.elasticsearch.xpack.sql.analysis.index.EsIndex; import org.elasticsearch.xpack.sql.analysis.index.IndexResolution; @@ -18,7 +19,6 @@ import org.elasticsearch.xpack.sql.expression.predicate.conditional.Least; import org.elasticsearch.xpack.sql.expression.predicate.conditional.NullIf; import org.elasticsearch.xpack.sql.parser.SqlParser; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.sql.session.Configuration; import org.elasticsearch.xpack.sql.stats.Metrics; import org.elasticsearch.xpack.sql.type.EsField; import org.elasticsearch.xpack.sql.type.TypesTests; @@ -35,7 +35,7 @@ public class VerifierErrorMessagesTests extends ESTestCase { } private String error(IndexResolution getIndexResult, String sql) { - Analyzer analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), getIndexResult, new Verifier(new Metrics())); + Analyzer analyzer = new Analyzer(TestUtils.TEST_CFG, new FunctionRegistry(), getIndexResult, new Verifier(new Metrics())); AnalysisException e = expectThrows(AnalysisException.class, () -> analyzer.analyze(parser.createStatement(sql), true)); assertTrue(e.getMessage().startsWith("Found ")); String header = "Found 1 problem(s)\nline "; @@ -49,7 +49,7 @@ public class VerifierErrorMessagesTests extends ESTestCase { } private LogicalPlan accept(IndexResolution resolution, String sql) { - Analyzer analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), resolution, new Verifier(new Metrics())); + Analyzer analyzer = new Analyzer(TestUtils.TEST_CFG, new FunctionRegistry(), resolution, new Verifier(new Metrics())); return analyzer.analyze(parser.createStatement(sql), true); } @@ -178,11 +178,24 @@ public class VerifierErrorMessagesTests extends ESTestCase { } // GROUP BY + public void testGroupBySelectWithAlias() { + assertNotNull(accept("SELECT int AS i FROM test GROUP BY i")); + } + + public void testGroupBySelectWithAliasOrderOnActualField() { + assertNotNull(accept("SELECT int AS i FROM test GROUP BY i ORDER BY int")); + } + public void testGroupBySelectNonGrouped() { assertEquals("1:8: Cannot use non-grouped 
column [text], expected [int]", error("SELECT text, int FROM test GROUP BY int")); } + public void testGroupByFunctionSelectFieldFromGroupByFunction() { + assertEquals("1:8: Cannot use non-grouped column [int], expected [ABS(int)]", + error("SELECT int FROM test GROUP BY ABS(int)")); + } + public void testGroupByOrderByNonGrouped() { assertEquals("1:50: Cannot order by non-grouped column [bool], expected [text]", error("SELECT MAX(int) FROM test GROUP BY text ORDER BY bool")); @@ -203,13 +216,18 @@ public class VerifierErrorMessagesTests extends ESTestCase { error("SELECT MAX(int) FROM test GROUP BY text ORDER BY YEAR(date)")); } + public void testGroupByOrderByFieldFromGroupByFunction() { + assertEquals("1:54: Cannot use non-grouped column [int], expected [ABS(int)]", + error("SELECT ABS(int) FROM test GROUP BY ABS(int) ORDER BY int")); + } + public void testGroupByOrderByScalarOverNonGrouped_WithHaving() { assertEquals("1:71: Cannot order by non-grouped column [YEAR(date [UTC])], expected [text]", error("SELECT MAX(int) FROM test GROUP BY text HAVING MAX(int) > 10 ORDER BY YEAR(date)")); } public void testGroupByHavingNonGrouped() { - assertEquals("1:48: Cannot filter by non-grouped column [int], expected [text]", + assertEquals("1:48: Cannot filter HAVING on non-aggregate [int]; consider using WHERE instead", error("SELECT AVG(int) FROM test GROUP BY text HAVING int > 10")); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CursorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CursorTests.java index 68fd6aa0bb4..83545ebdde6 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CursorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CursorTests.java @@ -12,12 +12,12 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.SqlException; +import org.elasticsearch.xpack.sql.TestUtils; import org.elasticsearch.xpack.sql.action.CliFormatter; import org.elasticsearch.xpack.sql.action.SqlQueryResponse; import org.elasticsearch.xpack.sql.plugin.CliFormatterCursor; import org.elasticsearch.xpack.sql.proto.ColumnInfo; import org.elasticsearch.xpack.sql.proto.Mode; -import org.elasticsearch.xpack.sql.session.Configuration; import org.elasticsearch.xpack.sql.session.Cursor; import org.elasticsearch.xpack.sql.session.Cursors; import org.mockito.ArgumentCaptor; @@ -39,7 +39,7 @@ public class CursorTests extends ESTestCase { Client clientMock = mock(Client.class); Cursor cursor = Cursor.EMPTY; PlainActionFuture future = newFuture(); - cursor.clear(Configuration.DEFAULT, clientMock, future); + cursor.clear(TestUtils.TEST_CFG, clientMock, future); assertFalse(future.actionGet()); verifyZeroInteractions(clientMock); } @@ -51,7 +51,7 @@ public class CursorTests extends ESTestCase { String cursorString = randomAlphaOfLength(10); Cursor cursor = new ScrollCursor(cursorString, Collections.emptyList(), randomInt()); - cursor.clear(Configuration.DEFAULT, clientMock, listenerMock); + cursor.clear(TestUtils.TEST_CFG, clientMock, listenerMock); ArgumentCaptor request = ArgumentCaptor.forClass(ClearScrollRequest.class); verify(clientMock).clearScroll(request.capture(), any(ActionListener.class)); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java index ae7830ad6d9..75f37f8e71f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.function.Score; import org.elasticsearch.xpack.sql.querydsl.agg.AvgAgg; -import org.elasticsearch.xpack.sql.querydsl.agg.GroupByColumnKey; +import org.elasticsearch.xpack.sql.querydsl.agg.GroupByValue; import org.elasticsearch.xpack.sql.querydsl.container.AttributeSort; import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer; import org.elasticsearch.xpack.sql.querydsl.container.ScoreSort; @@ -62,7 +62,7 @@ public class SourceGeneratorTests extends ESTestCase { } public void testLimit() { - QueryContainer container = new QueryContainer().withLimit(10).addGroups(singletonList(new GroupByColumnKey("1", "field"))); + QueryContainer container = new QueryContainer().withLimit(10).addGroups(singletonList(new GroupByValue("1", "field"))); int size = randomIntBetween(1, 10); SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, size); Builder aggBuilder = sourceBuilder.aggregations(); @@ -114,7 +114,7 @@ public class SourceGeneratorTests extends ESTestCase { public void testNoSortIfAgg() { QueryContainer container = new QueryContainer() - .addGroups(singletonList(new GroupByColumnKey("group_id", "group_column"))) + .addGroups(singletonList(new GroupByValue("group_id", "group_column"))) .addAgg("group_id", new AvgAgg("agg_id", "avg_column")); SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); assertNull(sourceBuilder.sorts()); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/TyperResolutionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/TyperResolutionTests.java new file mode 100644 index 00000000000..27bfcaf3722 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/TyperResolutionTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.expression; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.expression.Expression.TypeResolution; +import org.elasticsearch.xpack.sql.expression.literal.IntervalYearMonth; +import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mul; +import org.elasticsearch.xpack.sql.type.DataType; + +import java.time.Period; + +import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; + +public class TyperResolutionTests extends ESTestCase { + + public void testMulNumeric() { + Mul m = new Mul(EMPTY, L(1), L(2)); + assertEquals(TypeResolution.TYPE_RESOLVED, m.typeResolved()); + } + + public void testMulIntervalAndNumber() { + Mul m = new Mul(EMPTY, L(1), randomYearInterval()); + assertEquals(TypeResolution.TYPE_RESOLVED, m.typeResolved()); + } + + public void testMulNumberAndInterval() { + Mul m = new Mul(EMPTY, randomYearInterval(), L(1)); + assertEquals(TypeResolution.TYPE_RESOLVED, m.typeResolved()); + } + + public void testMulTypeResolution() throws Exception { + Mul mul = new Mul(EMPTY, randomYearInterval(), randomYearInterval()); + assertTrue(mul.typeResolved().unresolved()); + } + + private static Literal randomYearInterval() { + return Literal.of(EMPTY, new IntervalYearMonth(Period.ofMonths(randomInt(123)), DataType.INTERVAL_YEAR_TO_MONTH)); + } + + private static Literal L(Object value) { + return Literal.of(EMPTY, value); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java index 63008e8f27f..cbd2bf8bfde 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java @@ -59,7 +59,7 @@ public class FunctionRegistryTests extends ESTestCase { return new DummyFunction(l); }, "DUMMY_FUNCTION")); FunctionDefinition def = r.resolveFunction(ur.name()); - assertFalse(def.datetime()); + assertFalse(def.extractViable()); assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location()); // Distinct isn't supported @@ -88,7 +88,7 @@ public class FunctionRegistryTests extends ESTestCase { }, "DUMMY_FUNCTION")); FunctionDefinition def = r.resolveFunction(ur.name()); assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location()); - assertFalse(def.datetime()); + assertFalse(def.extractViable()); // No children aren't supported ParsingException e = expectThrows(ParsingException.class, () -> @@ -113,7 +113,7 @@ public class FunctionRegistryTests extends ESTestCase { }, "DUMMY_FUNCTION")); FunctionDefinition def = r.resolveFunction(ur.name()); assertEquals(ur.location(), ur.buildResolved(providedConfiguration, def).location()); - assertTrue(def.datetime()); + assertTrue(def.extractViable()); // Distinct isn't supported ParsingException e = expectThrows(ParsingException.class, () -> @@ -140,7 +140,7 @@ public class FunctionRegistryTests extends ESTestCase { }, "DUMMY_FUNCTION")); FunctionDefinition def = r.resolveFunction(ur.name()); assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location()); - assertFalse(def.datetime()); + assertFalse(def.extractViable()); // Distinct isn't supported ParsingException e = expectThrows(ParsingException.class, () -> diff --git 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTimeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTimeTests.java new file mode 100644 index 00000000000..c5cdb06724b --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTimeTests.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; + +import org.elasticsearch.test.ESTestCase; + +import java.time.ZonedDateTime; + +public class CurrentDateTimeTests extends ESTestCase { + + public void testNanoPrecision() throws Exception { + ZonedDateTime zdt = ZonedDateTime.parse("2018-01-23T12:34:45.123456789Z"); + assertEquals(000_000_000, CurrentDateTime.nanoPrecision(zdt, 0).getNano()); + assertEquals(100_000_000, CurrentDateTime.nanoPrecision(zdt, 1).getNano()); + assertEquals(120_000_000, CurrentDateTime.nanoPrecision(zdt, 2).getNano()); + assertEquals(123_000_000, CurrentDateTime.nanoPrecision(zdt, 3).getNano()); + assertEquals(123_400_000, CurrentDateTime.nanoPrecision(zdt, 4).getNano()); + assertEquals(123_450_000, CurrentDateTime.nanoPrecision(zdt, 5).getNano()); + assertEquals(123_456_000, CurrentDateTime.nanoPrecision(zdt, 6).getNano()); + assertEquals(123_456_700, CurrentDateTime.nanoPrecision(zdt, 7).getNano()); + assertEquals(123_456_780, CurrentDateTime.nanoPrecision(zdt, 8).getNano()); + assertEquals(123_456_789, CurrentDateTime.nanoPrecision(zdt, 9).getNano()); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java index 164fe1fe931..305fa528e1f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java @@ -20,7 +20,7 @@ public class DateTimeTestUtils { public static ZonedDateTime dateTime(int year, int month, int day, int hour, int minute) { DateTime dateTime = new DateTime(year, month, day, hour, minute, DateTimeZone.UTC); - ZonedDateTime zdt = ZonedDateTime.of(year, month, day, hour, minute, 0, 0, DateUtils.UTC); + ZonedDateTime zdt = ZonedDateTime.of(year, month, day, hour, minute, 0, 0, DateUtils.UTC_ZI); assertEquals(dateTime.getMillis() / 1000, zdt.toEpochSecond()); return zdt; } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java index ebdc2171a9d..748718d0a3a 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java @@ -19,6 +19,7 @@ import java.time.Period; import java.time.ZonedDateTime; import 
java.time.temporal.TemporalAmount; +import static org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Arithmetics.mod; import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_DAY; import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_DAY_TO_HOUR; @@ -29,33 +30,55 @@ import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_YEAR_TO_MONTH; public class BinaryArithmeticTests extends ESTestCase { - public void testAddNumbers() throws Exception { + public void testAddNumbers() { assertEquals(Long.valueOf(3), add(1L, 2L)); } - public void testAddYearMonthIntervals() throws Exception { + public void testMod() { + assertEquals(2, mod(10, 8)); + assertEquals(2, mod(10, -8)); + assertEquals(-2, mod(-10, 8)); + assertEquals(-2, mod(-10, -8)); + + assertEquals(2L, mod(10L, 8)); + assertEquals(2L, mod(10, -8L)); + assertEquals(-2L, mod(-10L, 8L)); + assertEquals(-2L, mod(-10L, -8L)); + + assertEquals(2.3000002f, mod(10.3f, 8L)); + assertEquals(1.5f, mod(10, -8.5f)); + assertEquals(-1.8000002f, mod(-10.3f, 8.5f)); + assertEquals(-1.8000002f, mod(-10.3f, -8.5f)); + + assertEquals(2.3000000000000007d, mod(10.3d, 8L)); + assertEquals(1.5d, mod(10, -8.5d)); + assertEquals(-1.8000001907348633d, mod(-10.3f, 8.5d)); + assertEquals(-1.8000000000000007, mod(-10.3d, -8.5d)); + } + + public void testAddYearMonthIntervals() { Literal l = interval(Period.ofYears(1), INTERVAL_YEAR); Literal r = interval(Period.ofMonths(2), INTERVAL_MONTH); IntervalYearMonth x = add(l, r); assertEquals(interval(Period.ofYears(1).plusMonths(2), INTERVAL_YEAR_TO_MONTH), L(x)); } - public void testAddYearMonthMixedIntervals() throws Exception { + public void testAddYearMonthMixedIntervals() { Literal l = interval(Period.ofYears(1).plusMonths(5), INTERVAL_YEAR_TO_MONTH); Literal r = interval(Period.ofMonths(2), INTERVAL_MONTH); IntervalYearMonth x = add(l, r); assertEquals(interval(Period.ofYears(1).plusMonths(7), INTERVAL_YEAR_TO_MONTH), L(x)); } - public void testAddDayTimeIntervals() throws Exception { + public void testAddDayTimeIntervals() { Literal l = interval(Duration.ofDays(1), INTERVAL_DAY); Literal r = interval(Duration.ofHours(2), INTERVAL_HOUR); IntervalDayTime x = add(l, r); assertEquals(interval(Duration.ofDays(1).plusHours(2), INTERVAL_DAY_TO_HOUR), L(x)); } - public void testAddYearMonthIntervalToDate() throws Exception { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); + public void testAddYearMonthIntervalToDate() { + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); Literal l = L(now); TemporalAmount t = Period.ofYears(100).plusMonths(50); Literal r = interval(t, INTERVAL_HOUR); @@ -63,8 +86,8 @@ public class BinaryArithmeticTests extends ESTestCase { assertEquals(L(now.plus(t)), L(x)); } - public void testAddDayTimeIntervalToDate() throws Exception { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); + public void testAddDayTimeIntervalToDate() { + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); Literal l = L(now); TemporalAmount t = Duration.ofHours(2); Literal r = interval(Duration.ofHours(2), INTERVAL_HOUR); @@ -72,8 +95,8 @@ public class BinaryArithmeticTests extends ESTestCase { assertEquals(L(now.plus(t)), L(x)); } - public void testAddDayTimeIntervalToDateReverse() throws Exception { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); + public void testAddDayTimeIntervalToDateReverse() { + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); Literal l = L(now); 
TemporalAmount t = Duration.ofHours(2); Literal r = interval(Duration.ofHours(2), INTERVAL_HOUR); @@ -81,28 +104,28 @@ public class BinaryArithmeticTests extends ESTestCase { assertEquals(L(now.plus(t)), L(x)); } - public void testAddNumberToIntervalIllegal() throws Exception { + public void testAddNumberToIntervalIllegal() { Literal r = interval(Duration.ofHours(2), INTERVAL_HOUR); SqlIllegalArgumentException expect = expectThrows(SqlIllegalArgumentException.class, () -> add(r, L(1))); assertEquals("Cannot compute [+] between [IntervalDayTime] [Integer]", expect.getMessage()); } - public void testSubYearMonthIntervals() throws Exception { + public void testSubYearMonthIntervals() { Literal l = interval(Period.ofYears(1), INTERVAL_YEAR); Literal r = interval(Period.ofMonths(2), INTERVAL_MONTH); IntervalYearMonth x = sub(l, r); assertEquals(interval(Period.ofMonths(10), INTERVAL_YEAR_TO_MONTH), L(x)); } - public void testSubDayTimeIntervals() throws Exception { + public void testSubDayTimeIntervals() { Literal l = interval(Duration.ofDays(1).plusHours(10), INTERVAL_DAY_TO_HOUR); Literal r = interval(Duration.ofHours(2), INTERVAL_HOUR); IntervalDayTime x = sub(l, r); assertEquals(interval(Duration.ofDays(1).plusHours(8), INTERVAL_DAY_TO_HOUR), L(x)); } - public void testSubYearMonthIntervalToDate() throws Exception { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); + public void testSubYearMonthIntervalToDate() { + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); Literal l = L(now); TemporalAmount t = Period.ofYears(100).plusMonths(50); Literal r = interval(t, INTERVAL_HOUR); @@ -110,8 +133,8 @@ public class BinaryArithmeticTests extends ESTestCase { assertEquals(L(now.minus(t)), L(x)); } - public void testSubYearMonthIntervalToDateIllegal() throws Exception { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); + public void testSubYearMonthIntervalToDateIllegal() { + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); Literal l = L(now); TemporalAmount t = Period.ofYears(100).plusMonths(50); Literal r = interval(t, INTERVAL_HOUR); @@ -119,14 +142,14 @@ public class BinaryArithmeticTests extends ESTestCase { assertEquals("Cannot substract a date from an interval; do you mean the reverse?", ex.getMessage()); } - public void testSubNumberFromIntervalIllegal() throws Exception { + public void testSubNumberFromIntervalIllegal() { Literal r = interval(Duration.ofHours(2), INTERVAL_HOUR); SqlIllegalArgumentException expect = expectThrows(SqlIllegalArgumentException.class, () -> sub(r, L(1))); assertEquals("Cannot compute [-] between [IntervalDayTime] [Integer]", expect.getMessage()); } - public void testSubDayTimeIntervalToDate() throws Exception { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); + public void testSubDayTimeIntervalToDate() { + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); Literal l = L(now); TemporalAmount t = Duration.ofHours(2); Literal r = interval(Duration.ofHours(2), INTERVAL_HOUR); @@ -134,6 +157,22 @@ public class BinaryArithmeticTests extends ESTestCase { assertEquals(L(now.minus(t)), L(x)); } + public void testMulIntervalNumber() throws Exception { + Literal l = interval(Duration.ofHours(2), INTERVAL_HOUR); + IntervalDayTime interval = mul(l, -1); + assertEquals(INTERVAL_HOUR, interval.dataType()); + Duration p = interval.interval(); + assertEquals(Duration.ofHours(2).negated(), p); + } + + public void testMulNumberInterval() throws Exception { + Literal r = interval(Period.ofYears(1), INTERVAL_YEAR); + IntervalYearMonth interval 
= mul(-2, r); + assertEquals(INTERVAL_YEAR, interval.dataType()); + Period p = interval.interval(); + assertEquals(Period.ofYears(2).negated(), p); + } + @SuppressWarnings("unchecked") private static T add(Object l, Object r) { Add add = new Add(EMPTY, L(l), L(r)); @@ -148,6 +187,12 @@ public class BinaryArithmeticTests extends ESTestCase { return (T) sub.fold(); } + @SuppressWarnings("unchecked") + private static T mul(Object l, Object r) { + Mul mul = new Mul(EMPTY, L(l), L(r)); + assertTrue(mul.foldable()); + return (T) mul.fold(); + } private static Literal L(Object value) { return Literal.of(EMPTY, value); @@ -158,4 +203,4 @@ public class BinaryArithmeticTests extends ESTestCase { : new IntervalDayTime((Duration) value, intervalType); return Literal.of(EMPTY, i); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerRunTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerRunTests.java index 2d16edc2514..bec0ddc9caf 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerRunTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerRunTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.optimizer; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.TestUtils; import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer; import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier; import org.elasticsearch.xpack.sql.analysis.index.EsIndex; @@ -13,7 +14,6 @@ import org.elasticsearch.xpack.sql.analysis.index.IndexResolution; import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.sql.parser.SqlParser; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.sql.session.Configuration; import org.elasticsearch.xpack.sql.stats.Metrics; import org.elasticsearch.xpack.sql.type.EsField; import org.elasticsearch.xpack.sql.type.TypesTests; @@ -36,7 +36,7 @@ public class OptimizerRunTests extends ESTestCase { EsIndex test = new EsIndex("test", mapping); getIndexResult = IndexResolution.valid(test); - analyzer = new Analyzer(Configuration.DEFAULT, functionRegistry, getIndexResult, new Verifier(new Metrics())); + analyzer = new Analyzer(TestUtils.TEST_CFG, functionRegistry, getIndexResult, new Verifier(new Metrics())); optimizer = new Optimizer(); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java index 6c986b745ca..514c36ddf72 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.optimizer; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer.PruneSubqueryAliases; import org.elasticsearch.xpack.sql.expression.Alias; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; @@ -71,7 +72,6 @@ import org.elasticsearch.xpack.sql.optimizer.Optimizer.ConstantFolding; import org.elasticsearch.xpack.sql.optimizer.Optimizer.FoldNull; import org.elasticsearch.xpack.sql.optimizer.Optimizer.PropagateEquals; import 
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerRunTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerRunTests.java
index 2d16edc2514..bec0ddc9caf 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerRunTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerRunTests.java
@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.sql.optimizer;
 
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.sql.TestUtils;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier;
 import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
@@ -13,7 +14,6 @@ import org.elasticsearch.xpack.sql.analysis.index.IndexResolution;
 import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
 import org.elasticsearch.xpack.sql.parser.SqlParser;
 import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.sql.session.Configuration;
 import org.elasticsearch.xpack.sql.stats.Metrics;
 import org.elasticsearch.xpack.sql.type.EsField;
 import org.elasticsearch.xpack.sql.type.TypesTests;
@@ -36,7 +36,7 @@ public class OptimizerRunTests extends ESTestCase {
         EsIndex test = new EsIndex("test", mapping);
         getIndexResult = IndexResolution.valid(test);
-        analyzer = new Analyzer(Configuration.DEFAULT, functionRegistry, getIndexResult, new Verifier(new Metrics()));
+        analyzer = new Analyzer(TestUtils.TEST_CFG, functionRegistry, getIndexResult, new Verifier(new Metrics()));
         optimizer = new Optimizer();
     }
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java
index 6c986b745ca..514c36ddf72 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java
@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.sql.optimizer;
 
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer.PruneSubqueryAliases;
 import org.elasticsearch.xpack.sql.expression.Alias;
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.Expressions;
@@ -71,7 +72,6 @@ import org.elasticsearch.xpack.sql.optimizer.Optimizer.ConstantFolding;
 import org.elasticsearch.xpack.sql.optimizer.Optimizer.FoldNull;
 import org.elasticsearch.xpack.sql.optimizer.Optimizer.PropagateEquals;
 import org.elasticsearch.xpack.sql.optimizer.Optimizer.PruneDuplicateFunctions;
-import org.elasticsearch.xpack.sql.optimizer.Optimizer.PruneSubqueryAliases;
 import org.elasticsearch.xpack.sql.optimizer.Optimizer.ReplaceFoldableAttributes;
 import org.elasticsearch.xpack.sql.optimizer.Optimizer.SimplifyConditional;
 import org.elasticsearch.xpack.sql.plan.logical.Filter;
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java
index de1571e64a8..a85add8b110 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java
@@ -329,4 +329,28 @@ public class ExpressionTests extends ESTestCase {
         ParsingException ex = expectThrows(ParsingException.class, () -> parser.createExpression("CONVERT(1, INVALID)"));
         assertEquals("line 1:13: Invalid data type [INVALID] provided", ex.getMessage());
     }
-}
+
+    public void testCurrentTimestamp() {
+        Expression expr = parser.createExpression("CURRENT_TIMESTAMP");
+        assertEquals(UnresolvedFunction.class, expr.getClass());
+        UnresolvedFunction ur = (UnresolvedFunction) expr;
+        assertEquals("CURRENT_TIMESTAMP", ur.name());
+        assertEquals(0, ur.children().size());
+    }
+
+    public void testCurrentTimestampPrecision() {
+        Expression expr = parser.createExpression("CURRENT_TIMESTAMP(4)");
+        assertEquals(UnresolvedFunction.class, expr.getClass());
+        UnresolvedFunction ur = (UnresolvedFunction) expr;
+        assertEquals("CURRENT_TIMESTAMP", ur.name());
+        assertEquals(1, ur.children().size());
+        Expression child = ur.children().get(0);
+        assertEquals(Literal.class, child.getClass());
+        assertEquals(Short.valueOf((short) 4), child.fold());
+    }
+
+    public void testCurrentTimestampInvalidPrecision() {
+        ParsingException ex = expectThrows(ParsingException.class, () -> parser.createExpression("CURRENT_TIMESTAMP(100)"));
+        assertEquals("line 1:20: Precision needs to be between [0-9], received [100]", ex.getMessage());
+    }
+}
\ No newline at end of file
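The three CURRENT_TIMESTAMP cases above encode the grammar contract: the bare keyword parses to a zero-argument UnresolvedFunction, an optional literal argument sets the fractional-second precision, and the precision is validated at parse time rather than during analysis. A compressed view of the accepted and rejected forms, using SqlParser and ParsingException exactly as the test class imports them:

    SqlParser parser = new SqlParser();
    parser.createExpression("CURRENT_TIMESTAMP");      // ok: no parentheses, no precision
    parser.createExpression("CURRENT_TIMESTAMP(4)");   // ok: precision literal in the 0-9 range
    // rejected while parsing, before any analysis or execution runs:
    expectThrows(ParsingException.class, () -> parser.createExpression("CURRENT_TIMESTAMP(100)"));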
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysCatalogsTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysCatalogsTests.java
index bdc50159344..757c7ed0108 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysCatalogsTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysCatalogsTests.java
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.plan.logical.command.sys;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.sql.TestUtils;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier;
 import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
@@ -16,7 +17,6 @@ import org.elasticsearch.xpack.sql.analysis.index.IndexResolver;
 import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
 import org.elasticsearch.xpack.sql.parser.SqlParser;
 import org.elasticsearch.xpack.sql.plan.logical.command.Command;
-import org.elasticsearch.xpack.sql.session.Configuration;
 import org.elasticsearch.xpack.sql.session.SqlSession;
 import org.elasticsearch.xpack.sql.stats.Metrics;
 import org.elasticsearch.xpack.sql.type.TypesTests;
@@ -34,7 +34,7 @@ public class SysCatalogsTests extends ESTestCase {
     @SuppressWarnings({ "rawtypes", "unchecked" })
     private Tuple<Command, SqlSession> sql(String sql) {
         EsIndex test = new EsIndex("test", TypesTests.loadMapping("mapping-multi-field-with-nested.json", true));
-        Analyzer analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), IndexResolution.valid(test),
+        Analyzer analyzer = new Analyzer(TestUtils.TEST_CFG, new FunctionRegistry(), IndexResolution.valid(test),
             new Verifier(new Metrics()));
 
         Command cmd = (Command) analyzer.analyze(parser.createStatement(sql), true);
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java
index 5de02a5acea..0462956bf85 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.plan.logical.command.sys;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.sql.TestUtils;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier;
 import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
@@ -16,7 +17,6 @@ import org.elasticsearch.xpack.sql.analysis.index.IndexResolver;
 import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
 import org.elasticsearch.xpack.sql.parser.SqlParser;
 import org.elasticsearch.xpack.sql.plan.logical.command.Command;
-import org.elasticsearch.xpack.sql.session.Configuration;
 import org.elasticsearch.xpack.sql.session.SqlSession;
 import org.elasticsearch.xpack.sql.stats.Metrics;
 import org.elasticsearch.xpack.sql.type.DataType;
@@ -41,7 +41,7 @@ public class SysParserTests extends ESTestCase {
     @SuppressWarnings({ "rawtypes", "unchecked" })
     private Tuple<Command, SqlSession> sql(String sql) {
         EsIndex test = new EsIndex("test", mapping);
-        Analyzer analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), IndexResolution.valid(test),
+        Analyzer analyzer = new Analyzer(TestUtils.TEST_CFG, new FunctionRegistry(), IndexResolution.valid(test),
             new Verifier(new Metrics()));
 
         Command cmd = (Command) analyzer.analyze(parser.createStatement(sql), true);
@@ -53,7 +53,7 @@ public class SysParserTests extends ESTestCase {
             return Void.TYPE;
         }).when(resolver).resolveAsSeparateMappings(any(), any(), any());
 
-        SqlSession session = new SqlSession(Configuration.DEFAULT, null, null, resolver, null, null, null, null);
+        SqlSession session = new SqlSession(TestUtils.TEST_CFG, null, null, resolver, null, null, null, null);
         return new Tuple<>(cmd, session);
     }
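Every test file in this patch swaps Configuration.DEFAULT for TestUtils.TEST_CFG, which suggests the shared default configuration moved out of the production Configuration class and into a test-only helper. The helper itself is not part of this section; a plausible shape, with the constructor arguments purely hypothetical (the real Configuration signature is not shown in this diff), would be:

    public final class TestUtils {

        private TestUtils() {}

        // Hypothetical sketch: one Configuration instance shared by all SQL tests,
        // pinned to UTC so date-dependent plans stay deterministic. The argument
        // list here is illustrative, not the actual constructor.
        public static final Configuration TEST_CFG = new Configuration(
            DateUtils.UTC_ZI, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT,
            Protocol.PAGE_TIMEOUT, null, Mode.PLAIN, null, null, null);
    }

Centralizing the constant in one place also explains why the import of org.elasticsearch.xpack.sql.session.Configuration disappears from every touched test while org.elasticsearch.xpack.sql.TestUtils appears.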
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTableTypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTableTypesTests.java
index ccc2bf87d84..2458a3f34eb 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTableTypesTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTableTypesTests.java
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.plan.logical.command.sys;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.sql.TestUtils;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier;
 import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
@@ -16,7 +17,6 @@ import org.elasticsearch.xpack.sql.analysis.index.IndexResolver;
 import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
 import org.elasticsearch.xpack.sql.parser.SqlParser;
 import org.elasticsearch.xpack.sql.plan.logical.command.Command;
-import org.elasticsearch.xpack.sql.session.Configuration;
 import org.elasticsearch.xpack.sql.session.SqlSession;
 import org.elasticsearch.xpack.sql.stats.Metrics;
 import org.elasticsearch.xpack.sql.type.TypesTests;
@@ -29,7 +29,7 @@ public class SysTableTypesTests extends ESTestCase {
 
     private Tuple<Command, SqlSession> sql(String sql) {
         EsIndex test = new EsIndex("test", TypesTests.loadMapping("mapping-multi-field-with-nested.json", true));
-        Analyzer analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), IndexResolution.valid(test),
+        Analyzer analyzer = new Analyzer(TestUtils.TEST_CFG, new FunctionRegistry(), IndexResolution.valid(test),
             new Verifier(new Metrics()));
 
         Command cmd = (Command) analyzer.analyze(parser.createStatement(sql), true);
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java
index 3558c290fdc..aae5ac06447 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.plan.logical.command.sys;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.sql.TestUtils;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier;
 import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
@@ -19,7 +20,6 @@ import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
 import org.elasticsearch.xpack.sql.parser.SqlParser;
 import org.elasticsearch.xpack.sql.plan.logical.command.Command;
 import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue;
-import org.elasticsearch.xpack.sql.session.Configuration;
 import org.elasticsearch.xpack.sql.session.SchemaRowSet;
 import org.elasticsearch.xpack.sql.session.SqlSession;
 import org.elasticsearch.xpack.sql.stats.Metrics;
@@ -236,7 +236,7 @@ public class SysTablesTests extends ESTestCase {
 
     private Tuple<Command, SqlSession> sql(String sql, List<SqlTypedParamValue> params) {
         EsIndex test = new EsIndex("test", mapping);
-        Analyzer analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), IndexResolution.valid(test),
+        Analyzer analyzer = new Analyzer(TestUtils.TEST_CFG, new FunctionRegistry(), IndexResolution.valid(test),
             new Verifier(new Metrics()));
 
         Command cmd = (Command) analyzer.analyze(parser.createStatement(sql, params), true);
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java
index dd2d56ef507..7adeddc9ebe 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java
@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.plan.logical.command.sys;
 
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.sql.TestUtils;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer;
 import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
 import org.elasticsearch.xpack.sql.analysis.index.IndexResolution;
@@ -14,7 +15,6 @@ import org.elasticsearch.xpack.sql.analysis.index.IndexResolver;
 import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
 import org.elasticsearch.xpack.sql.parser.SqlParser;
 import org.elasticsearch.xpack.sql.plan.logical.command.Command;
-import org.elasticsearch.xpack.sql.session.Configuration;
 import org.elasticsearch.xpack.sql.session.SqlSession;
 import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.type.TypesTests;
@@ -32,7 +32,7 @@ public class SysTypesTests extends ESTestCase {
 
     private Tuple<Command, SqlSession> sql(String sql) {
         EsIndex test = new EsIndex("test", TypesTests.loadMapping("mapping-multi-field-with-nested.json", true));
-        Analyzer analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), IndexResolution.valid(test), null);
+        Analyzer analyzer = new Analyzer(TestUtils.TEST_CFG, new FunctionRegistry(), IndexResolution.valid(test), null);
         Command cmd = (Command) analyzer.analyze(parser.createStatement(sql), false);
 
         IndexResolver resolver = mock(IndexResolver.class);
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java
index 998fc132f98..6a6a1e2dd8e 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java
@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.sql.planner;
 
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.sql.TestUtils;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier;
 import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
@@ -16,7 +17,6 @@ import org.elasticsearch.xpack.sql.parser.SqlParser;
 import org.elasticsearch.xpack.sql.plan.physical.EsQueryExec;
 import org.elasticsearch.xpack.sql.plan.physical.LocalExec;
 import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan;
-import org.elasticsearch.xpack.sql.session.Configuration;
 import org.elasticsearch.xpack.sql.session.EmptyExecutable;
 import org.elasticsearch.xpack.sql.stats.Metrics;
 import org.elasticsearch.xpack.sql.type.EsField;
@@ -43,7 +43,7 @@ public class QueryFolderTests extends ESTestCase {
         Map<String, EsField> mapping = TypesTests.loadMapping("mapping-multi-field-variation.json");
         EsIndex test = new EsIndex("test", mapping);
         IndexResolution getIndexResult = IndexResolution.valid(test);
-        analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), getIndexResult, new Verifier(new Metrics()));
+        analyzer = new Analyzer(TestUtils.TEST_CFG, new FunctionRegistry(), getIndexResult, new Verifier(new Metrics()));
         optimizer = new Optimizer();
         planner = new Planner();
     }
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java
index d4ee7bce36f..559d676f1b9 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java
@@ -7,15 +7,20 @@
 package org.elasticsearch.xpack.sql.planner;
 
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
+import org.elasticsearch.xpack.sql.TestUtils;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier;
 import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
 import org.elasticsearch.xpack.sql.analysis.index.IndexResolution;
 import org.elasticsearch.xpack.sql.analysis.index.MappingException;
 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.FieldAttribute;
 import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
+import org.elasticsearch.xpack.sql.expression.function.grouping.Histogram;
 import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
+import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate;
 import org.elasticsearch.xpack.sql.parser.SqlParser;
+import org.elasticsearch.xpack.sql.plan.logical.Aggregate;
 import org.elasticsearch.xpack.sql.plan.logical.Filter;
 import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.sql.plan.logical.Project;
@@ -28,14 +33,15 @@ import org.elasticsearch.xpack.sql.querydsl.query.RangeQuery;
 import org.elasticsearch.xpack.sql.querydsl.query.ScriptQuery;
 import org.elasticsearch.xpack.sql.querydsl.query.TermQuery;
 import org.elasticsearch.xpack.sql.querydsl.query.TermsQuery;
-import org.elasticsearch.xpack.sql.session.Configuration;
 import org.elasticsearch.xpack.sql.stats.Metrics;
+import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.type.EsField;
 import org.elasticsearch.xpack.sql.type.TypesTests;
 import org.elasticsearch.xpack.sql.util.DateUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
+import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.stream.Stream;
@@ -57,7 +63,7 @@ public class QueryTranslatorTests extends ESTestCase {
         Map<String, EsField> mapping = TypesTests.loadMapping("mapping-multi-field-variation.json");
         EsIndex test = new EsIndex("test", mapping);
         IndexResolution getIndexResult = IndexResolution.valid(test);
-        analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), getIndexResult, new Verifier(new Metrics()));
+        analyzer = new Analyzer(TestUtils.TEST_CFG, new FunctionRegistry(), getIndexResult, new Verifier(new Metrics()));
     }
 
     @AfterClass
@@ -246,9 +252,8 @@ public class QueryTranslatorTests extends ESTestCase {
 
     public void testTranslateIsNullExpression_HavingClause_Painless() {
         LogicalPlan p = plan("SELECT keyword, max(int) FROM test GROUP BY keyword HAVING max(int) IS NULL");
-        assertTrue(p instanceof Project);
-        assertTrue(p.children().get(0) instanceof Filter);
-        Expression condition = ((Filter) p.children().get(0)).condition();
+        assertTrue(p instanceof Filter);
+        Expression condition = ((Filter) p).condition();
         assertFalse(condition.foldable());
         QueryTranslation translation = QueryTranslator.toQuery(condition, true);
         assertNull(translation.query);
@@ -260,9 +265,8 @@ public class QueryTranslatorTests extends ESTestCase {
 
     public void testTranslateIsNotNullExpression_HavingClause_Painless() {
         LogicalPlan p = plan("SELECT keyword, max(int) FROM test GROUP BY keyword HAVING max(int) IS NOT NULL");
-        assertTrue(p instanceof Project);
-        assertTrue(p.children().get(0) instanceof Filter);
-        Expression condition = ((Filter) p.children().get(0)).condition();
+        assertTrue(p instanceof Filter);
+        Expression condition = ((Filter) p).condition();
         assertFalse(condition.foldable());
         QueryTranslation translation = QueryTranslator.toQuery(condition, true);
         assertNull(translation.query);
@@ -329,9 +333,8 @@ public class QueryTranslatorTests extends ESTestCase {
 
     public void testTranslateInExpression_HavingClause_Painless() {
         LogicalPlan p = plan("SELECT keyword, max(int) FROM test GROUP BY keyword HAVING max(int) IN (10, 20, 30 - 10)");
-        assertTrue(p instanceof Project);
-        assertTrue(p.children().get(0) instanceof Filter);
-        Expression condition = ((Filter) p.children().get(0)).condition();
+        assertTrue(p instanceof Filter);
+        Expression condition = ((Filter) p).condition();
         assertFalse(condition.foldable());
         QueryTranslation translation = QueryTranslator.toQuery(condition, true);
         assertNull(translation.query);
@@ -344,9 +347,8 @@ public class QueryTranslatorTests extends ESTestCase {
 
     public void testTranslateInExpression_HavingClause_PainlessOneArg() {
         LogicalPlan p = plan("SELECT keyword, max(int) FROM test GROUP BY keyword HAVING max(int) IN (10, 30 - 20)");
-        assertTrue(p instanceof Project);
-        assertTrue(p.children().get(0) instanceof Filter);
-        Expression condition = ((Filter) p.children().get(0)).condition();
+        assertTrue(p instanceof Filter);
+        Expression condition = ((Filter) p).condition();
         assertFalse(condition.foldable());
         QueryTranslation translation = QueryTranslator.toQuery(condition, true);
         assertNull(translation.query);
@@ -360,9 +362,8 @@ public class QueryTranslatorTests extends ESTestCase {
 
     public void testTranslateInExpression_HavingClause_PainlessAndNullHandling() {
         LogicalPlan p = plan("SELECT keyword, max(int) FROM test GROUP BY keyword HAVING max(int) IN (10, null, 20, 30, null, 30 - 10)");
-        assertTrue(p instanceof Project);
-        assertTrue(p.children().get(0) instanceof Filter);
-        Expression condition = ((Filter) p.children().get(0)).condition();
+        assertTrue(p instanceof Filter);
+        Expression condition = ((Filter) p).condition();
         assertFalse(condition.foldable());
         QueryTranslation translation = QueryTranslator.toQuery(condition, true);
         assertNull(translation.query);
@@ -379,9 +380,8 @@ public class QueryTranslatorTests extends ESTestCase {
         LogicalPlan p = plan("SELECT keyword, max(int) FROM test GROUP BY keyword HAVING " +
             operation.name() + "(max(int)) > 10");
-        assertTrue(p instanceof Project);
-        assertTrue(p.children().get(0) instanceof Filter);
-        Expression condition = ((Filter) p.children().get(0)).condition();
+        assertTrue(p instanceof Filter);
+        Expression condition = ((Filter) p).condition();
         assertFalse(condition.foldable());
         QueryTranslation translation = QueryTranslator.toQuery(condition, true);
         assertNull(translation.query);
@@ -392,4 +392,75 @@ public class QueryTranslatorTests extends ESTestCase {
         assertThat(aggFilter.scriptTemplate().params().toString(), startsWith("[{a=MAX(int){a->"));
         assertThat(aggFilter.scriptTemplate().params().toString(), endsWith(", {v=10}]"));
     }
+
+    public void testGroupByAndHavingWithFunctionOnTopOfAggregation() {
+        LogicalPlan p = plan("SELECT keyword, MAX(int) FROM test GROUP BY 1 HAVING ABS(MAX(int)) > 10");
+        assertTrue(p instanceof Filter);
+        Expression condition = ((Filter) p).condition();
+        assertFalse(condition.foldable());
+        QueryTranslation translation = QueryTranslator.toQuery(condition, true);
+        assertNull(translation.query);
+        AggFilter aggFilter = translation.aggFilter;
+        assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.gt(InternalSqlScriptUtils.abs" +
+                "(params.a0),params.v0))",
+            aggFilter.scriptTemplate().toString());
+        assertThat(aggFilter.scriptTemplate().params().toString(), startsWith("[{a=MAX(int){a->"));
+        assertThat(aggFilter.scriptTemplate().params().toString(), endsWith(", {v=10}]"));
+    }
+
+    public void testTranslateCoalesce_GroupBy_Painless() {
+        LogicalPlan p = plan("SELECT COALESCE(int, 10) FROM test GROUP BY 1");
+        assertTrue(p instanceof Aggregate);
+        Expression condition = ((Aggregate) p).groupings().get(0);
+        assertFalse(condition.foldable());
+        QueryTranslator.GroupingContext groupingContext = QueryTranslator.groupBy(((Aggregate) p).groupings());
+        assertNotNull(groupingContext);
+        ScriptTemplate scriptTemplate = groupingContext.tail.script();
+        assertEquals("InternalSqlScriptUtils.coalesce([InternalSqlScriptUtils.docValue(doc,params.v0),params.v1])",
+            scriptTemplate.toString());
+        assertEquals("[{v=int}, {v=10}]", scriptTemplate.params().toString());
+    }
+
+    public void testTranslateNullIf_GroupBy_Painless() {
+        LogicalPlan p = plan("SELECT NULLIF(int, 10) FROM test GROUP BY 1");
+        assertTrue(p instanceof Aggregate);
+        Expression condition = ((Aggregate) p).groupings().get(0);
+        assertFalse(condition.foldable());
+        QueryTranslator.GroupingContext groupingContext = QueryTranslator.groupBy(((Aggregate) p).groupings());
+        assertNotNull(groupingContext);
+        ScriptTemplate scriptTemplate = groupingContext.tail.script();
+        assertEquals("InternalSqlScriptUtils.nullif(InternalSqlScriptUtils.docValue(doc,params.v0),params.v1)",
+            scriptTemplate.toString());
+        assertEquals("[{v=int}, {v=10}]", scriptTemplate.params().toString());
+    }
+
+    public void testGroupByDateHistogram() {
+        LogicalPlan p = plan("SELECT MAX(int) FROM test GROUP BY HISTOGRAM(int, 1000)");
+        assertTrue(p instanceof Aggregate);
+        Aggregate a = (Aggregate) p;
+        List<Expression> groupings = a.groupings();
+        assertEquals(1, groupings.size());
+        Expression exp = groupings.get(0);
+        assertEquals(Histogram.class, exp.getClass());
+        Histogram h = (Histogram) exp;
+        assertEquals(1000, h.interval().fold());
+        Expression field = h.field();
+        assertEquals(FieldAttribute.class, field.getClass());
+        assertEquals(DataType.INTEGER, field.dataType());
+    }
+
+
+    public void testGroupByHistogram() {
+        LogicalPlan p = plan("SELECT MAX(int) FROM test GROUP BY HISTOGRAM(date, INTERVAL 2 YEARS)");
+        assertTrue(p instanceof Aggregate);
+        Aggregate a = (Aggregate) p;
+        List<Expression> groupings = a.groupings();
+        assertEquals(1, groupings.size());
+        Expression exp = groupings.get(0);
+        assertEquals(Histogram.class, exp.getClass());
+        Histogram h = (Histogram) exp;
+        assertEquals("+2-0", h.interval().fold().toString());
+        Expression field = h.field();
+        assertEquals(FieldAttribute.class, field.getClass());
+        assertEquals(DataType.DATE, field.dataType());
+    }
 }
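Two structural changes run through the QueryTranslatorTests hunks above. First, the plan produced for GROUP BY ... HAVING queries now surfaces the Filter as the plan root instead of wrapping it in a Project, so every assertion unwraps one level less. Second, HISTOGRAM groupings resolve to a Histogram grouping function whose folded interval is a plain number for numeric fields and a year-month interval (printed "+2-0" for two years) for date fields. The essential assertion pattern, with all symbols as imported in the diff:

    LogicalPlan p = plan("SELECT keyword, max(int) FROM test GROUP BY keyword HAVING max(int) IS NULL");
    assertTrue(p instanceof Filter);                  // previously: a Project wrapping the Filter
    Expression condition = ((Filter) p).condition();
    QueryTranslation translation = QueryTranslator.toQuery(condition, true);
    assertNull(translation.query);                    // HAVING becomes an agg-level script filter,
    assertNotNull(translation.aggFilter);             // not a regular query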
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/MultiMatchQueryTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/MultiMatchQueryTests.java
@@ -23,8 +23,7 @@ public class MultiMatchQueryTests extends ESTestCase {
         MultiMatchQueryBuilder qb = getBuilder("lenient=true");
         assertThat(qb.lenient(), equalTo(true));
 
-        qb = getBuilder("use_dis_max=true;type=best_fields");
-        assertThat(qb.useDisMax(), equalTo(true));
+        qb = getBuilder("type=best_fields");
         assertThat(qb.getType(), equalTo(MultiMatchQueryBuilder.Type.BEST_FIELDS));
 
         Exception e = expectThrows(IllegalArgumentException.class, () -> getBuilder("pizza=yummy"));
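The use_dis_max assertion is dropped because the parameter was deprecated and then removed from the multi_match query upstream (best_fields already scores as a dis-max over the per-field queries), so the SQL options string can no longer set it. Presumably the old string would now trip the same unknown-parameter check that the pizza=yummy case exercises; a hypothetical follow-up assertion, not part of this patch:

    // Hypothetical: with the setting removed, the old options string should be
    // rejected like any other unknown parameter.
    expectThrows(IllegalArgumentException.class, () -> getBuilder("use_dis_max=true;type=best_fields"));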
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/stats/VerifierMetricsTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/stats/VerifierMetricsTests.java
index 2796b94e50e..1db0d4383e7 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/stats/VerifierMetricsTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/stats/VerifierMetricsTests.java
@@ -8,13 +8,13 @@
 package org.elasticsearch.xpack.sql.stats;
 
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.core.watcher.common.stats.Counters;
+import org.elasticsearch.xpack.sql.TestUtils;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer;
 import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier;
 import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
 import org.elasticsearch.xpack.sql.analysis.index.IndexResolution;
 import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
 import org.elasticsearch.xpack.sql.parser.SqlParser;
-import org.elasticsearch.xpack.sql.session.Configuration;
 import org.elasticsearch.xpack.sql.type.EsField;
 import org.elasticsearch.xpack.sql.type.TypesTests;
@@ -241,7 +241,7 @@ public class VerifierMetricsTests extends ESTestCase {
             verifier = new Verifier(metrics);
         }
 
-        Analyzer analyzer = new Analyzer(Configuration.DEFAULT, new FunctionRegistry(), IndexResolution.valid(test), verifier);
+        Analyzer analyzer = new Analyzer(TestUtils.TEST_CFG, new FunctionRegistry(), IndexResolution.valid(test), verifier);
         analyzer.analyze(parser.createStatement(sql), true);
 
         return metrics == null ? null : metrics.stats();
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java
index 7fd84f8c207..963498bb9b6 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java
@@ -12,6 +12,8 @@ import org.elasticsearch.common.io.PathUtils;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.FieldAttribute;
+import org.elasticsearch.xpack.sql.expression.Literal;
+import org.elasticsearch.xpack.sql.expression.LiteralTests;
 import org.elasticsearch.xpack.sql.expression.UnresolvedAttributeTests;
 import org.elasticsearch.xpack.sql.expression.function.Function;
 import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction;
@@ -20,14 +22,16 @@ import org.elasticsearch.xpack.sql.expression.function.aggregate.InnerAggregate;
 import org.elasticsearch.xpack.sql.expression.function.aggregate.Percentile;
 import org.elasticsearch.xpack.sql.expression.function.aggregate.PercentileRanks;
 import org.elasticsearch.xpack.sql.expression.function.aggregate.Percentiles;
+import org.elasticsearch.xpack.sql.expression.function.grouping.Histogram;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.CurrentDateTime;
 import org.elasticsearch.xpack.sql.expression.gen.pipeline.AggExtractorInput;
 import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipesTests;
 import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
 import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor;
 import org.elasticsearch.xpack.sql.expression.gen.processor.Processor;
 import org.elasticsearch.xpack.sql.expression.predicate.conditional.IfNull;
-import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.In;
 import org.elasticsearch.xpack.sql.expression.predicate.fulltext.FullTextPredicate;
+import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.In;
 import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.InPipe;
 import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern;
 import org.elasticsearch.xpack.sql.tree.NodeTests.ChildrenAreAProperty;
@@ -453,8 +457,15 @@ public class NodeSubclassTests<T extends B> extends ESTestCase {
             if (argClass == char.class) {
                 return randomFrom('\\', '|', '/', '`');
             }
+        } else if (toBuildClass == Histogram.class) {
+            if (argClass == Expression.class) {
+                return LiteralTests.randomLiteral();
+            }
+        } else if (toBuildClass == CurrentDateTime.class) {
+            if (argClass == Expression.class) {
+                return Literal.of(LocationTests.randomLocation(), randomInt(9));
+            }
         }
-
         if (Expression.class == argClass) {
             /*
              * Rather than use any old subclass of expression lets
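NodeSubclassTests constructs every Node subclass reflectively with randomized constructor arguments, so nodes that validate their inputs need special-cased suppliers: a Histogram accepts any literal, while CurrentDateTime checks its precision argument and must therefore be fed a literal already inside the legal range (randomInt(9) draws from 0 to 9 inclusive in the randomized-testing framework). Without the second branch, a random Expression would routinely fail the precision check and abort the round-trip test before it ever exercises equals/hashCode/transform. A hypothetical sketch of the kind of check that makes this necessary, modeled on the parser error seen in ExpressionTests (the real CurrentDateTime implementation is not shown in this diff):

    // Hypothetical: rejects precision literals outside the range the
    // grammar and the function both accept.
    static int validatePrecision(Object folded) {
        int precision = ((Number) folded).intValue();
        if (precision < 0 || precision > 9) {
            throw new IllegalArgumentException("Precision needs to be between [0-9], received [" + precision + "]");
        }
        return precision;
    }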
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java
index ffe68e1765f..064014a321d 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java
@@ -111,7 +111,7 @@ public class DataTypeConversionTests extends ESTestCase {
         assertEquals(dateTime(18000000L), conversion.convert("1970-01-01T00:00:00-05:00"));
 
         // double check back and forth conversion
-        ZonedDateTime dt = ZonedDateTime.now(DateUtils.UTC);
+        ZonedDateTime dt = ZonedDateTime.now(DateUtils.UTC_ZI);
         Conversion forward = conversionFor(DATE, KEYWORD);
         Conversion back = conversionFor(KEYWORD, DATE);
         assertEquals(dt, back.convert(forward.convert(dt)));
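The final hunk keeps the DATE to KEYWORD round trip intact while switching to DateUtils.UTC_ZI, where the _ZI suffix presumably marks the java.time ZoneId flavor of the UTC constant. The property being protected is plain java.time behavior: a UTC ZonedDateTime printed in ISO offset form parses back to the same instant. A self-contained illustration, independent of the SQL Conversion machinery:

    import java.time.ZonedDateTime;
    import java.time.ZoneOffset;
    import java.time.format.DateTimeFormatter;

    public class RoundTrip {
        public static void main(String[] args) {
            ZonedDateTime dt = ZonedDateTime.now(ZoneOffset.UTC);
            // print with offset and fractional seconds, then parse it back
            String printed = dt.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
            ZonedDateTime parsed = ZonedDateTime.parse(printed, DateTimeFormatter.ISO_OFFSET_DATE_TIME);
            if (!dt.toInstant().equals(parsed.toInstant())) {
                throw new AssertionError("round trip lost information: " + printed);
            }
        }
    }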