diff --git a/TESTING.asciidoc b/TESTING.asciidoc index d16d85b2ee1..9c8ab20a3a6 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -77,39 +77,24 @@ Run a single test case (variants) ./gradlew test "-Dtests.class=*.ClassName" ---------------------------------------------------------- -Run all tests in a package and sub-packages +Run all tests in a package and its sub-packages ---------------------------------------------------- ./gradlew test "-Dtests.class=org.elasticsearch.package.*" ---------------------------------------------------- -Run any test methods that contain 'esi' (like: ...r*esi*ze...). +Run any test methods that contain 'esi' (like: ...r*esi*ze...) ------------------------------- ./gradlew test "-Dtests.method=*esi*" ------------------------------- -You can also filter tests by certain annotations ie: - - * `@Nightly` - tests that only run in nightly builds (disabled by default) - * `@Backwards` - backwards compatibility tests (disabled by default) - * `@AwaitsFix` - tests that are waiting for a bugfix (disabled by default) - * `@BadApple` - tests that are known to fail randomly (disabled by default) - -Those annotation names can be combined into a filter expression like: +Run all tests that are waiting for a bugfix (disabled by default) ------------------------------------------------ -./gradlew test -Dtests.filter="@nightly and not @backwards" +./gradlew test -Dtests.filter=@awaitsfix ------------------------------------------------ -to run all nightly test but not the ones that are backwards tests. `tests.filter` supports -the boolean operators `and, or, not` and grouping ie: - - ---------------------------------------------------------------- -./gradlew test -Dtests.filter="@nightly and not(@badapple or @backwards)" ---------------------------------------------------------------- - === Seed and repetitions. Run with a given seed (seed is a hex-encoded long). @@ -160,8 +145,6 @@ Test groups can be enabled or disabled (true/false). Default value provided below in [brackets]. 
------------------------------------------------------------------ -./gradlew test -Dtests.nightly=[false] - nightly test group (@Nightly) -./gradlew test -Dtests.weekly=[false] - weekly tests (@Weekly) ./gradlew test -Dtests.awaitsfix=[false] - known issue (@AwaitsFix) ------------------------------------------------------------------ diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index da8ad788164..82b1d8525b1 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -106,6 +106,7 @@ dependencies { compile 'org.apache.rat:apache-rat:0.11' compile "org.elasticsearch:jna:4.5.1" compile 'com.github.jengelman.gradle.plugins:shadow:2.0.4' + compile 'de.thetaphi:forbiddenapis:2.6' testCompile "junit:junit:${props.getProperty('junit')}" } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index e89d05e8508..0e706aa5956 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -19,7 +19,10 @@ package org.elasticsearch.gradle.precommit import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin +import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis +import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask +import org.gradle.api.JavaVersion import org.gradle.api.Project import org.gradle.api.Task import org.gradle.api.plugins.JavaBasePlugin @@ -33,7 +36,7 @@ class PrecommitTasks { public static Task create(Project project, boolean includeDependencyLicenses) { project.configurations.create("forbiddenApisCliJar") project.dependencies { - forbiddenApisCliJar ('de.thetaphi:forbiddenapis:2.5') + forbiddenApisCliJar ('de.thetaphi:forbiddenapis:2.6') } List precommitTasks = [ @@ -109,47 +112,43 @@ class PrecommitTasks { } private static Task configureForbiddenApisCli(Project project) { - Task forbiddenApisCli = project.tasks.create('forbiddenApis') - project.sourceSets.all { sourceSet -> - forbiddenApisCli.dependsOn( - project.tasks.create(sourceSet.getTaskName('forbiddenApis', null), ForbiddenApisCliTask) { - ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources') - dependsOn(buildResources) - it.sourceSet = sourceSet - javaHome = project.runtimeJavaHome - targetCompatibility = project.compilerJavaVersion - bundledSignatures = [ - "jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out" - ] - signaturesFiles = project.files( - buildResources.copy("forbidden/jdk-signatures.txt"), - buildResources.copy("forbidden/es-all-signatures.txt") - ) - suppressAnnotations = ['**.SuppressForbidden'] - if (sourceSet.name == 'test') { - signaturesFiles += project.files( - buildResources.copy("forbidden/es-test-signatures.txt"), - buildResources.copy("forbidden/http-signatures.txt") - ) - } else { - signaturesFiles += project.files(buildResources.copy("forbidden/es-server-signatures.txt")) - } - dependsOn sourceSet.classesTaskName - classesDirs = sourceSet.output.classesDirs - ext.replaceSignatureFiles = { String... names -> - signaturesFiles = project.files( - names.collect { buildResources.copy("forbidden/${it}.txt") } - ) - } - ext.addSignatureFiles = { String... 
names -> - signaturesFiles += project.files( - names.collect { buildResources.copy("forbidden/${it}.txt") } - ) - } - } + project.pluginManager.apply(ForbiddenApisPlugin) + ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources') + project.tasks.withType(CheckForbiddenApis) { + dependsOn(buildResources) + targetCompatibility = project.runtimeJavaVersion >= JavaVersion.VERSION_1_9 ? + project.runtimeJavaVersion.getMajorVersion() : + project.runtimeJavaVersion + bundledSignatures = [ + "jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out" + ] + signaturesFiles = project.files( + buildResources.copy("forbidden/jdk-signatures.txt"), + buildResources.copy("forbidden/es-all-signatures.txt") ) + suppressAnnotations = ['**.SuppressForbidden'] + if (name.endsWith('Test')) { + signaturesFiles += project.files( + buildResources.copy("forbidden/es-test-signatures.txt"), + buildResources.copy("forbidden/http-signatures.txt") + ) + } else { + signaturesFiles += project.files(buildResources.copy("forbidden/es-server-signatures.txt")) + } + ext.replaceSignatureFiles = { String... names -> + signaturesFiles = project.files( + names.collect { buildResources.copy("forbidden/${it}.txt") } + ) + } + ext.addSignatureFiles = { String... names -> + signaturesFiles += project.files( + names.collect { buildResources.copy("forbidden/${it}.txt") } + ) + } } - return forbiddenApisCli + Task forbiddenApis = project.tasks.getByName("forbiddenApis") + forbiddenApis.group = "" + return forbiddenApis } private static Task configureCheckstyle(Project project) { diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java deleted file mode 100644 index f88fff24be5..00000000000 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.gradle.precommit; - -import org.elasticsearch.gradle.LoggedExec; -import org.gradle.api.JavaVersion; -import org.gradle.api.artifacts.Configuration; -import org.gradle.api.file.FileCollection; -import org.gradle.api.logging.Logger; -import org.gradle.api.logging.Logging; -import org.gradle.api.tasks.Input; -import org.gradle.api.tasks.InputFiles; -import org.gradle.api.tasks.SkipWhenEmpty; -import org.gradle.api.tasks.SourceSet; -import org.gradle.api.tasks.TaskAction; -import org.gradle.process.JavaExecSpec; - -import java.io.File; -import java.util.ArrayList; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Set; - -public class ForbiddenApisCliTask extends PrecommitTask { - - private final Logger logger = Logging.getLogger(ForbiddenApisCliTask.class); - private FileCollection signaturesFiles; - private List signatures = new ArrayList<>(); - private Set bundledSignatures = new LinkedHashSet<>(); - private Set suppressAnnotations = new LinkedHashSet<>(); - private JavaVersion targetCompatibility; - private FileCollection classesDirs; - private SourceSet sourceSet; - // This needs to be an object so it can hold Groovy GStrings - private Object javaHome; - - @Input - public JavaVersion getTargetCompatibility() { - return targetCompatibility; - } - - public void setTargetCompatibility(JavaVersion targetCompatibility) { - if (targetCompatibility.compareTo(JavaVersion.VERSION_1_10) > 0) { - logger.warn( - "Target compatibility is set to {} but forbiddenapis only supports up to 10. Will cap at 10.", - targetCompatibility - ); - this.targetCompatibility = JavaVersion.VERSION_1_10; - } else { - this.targetCompatibility = targetCompatibility; - } - } - - @InputFiles - @SkipWhenEmpty - public FileCollection getClassesDirs() { - return classesDirs.filter(File::exists); - } - - public void setClassesDirs(FileCollection classesDirs) { - this.classesDirs = classesDirs; - } - - @InputFiles - public FileCollection getSignaturesFiles() { - return signaturesFiles; - } - - public void setSignaturesFiles(FileCollection signaturesFiles) { - this.signaturesFiles = signaturesFiles; - } - - @Input - public List getSignatures() { - return signatures; - } - - public void setSignatures(List signatures) { - this.signatures = signatures; - } - - @Input - public Set getBundledSignatures() { - return bundledSignatures; - } - - public void setBundledSignatures(Set bundledSignatures) { - this.bundledSignatures = bundledSignatures; - } - - @Input - public Set getSuppressAnnotations() { - return suppressAnnotations; - } - - public void setSuppressAnnotations(Set suppressAnnotations) { - this.suppressAnnotations = suppressAnnotations; - } - - @InputFiles - public FileCollection getClassPathFromSourceSet() { - return getProject().files( - sourceSet.getCompileClasspath(), - sourceSet.getRuntimeClasspath() - ); - } - - public void setSourceSet(SourceSet sourceSet) { - this.sourceSet = sourceSet; - } - - @InputFiles - public Configuration getForbiddenAPIsConfiguration() { - return getProject().getConfigurations().getByName("forbiddenApisCliJar"); - } - - @Input - public Object getJavaHome() { - return javaHome; - } - - public void setJavaHome(Object javaHome) { - this.javaHome = javaHome; - } - - @TaskAction - public void runForbiddenApisAndWriteMarker() { - LoggedExec.javaexec(getProject(), (JavaExecSpec spec) -> { - spec.classpath( - getForbiddenAPIsConfiguration(), - getClassPathFromSourceSet() - ); - spec.setExecutable(getJavaHome() + "/bin/java"); - 
spec.setMain("de.thetaphi.forbiddenapis.cli.CliMain"); - // build the command line - getSignaturesFiles().forEach(file -> spec.args("-f", file.getAbsolutePath())); - getSuppressAnnotations().forEach(annotation -> spec.args("--suppressannotation", annotation)); - getBundledSignatures().forEach(bundled -> { - // there's no option for target compatibility so we have to interpret it - final String prefix; - if (bundled.equals("jdk-system-out") || - bundled.equals("jdk-reflection") || - bundled.equals("jdk-non-portable")) { - prefix = ""; - } else { - prefix = "-" + ( - getTargetCompatibility().compareTo(JavaVersion.VERSION_1_9) >= 0 ? - getTargetCompatibility().getMajorVersion() : - "1." + getTargetCompatibility().getMajorVersion()) - ; - } - spec.args("-b", bundled + prefix); - } - ); - getClassesDirs().forEach(dir -> - spec.args("-d", dir) - ); - }); - } - -} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java index bffa011cb7b..cd6326693ee 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java @@ -52,7 +52,7 @@ import java.util.stream.IntStream; public class ThirdPartyAuditTask extends DefaultTask { private static final Pattern MISSING_CLASS_PATTERN = Pattern.compile( - "WARNING: The referenced class '(.*)' cannot be loaded\\. Please fix the classpath!" + "WARNING: Class '(.*)' cannot be loaded \\(.*\\)\\. Please fix the classpath!" ); private static final Pattern VIOLATION_PATTERN = Pattern.compile( diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchConfiguration.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchConfiguration.java deleted file mode 100644 index a200c75880e..00000000000 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchConfiguration.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.gradle.testclusters; - -import org.elasticsearch.gradle.Distribution; -import org.elasticsearch.gradle.Version; - -import java.util.concurrent.Future; - -public interface ElasticsearchConfiguration { - String getName(); - - Version getVersion(); - - void setVersion(Version version); - - default void setVersion(String version) { - setVersion(Version.fromString(version)); - } - - Distribution getDistribution(); - - void setDistribution(Distribution distribution); - - void claim(); - - Future start(); - - void unClaimAndStop(); -} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index a196cb09e97..85931c7846b 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -29,7 +29,7 @@ import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -public class ElasticsearchNode implements ElasticsearchConfiguration { +public class ElasticsearchNode { private final String name; private final GradleServicesAdapter services; @@ -45,34 +45,28 @@ public class ElasticsearchNode implements ElasticsearchConfiguration { this.services = services; } - @Override public String getName() { return name; } - @Override public Version getVersion() { return version; } - @Override public void setVersion(Version version) { checkNotRunning(); this.version = version; } - @Override public Distribution getDistribution() { return distribution; } - @Override public void setDistribution(Distribution distribution) { checkNotRunning(); this.distribution = distribution; } - @Override public void claim() { noOfClaims.incrementAndGet(); } @@ -82,7 +76,6 @@ public class ElasticsearchNode implements ElasticsearchConfiguration { * * @return future of thread running in the background */ - @Override public Future start() { if (started.getAndSet(true)) { logger.lifecycle("Already started cluster: {}", name); @@ -95,7 +88,6 @@ public class ElasticsearchNode implements ElasticsearchConfiguration { /** * Stops a running cluster if it's not claimed. Does nothing otherwise. 
*/ - @Override public void unClaimAndStop() { int decrementedClaims = noOfClaims.decrementAndGet(); if (decrementedClaims > 0) { diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java index 3a137906bec..73aad33b8ea 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java @@ -46,7 +46,7 @@ public class TestClustersPlugin implements Plugin { @Override public void apply(Project project) { - NamedDomainObjectContainer container = project.container( + NamedDomainObjectContainer container = project.container( ElasticsearchNode.class, (name) -> new ElasticsearchNode(name, GradleServicesAdapter.getInstance(project)) ); @@ -56,12 +56,12 @@ public class TestClustersPlugin implements Plugin { listTask.setGroup("ES cluster formation"); listTask.setDescription("Lists all ES clusters configured for this project"); listTask.doLast((Task task) -> - container.forEach((ElasticsearchConfiguration cluster) -> + container.forEach((ElasticsearchNode cluster) -> logger.lifecycle(" * {}: {}", cluster.getName(), cluster.getDistribution()) ) ); - Map> taskToCluster = new HashMap<>(); + Map> taskToCluster = new HashMap<>(); // register an extension for all current and future tasks, so that any task can declare that it wants to use a // specific cluster. @@ -70,7 +70,7 @@ public class TestClustersPlugin implements Plugin { .set( "useCluster", new Closure(this, this) { - public void doCall(ElasticsearchConfiguration conf) { + public void doCall(ElasticsearchNode conf) { taskToCluster.computeIfAbsent(task, k -> new ArrayList<>()).add(conf); } }) @@ -79,7 +79,7 @@ public class TestClustersPlugin implements Plugin { project.getGradle().getTaskGraph().whenReady(taskExecutionGraph -> taskExecutionGraph.getAllTasks() .forEach(task -> - taskToCluster.getOrDefault(task, Collections.emptyList()).forEach(ElasticsearchConfiguration::claim) + taskToCluster.getOrDefault(task, Collections.emptyList()).forEach(ElasticsearchNode::claim) ) ); project.getGradle().addListener( @@ -87,7 +87,7 @@ public class TestClustersPlugin implements Plugin { @Override public void beforeActions(Task task) { // we only start the cluster before the actions, so we'll not start it if the task is up-to-date - taskToCluster.getOrDefault(task, new ArrayList<>()).forEach(ElasticsearchConfiguration::start); + taskToCluster.getOrDefault(task, new ArrayList<>()).forEach(ElasticsearchNode::start); } @Override public void afterActions(Task task) {} @@ -99,7 +99,7 @@ public class TestClustersPlugin implements Plugin { public void afterExecute(Task task, TaskState state) { // always un-claim the cluster, even if _this_ task is up-to-date, as others might not have been and caused the // cluster to start. 
- taskToCluster.getOrDefault(task, new ArrayList<>()).forEach(ElasticsearchConfiguration::unClaimAndStop); + taskToCluster.getOrDefault(task, new ArrayList<>()).forEach(ElasticsearchNode::unClaimAndStop); } @Override public void beforeExecute(Task task) {} diff --git a/client/rest/build.gradle b/client/rest/build.gradle index 273836a31f0..f07ff4240ba 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -1,4 +1,4 @@ -import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask +import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis /* * Licensed to Elasticsearch under one or more contributor @@ -52,7 +52,7 @@ dependencies { testCompile "org.elasticsearch:mocksocket:${versions.mocksocket}" } -tasks.withType(ForbiddenApisCliTask) { +tasks.withType(CheckForbiddenApis) { //client does not depend on server, so only jdk and http signatures should be checked replaceSignatureFiles ('jdk-signatures', 'http-signatures') } diff --git a/distribution/tools/launchers/build.gradle b/distribution/tools/launchers/build.gradle index ca1aa6bcac9..f933c04278e 100644 --- a/distribution/tools/launchers/build.gradle +++ b/distribution/tools/launchers/build.gradle @@ -16,10 +16,7 @@ * specific language governing permissions and limitations * under the License. */ - - - -import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask +import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis apply plugin: 'elasticsearch.build' @@ -32,7 +29,7 @@ dependencies { archivesBaseName = 'elasticsearch-launchers' -tasks.withType(ForbiddenApisCliTask) { +tasks.withType(CheckForbiddenApis) { replaceSignatureFiles 'jdk-signatures' } diff --git a/docs/reference/indices/forcemerge.asciidoc b/docs/reference/indices/forcemerge.asciidoc index 57fe746d59b..a28d5eaa858 100644 --- a/docs/reference/indices/forcemerge.asciidoc +++ b/docs/reference/indices/forcemerge.asciidoc @@ -55,7 +55,11 @@ POST /kimchy/_forcemerge?only_expunge_deletes=false&max_num_segments=100&flush=t === Multi Index The force merge API can be applied to more than one index with a single call, or -even on `_all` the indices. +even on `_all` the indices. Multi index operations are executed one shard at a +time per node. Force merge temporarily increases the storage used by the shard +being merged, up to double its size when `max_num_segments` is set to `1`, +as all segments need to be rewritten into a new one. + [source,js] -------------------------------------------------- diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 0241751a4df..bad758c8a3c 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -721,12 +721,30 @@ All processors are defined in the following way within a pipeline definition: // NOTCONSOLE Each processor defines its own configuration parameters, but all processors have -the ability to declare `tag` and `on_failure` fields. These fields are optional. +the ability to declare `tag`, `on_failure` and `if` fields. These fields are optional. A `tag` is simply a string identifier of the specific instantiation of a certain processor in a pipeline. The `tag` field does not affect the processor's behavior, but is very useful for bookkeeping and tracing errors to specific processors. +The `if` field must contain a script that returns a boolean value. If the script evaluates to `true` +then the processor will be executed for the given document; otherwise it will be skipped. 
+The `if` field takes an object with the script fields defined in <> +and accesses a read-only version of the document via the same `ctx` variable used by scripts in the +<>. + +[source,js] +-------------------------------------------------- +{ + "set": { + "if": "ctx.bar == 'expectedValue'", + "field": "foo", + "value": "bar" + } +} +-------------------------------------------------- +// NOTCONSOLE + See <> to learn more about the `on_failure` field and error handling in pipelines. The <> can be used to figure out what processors are available in a cluster. diff --git a/docs/reference/migration/apis/assistance.asciidoc b/docs/reference/migration/apis/assistance.asciidoc index ae9972cc062..943057f425a 100644 --- a/docs/reference/migration/apis/assistance.asciidoc +++ b/docs/reference/migration/apis/assistance.asciidoc @@ -40,7 +40,7 @@ GET /_xpack/migration/assistance // CONSOLE // TEST[skip:cannot create an old index in docs test] -A successful call returns a list of indices that need to updated or reindexed: +A successful call returns a list of indices that need to be updated or reindexed: [source,js] -------------------------------------------------- @@ -73,7 +73,7 @@ GET /_xpack/migration/assistance/my_* // CONSOLE // TEST[skip:cannot create an old index in docs test] -A successful call returns a list of indices that needs to updated or reindexed +A successful call returns a list of indices that need to be updated or reindexed and match the index specified on the endpoint: [source,js] diff --git a/docs/reference/rollup/apis/delete-job.asciidoc b/docs/reference/rollup/apis/delete-job.asciidoc index 37774560848..f649d3ee60d 100644 --- a/docs/reference/rollup/apis/delete-job.asciidoc +++ b/docs/reference/rollup/apis/delete-job.asciidoc @@ -8,8 +8,8 @@ experimental[] -This API deletes an existing rollup job. The job can be started or stopped, in both cases it will be deleted. Attempting -to delete a non-existing job will throw an exception +This API deletes an existing rollup job. A job must be *stopped* before it can be deleted. Attempting to delete +a started job will result in an error. Similarly, attempting to delete a nonexistent job will throw an exception. .Deleting the job does not delete rolled up data ********************************** @@ -99,12 +99,12 @@ A 404 `resource_not_found` exception will be thrown: "root_cause" : [ { "type" : "resource_not_found_exception", - "reason" : "the task with id does_not_exist doesn't exist", + "reason" : "the task with id [does_not_exist] doesn't exist", "stack_trace": ... } ], "type" : "resource_not_found_exception", - "reason" : "the task with id does_not_exist doesn't exist", + "reason" : "the task with id [does_not_exist] doesn't exist", "stack_trace": ... 
}, "status": 404 diff --git a/docs/reference/setup/install/xpack-indices.asciidoc b/docs/reference/setup/install/xpack-indices.asciidoc index 14eb756ed03..f809aafd2aa 100644 --- a/docs/reference/setup/install/xpack-indices.asciidoc +++ b/docs/reference/setup/install/xpack-indices.asciidoc @@ -6,7 +6,7 @@ creation in {es}, you must configure [source,yaml] ----------------------------------------------------------- -action.auto_create_index: .security,.monitoring*,.watches,.triggered_watches,.watcher-history*,.ml* +action.auto_create_index: .monitoring*,.watches,.triggered_watches,.watcher-history*,.ml* ----------------------------------------------------------- [IMPORTANT] diff --git a/docs/reference/sql/functions/operators.asciidoc b/docs/reference/sql/functions/operators.asciidoc index 9c90d12320e..aae9d47ec7e 100644 --- a/docs/reference/sql/functions/operators.asciidoc +++ b/docs/reference/sql/functions/operators.asciidoc @@ -3,7 +3,7 @@ [[sql-operators]] === Comparison Operators -Boolean operator for comparing one or two expressions. +Boolean operator for comparing against one or multiple expressions. * Equality (`=`) @@ -40,6 +40,13 @@ include-tagged::{sql-specs}/filter.sql-spec[whereBetween] include-tagged::{sql-specs}/filter.sql-spec[whereIsNotNullAndIsNull] -------------------------------------------------- +* `IN (, , ...)` + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/filter.sql-spec[whereWithInAndMultipleValues] +-------------------------------------------------- + [[sql-operators-logical]] === Logical Operators diff --git a/docs/reference/sql/functions/type-conversion.asciidoc b/docs/reference/sql/functions/type-conversion.asciidoc index 549b05d69d8..7075e7f2846 100644 --- a/docs/reference/sql/functions/type-conversion.asciidoc +++ b/docs/reference/sql/functions/type-conversion.asciidoc @@ -19,7 +19,7 @@ CAST ( expression<1> AS data_type<2> ) .Description -Casts the result of the given expression to the target type. +Casts the result of the given expression to the target <>. If the cast is not possible (for example because of target type is too narrow or because the value itself cannot be converted), the query fails. @@ -36,4 +36,33 @@ include-tagged::{sql-specs}/docs.csv-spec[conversionIntToStringCast] ["source","sql",subs="attributes,callouts,macros"] ---- include-tagged::{sql-specs}/docs.csv-spec[conversionStringToDateCast] ----- \ No newline at end of file +---- + + +[[sql-functions-type-conversion-convert]] +==== `CONVERT` + +.Synopsis +[source, sql] +---- +CONVERT ( expression<1>, data_type<2> ) +---- + +<1> Expression to convert +<2> Target data type to convert to + +.Description + +Works exactly like <> with slightly different syntax. +Moreover, apart from the standard <> it supports the corresponding +https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/explicit-data-type-conversion-function?view=sql-server-2017[ODBC data types]. 
+ +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[conversionStringToIntConvertODBCDataType] +---- + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[conversionStringToIntConvertESDataType] +---- diff --git a/docs/reference/sql/security.asciidoc b/docs/reference/sql/security.asciidoc index 64f554f0231..a317355866b 100644 --- a/docs/reference/sql/security.asciidoc +++ b/docs/reference/sql/security.asciidoc @@ -34,6 +34,6 @@ indices: ["source","yaml",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{sql-tests}/security/roles.yml[cli_jdbc] +include-tagged::{sql-tests}/security/roles.yml[cli_drivers] -------------------------------------------------- diff --git a/libs/core/build.gradle b/libs/core/build.gradle index 9c90837bd80..50b1b88bc61 100644 --- a/libs/core/build.gradle +++ b/libs/core/build.gradle @@ -48,8 +48,7 @@ if (!isEclipse && !isIdea) { forbiddenApisJava9 { if (project.runtimeJavaVersion < JavaVersion.VERSION_1_9) { - targetCompatibility = JavaVersion.VERSION_1_9 - javaHome = project.java9Home + targetCompatibility = JavaVersion.VERSION_1_9.getMajorVersion() } replaceSignatureFiles 'jdk-signatures' } diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/210_pipeline_processor.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/210_pipeline_processor.yml index c7c5df1e06f..4efaaeb4091 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/210_pipeline_processor.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/210_pipeline_processor.yml @@ -44,7 +44,7 @@ teardown: "processors" : [ { "pipeline" : { - "pipeline": "inner" + "name": "inner" } } ] @@ -78,7 +78,7 @@ teardown: "processors" : [ { "pipeline" : { - "pipeline": "inner" + "name": "inner" } } ] @@ -94,7 +94,7 @@ teardown: "processors" : [ { "pipeline" : { - "pipeline": "outer" + "name": "outer" } } ] diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yml index 46c4fb0a69e..65e888d8caa 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yml @@ -617,7 +617,7 @@ teardown: "processors" : [ { "pipeline" : { - "pipeline": "inner" + "name": "inner" } } ] @@ -633,7 +633,7 @@ teardown: "processors" : [ { "pipeline" : { - "pipeline": "outer" + "name": "outer" } } ] @@ -641,7 +641,6 @@ teardown: - match: { acknowledged: true } - do: - catch: /illegal_state_exception/ ingest.simulate: verbose: true body: > @@ -650,7 +649,7 @@ teardown: "processors" : [ { "pipeline" : { - "pipeline": "outer" + "name": "outer" } } ] @@ -667,8 +666,10 @@ teardown: } ] } -- match: { error.root_cause.0.type: "illegal_state_exception" } -- match: { error.root_cause.0.reason: "Cycle detected for pipeline: inner" } +- length: { docs: 1 } +- length: { docs.0.processor_results: 1 } +- match: { docs.0.processor_results.0.error.reason: "java.lang.IllegalArgumentException: java.lang.IllegalStateException: Cycle detected for pipeline: outer" } +- match: { docs.0.processor_results.0.error.caused_by.caused_by.reason: "Cycle detected for pipeline: outer" } --- "Test verbose simulate with Pipeline Processor with Multiple Pipelines": @@ -686,7 +687,7 @@ teardown: }, { 
"pipeline": { - "pipeline": "pipeline2" + "name": "pipeline2" } } ] @@ -724,7 +725,7 @@ teardown: }, { "pipeline": { - "pipeline": "pipeline1" + "name": "pipeline1" } } ] diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle index a42a28cad4e..315dcc5f6cb 100644 --- a/plugins/analysis-icu/build.gradle +++ b/plugins/analysis-icu/build.gradle @@ -1,4 +1,4 @@ -import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask +import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis /* * Licensed to Elasticsearch under one or more contributor @@ -25,7 +25,7 @@ esplugin { hasClientJar = true } -tasks.withType(ForbiddenApisCliTask) { +tasks.withType(CheckForbiddenApis) { signatures += [ "com.ibm.icu.text.Collator#getInstance() @ Don't use default locale, use getInstance(ULocale) instead" ] diff --git a/server/build.gradle b/server/build.gradle index c01fb92b050..85c7f45cf7e 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -61,8 +61,7 @@ if (!isEclipse && !isIdea) { forbiddenApisJava9 { if (project.runtimeJavaVersion < JavaVersion.VERSION_1_9) { - targetCompatibility = JavaVersion.VERSION_1_9 - javaHome = project.java9Home + targetCompatibility = JavaVersion.VERSION_1_9.getMajorVersion() } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java index 0ab1391faa2..e6f1c52aae8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java @@ -51,8 +51,6 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona */ public class ListTasksResponse extends BaseTasksResponse implements ToXContentObject { private static final String TASKS = "tasks"; - private static final String TASK_FAILURES = "task_failures"; - private static final String NODE_FAILURES = "node_failures"; private List tasks; @@ -246,28 +244,6 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContentOb return builder; } - private void toXContentCommon(XContentBuilder builder, Params params) throws IOException { - if (getTaskFailures() != null && getTaskFailures().size() > 0) { - builder.startArray(TASK_FAILURES); - for (TaskOperationFailure ex : getTaskFailures()){ - builder.startObject(); - builder.value(ex); - builder.endObject(); - } - builder.endArray(); - } - - if (getNodeFailures() != null && getNodeFailures().size() > 0) { - builder.startArray(NODE_FAILURES); - for (ElasticsearchException ex : getNodeFailures()) { - builder.startObject(); - ex.toXContent(builder, params); - builder.endObject(); - } - builder.endArray(); - } - } - public static ListTasksResponse fromXContent(XContentParser parser) { return PARSER.apply(parser, null); } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java index c081707f4db..e2b44ae2a7a 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java @@ -21,17 +21,13 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.ingest.CompoundProcessor; import 
org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.CompoundProcessor; -import org.elasticsearch.ingest.PipelineProcessor; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; -import java.util.Collections; -import java.util.IdentityHashMap; import java.util.List; -import java.util.Set; import static org.elasticsearch.ingest.TrackingResultProcessor.decorate; @@ -46,11 +42,9 @@ class SimulateExecutionService { } SimulateDocumentResult executeDocument(Pipeline pipeline, IngestDocument ingestDocument, boolean verbose) { - // Prevent cycles in pipeline decoration - final Set pipelinesSeen = Collections.newSetFromMap(new IdentityHashMap<>()); if (verbose) { List processorResultList = new ArrayList<>(); - CompoundProcessor verbosePipelineProcessor = decorate(pipeline.getCompoundProcessor(), processorResultList, pipelinesSeen); + CompoundProcessor verbosePipelineProcessor = decorate(pipeline.getCompoundProcessor(), processorResultList); try { Pipeline verbosePipeline = new Pipeline(pipeline.getId(), pipeline.getDescription(), pipeline.getVersion(), verbosePipelineProcessor); diff --git a/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java b/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java index 1436410bf20..090aaf628ac 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java @@ -25,12 +25,15 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.tasks.TaskId; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Objects; import java.util.stream.Stream; import static java.util.stream.Collectors.toList; @@ -41,6 +44,9 @@ import static org.elasticsearch.ExceptionsHelper.rethrowAndSuppress; * Base class for responses of task-related operations */ public class BaseTasksResponse extends ActionResponse { + protected static final String TASK_FAILURES = "task_failures"; + protected static final String NODE_FAILURES = "node_failures"; + private List taskFailures; private List nodeFailures; @@ -103,4 +109,44 @@ public class BaseTasksResponse extends ActionResponse { exp.writeTo(out); } } + + protected void toXContentCommon(XContentBuilder builder, ToXContent.Params params) throws IOException { + if (getTaskFailures() != null && getTaskFailures().size() > 0) { + builder.startArray(TASK_FAILURES); + for (TaskOperationFailure ex : getTaskFailures()){ + builder.startObject(); + builder.value(ex); + builder.endObject(); + } + builder.endArray(); + } + + if (getNodeFailures() != null && getNodeFailures().size() > 0) { + builder.startArray(NODE_FAILURES); + for (ElasticsearchException ex : getNodeFailures()) { + builder.startObject(); + ex.toXContent(builder, params); + builder.endObject(); + } + builder.endArray(); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + BaseTasksResponse response = (BaseTasksResponse) o; + return 
taskFailures.equals(response.taskFailures) + && nodeFailures.equals(response.nodeFailures); + } + + @Override + public int hashCode() { + return Objects.hash(taskFailures, nodeFailures); + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index d0824ea7d8f..9be5acc0561 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -27,27 +27,27 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.time.DateMathParser; -import org.elasticsearch.common.time.DateUtils; +import org.elasticsearch.common.time.JavaDateMathParser; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.indices.InvalidIndexNameException; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.SortedMap; @@ -62,7 +62,7 @@ public class IndexNameExpressionResolver extends AbstractComponent { public IndexNameExpressionResolver(Settings settings) { super(settings); expressionResolvers = Arrays.asList( - dateMathExpressionResolver = new DateMathExpressionResolver(settings), + dateMathExpressionResolver = new DateMathExpressionResolver(), new WildcardExpressionResolver() ); } @@ -815,6 +815,7 @@ public class IndexNameExpressionResolver extends AbstractComponent { static final class DateMathExpressionResolver implements ExpressionResolver { + private static final DateFormatter DEFAULT_DATE_FORMATTER = DateFormatters.forPattern("uuuu.MM.dd"); private static final String EXPRESSION_LEFT_BOUND = "<"; private static final String EXPRESSION_RIGHT_BOUND = ">"; private static final char LEFT_BOUND = '{'; @@ -822,17 +823,6 @@ public class IndexNameExpressionResolver extends AbstractComponent { private static final char ESCAPE_CHAR = '\\'; private static final char TIME_ZONE_BOUND = '|'; - private final DateTimeZone defaultTimeZone; - private final String defaultDateFormatterPattern; - private final DateTimeFormatter defaultDateFormatter; - - DateMathExpressionResolver(Settings settings) { - String defaultTimeZoneId = settings.get("date_math_expression_resolver.default_time_zone", "UTC"); - this.defaultTimeZone = DateTimeZone.forID(defaultTimeZoneId); - defaultDateFormatterPattern = settings.get("date_math_expression_resolver.default_date_format", "YYYY.MM.dd"); - this.defaultDateFormatter = DateTimeFormat.forPattern(defaultDateFormatterPattern); - } - @Override public 
List resolve(final Context context, List expressions) { List result = new ArrayList<>(expressions.size()); @@ -896,13 +886,12 @@ public class IndexNameExpressionResolver extends AbstractComponent { int dateTimeFormatLeftBoundIndex = inPlaceHolderString.indexOf(LEFT_BOUND); String mathExpression; String dateFormatterPattern; - DateTimeFormatter dateFormatter; - final DateTimeZone timeZone; + DateFormatter dateFormatter; + final ZoneId timeZone; if (dateTimeFormatLeftBoundIndex < 0) { mathExpression = inPlaceHolderString; - dateFormatterPattern = defaultDateFormatterPattern; - dateFormatter = defaultDateFormatter; - timeZone = defaultTimeZone; + dateFormatter = DEFAULT_DATE_FORMATTER; + timeZone = ZoneOffset.UTC; } else { if (inPlaceHolderString.lastIndexOf(RIGHT_BOUND) != inPlaceHolderString.length() - 1) { throw new ElasticsearchParseException("invalid dynamic name expression [{}]. missing closing `}` for date math format", inPlaceHolderString); @@ -915,20 +904,18 @@ public class IndexNameExpressionResolver extends AbstractComponent { int formatPatternTimeZoneSeparatorIndex = dateFormatterPatternAndTimeZoneId.indexOf(TIME_ZONE_BOUND); if (formatPatternTimeZoneSeparatorIndex != -1) { dateFormatterPattern = dateFormatterPatternAndTimeZoneId.substring(0, formatPatternTimeZoneSeparatorIndex); - timeZone = DateTimeZone.forID(dateFormatterPatternAndTimeZoneId.substring(formatPatternTimeZoneSeparatorIndex + 1)); + timeZone = ZoneId.of(dateFormatterPatternAndTimeZoneId.substring(formatPatternTimeZoneSeparatorIndex + 1)); } else { dateFormatterPattern = dateFormatterPatternAndTimeZoneId; - timeZone = defaultTimeZone; + timeZone = ZoneOffset.UTC; } - dateFormatter = DateTimeFormat.forPattern(dateFormatterPattern); + dateFormatter = DateFormatters.forPattern(dateFormatterPattern); } - DateTimeFormatter parser = dateFormatter.withZone(timeZone); - FormatDateTimeFormatter formatter = new FormatDateTimeFormatter(dateFormatterPattern, parser, Locale.ROOT); - DateMathParser dateMathParser = formatter.toDateMathParser(); - long millis = dateMathParser.parse(mathExpression, context::getStartTime, false, - DateUtils.dateTimeZoneToZoneId(timeZone)); + DateFormatter formatter = dateFormatter.withZone(timeZone); + DateMathParser dateMathParser = new JavaDateMathParser(formatter); + long millis = dateMathParser.parse(mathExpression, context::getStartTime, false, timeZone); - String time = formatter.printer().print(millis); + String time = formatter.format(Instant.ofEpochMilli(millis)); beforePlaceHolderSb.append(time); inPlaceHolderSb = new StringBuilder(); inPlaceHolder = false; @@ -968,18 +955,4 @@ public class IndexNameExpressionResolver extends AbstractComponent { return beforePlaceHolderSb.toString(); } } - - /** - * Returns true iff the given expression resolves to the given index name otherwise false - */ - public final boolean matchesIndex(String indexName, String expression, ClusterState state) { - final String[] concreteIndices = concreteIndexNames(state, IndicesOptions.lenientExpandOpen(), expression); - for (String index : concreteIndices) { - if (Regex.simpleMatch(index, indexName)) { - return true; - } - } - return indexName.equals(expression); - } - } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index f4eafd05e15..bafbea2e727 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ 
-249,53 +249,45 @@ public class MetaData implements Iterable, Diffable, To } /** - * Finds the specific index aliases that point to the specified concrete indices or match partially with the indices via wildcards. + * Finds the specific index aliases that point to the requested concrete indices directly + * or that match with the indices via wildcards. * - * @param concreteIndices The concrete indexes the index aliases must point to order to be returned. - * @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are - * present for that index + * @param concreteIndices The concrete indices that the aliases must point to in order to be returned. + * @return A map of index name to the list of aliases metadata. If a concrete index does not have matching + * aliases then the result will not include the index's key. */ - public ImmutableOpenMap> findAllAliases(String[] concreteIndices) { - return findAliases(Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, concreteIndices); + public ImmutableOpenMap> findAllAliases(final String[] concreteIndices) { + return findAliases(Strings.EMPTY_ARRAY, concreteIndices); } /** - * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and - * that point to the specified concrete indices or match partially with the indices via wildcards. + * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards, and + * that point to the specified concrete indices (directly or matching indices via wildcards). * * @param aliasesRequest The request to find aliases for - * @param concreteIndices The concrete indexes the index aliases must point to order to be returned. - * @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are - * present for that index + * @param concreteIndices The concrete indices that the aliases must point to in order to be returned. + * @return A map of index name to the list of aliases metadata. If a concrete index does not have matching + * aliases then the result will not include the index's key. */ - public ImmutableOpenMap> findAliases(final AliasesRequest aliasesRequest, String[] concreteIndices) { - return findAliases(aliasesRequest.getOriginalAliases(), aliasesRequest.aliases(), concreteIndices); + public ImmutableOpenMap> findAliases(final AliasesRequest aliasesRequest, final String[] concreteIndices) { + return findAliases(aliasesRequest.aliases(), concreteIndices); } /** - * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and - * that point to the specified concrete indices or match partially with the indices via wildcards. + * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards, and + * that point to the specified concrete indices (directly or matching indices via wildcards). * - * @param aliases The aliases to look for - * @param originalAliases The original aliases that the user originally requested - * @param concreteIndices The concrete indexes the index aliases must point to order to be returned. - * @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are - * present for that index + * @param aliases The aliases to look for. Might contain include or exclude wildcards. 
+ * @param concreteIndices The concrete indices that the aliases must point to in order to be returned + * @return A map of index name to the list of aliases metadata. If a concrete index does not have matching + * aliases then the result will not include the index's key. */ - private ImmutableOpenMap> findAliases(String[] originalAliases, String[] aliases, - String[] concreteIndices) { + private ImmutableOpenMap> findAliases(final String[] aliases, final String[] concreteIndices) { assert aliases != null; - assert originalAliases != null; assert concreteIndices != null; if (concreteIndices.length == 0) { return ImmutableOpenMap.of(); } - - //if aliases were provided but they got replaced with empty aliases, return empty map - if (originalAliases.length > 0 && aliases.length == 0) { - return ImmutableOpenMap.of(); - } - String[] patterns = new String[aliases.length]; boolean[] include = new boolean[aliases.length]; for (int i = 0; i < aliases.length; i++) { @@ -331,7 +323,6 @@ public class MetaData implements Iterable, Diffable, To filteredValues.add(value); } } - if (filteredValues.isEmpty() == false) { // Make the list order deterministic CollectionUtil.timSort(filteredValues, Comparator.comparing(AliasMetaData::alias)); diff --git a/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java b/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java index e1a413f6aa9..3b8281bd471 100644 --- a/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java @@ -20,12 +20,15 @@ package org.elasticsearch.ingest; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.collect.Tuple; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.function.LongSupplier; import java.util.stream.Collectors; /** @@ -40,16 +43,33 @@ public class CompoundProcessor implements Processor { private final boolean ignoreFailure; private final List processors; private final List onFailureProcessors; + private final List> processorsWithMetrics; + private final LongSupplier relativeTimeProvider; + + CompoundProcessor(LongSupplier relativeTimeProvider, Processor... processor) { + this(false, Arrays.asList(processor), Collections.emptyList(), relativeTimeProvider); + } public CompoundProcessor(Processor... 
processor) { this(false, Arrays.asList(processor), Collections.emptyList()); } public CompoundProcessor(boolean ignoreFailure, List processors, List onFailureProcessors) { + this(ignoreFailure, processors, onFailureProcessors, System::nanoTime); + } + CompoundProcessor(boolean ignoreFailure, List processors, List onFailureProcessors, + LongSupplier relativeTimeProvider) { super(); this.ignoreFailure = ignoreFailure; this.processors = processors; this.onFailureProcessors = onFailureProcessors; + this.relativeTimeProvider = relativeTimeProvider; + this.processorsWithMetrics = new ArrayList<>(processors.size()); + processors.forEach(p -> processorsWithMetrics.add(new Tuple<>(p, new IngestMetric()))); + } + + List> getProcessorsWithMetrics() { + return processorsWithMetrics; } public boolean isIgnoreFailure() { @@ -94,12 +114,17 @@ public class CompoundProcessor implements Processor { @Override public IngestDocument execute(IngestDocument ingestDocument) throws Exception { - for (Processor processor : processors) { + for (Tuple processorWithMetric : processorsWithMetrics) { + Processor processor = processorWithMetric.v1(); + IngestMetric metric = processorWithMetric.v2(); + long startTimeInNanos = relativeTimeProvider.getAsLong(); try { + metric.preIngest(); if (processor.execute(ingestDocument) == null) { return null; } } catch (Exception e) { + metric.ingestFailed(); if (ignoreFailure) { continue; } @@ -112,11 +137,15 @@ public class CompoundProcessor implements Processor { executeOnFailure(ingestDocument, compoundProcessorException); break; } + } finally { + long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTimeInNanos); + metric.postIngest(ingestTimeInMillis); } } return ingestDocument; } + void executeOnFailure(IngestDocument ingestDocument, ElasticsearchException exception) throws Exception { try { putFailureMetadata(ingestDocument, exception); diff --git a/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java b/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java index b6f6612344a..2493f291bcd 100644 --- a/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java @@ -28,6 +28,8 @@ import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.function.LongSupplier; import java.util.stream.Collectors; import org.elasticsearch.script.IngestConditionalScript; import org.elasticsearch.script.Script; @@ -42,24 +44,54 @@ public class ConditionalProcessor extends AbstractProcessor { private final ScriptService scriptService; private final Processor processor; + private final IngestMetric metric; + private final LongSupplier relativeTimeProvider; ConditionalProcessor(String tag, Script script, ScriptService scriptService, Processor processor) { + this(tag, script, scriptService, processor, System::nanoTime); + } + + ConditionalProcessor(String tag, Script script, ScriptService scriptService, Processor processor, LongSupplier relativeTimeProvider) { super(tag); this.condition = script; this.scriptService = scriptService; this.processor = processor; + this.metric = new IngestMetric(); + this.relativeTimeProvider = relativeTimeProvider; } @Override public IngestDocument execute(IngestDocument ingestDocument) throws Exception { - IngestConditionalScript script = - scriptService.compile(condition, 
IngestConditionalScript.CONTEXT).newInstance(condition.getParams()); - if (script.execute(new UnmodifiableIngestData(ingestDocument.getSourceAndMetadata()))) { - return processor.execute(ingestDocument); + if (evaluate(ingestDocument)) { + long startTimeInNanos = relativeTimeProvider.getAsLong(); + try { + metric.preIngest(); + return processor.execute(ingestDocument); + } catch (Exception e) { + metric.ingestFailed(); + throw e; + } finally { + long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTimeInNanos); + metric.postIngest(ingestTimeInMillis); + } } return ingestDocument; } + boolean evaluate(IngestDocument ingestDocument) { + IngestConditionalScript script = + scriptService.compile(condition, IngestConditionalScript.CONTEXT).newInstance(condition.getParams()); + return script.execute(new UnmodifiableIngestData(ingestDocument.getSourceAndMetadata())); + } + + Processor getProcessor() { + return processor; + } + + IngestMetric getMetric() { + return metric; + } + @Override public String getType() { return TYPE; diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 6c46a9b2354..705e77028a1 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -19,19 +19,6 @@ package org.elasticsearch.ingest; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import java.util.function.BiConsumer; -import java.util.function.Consumer; -import java.util.stream.Collectors; - import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; @@ -49,6 +36,7 @@ import org.elasticsearch.cluster.ClusterStateApplier; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -61,6 +49,19 @@ import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; +import java.util.function.Consumer; + /** * Holder class for several ingest related services. 
*/ @@ -262,11 +263,59 @@ public class IngestService implements ClusterStateApplier { Pipeline originalPipeline = originalPipelines.get(id); if (originalPipeline != null) { pipeline.getMetrics().add(originalPipeline.getMetrics()); + List<Tuple<Processor, IngestMetric>> oldPerProcessMetrics = new ArrayList<>(); + List<Tuple<Processor, IngestMetric>> newPerProcessMetrics = new ArrayList<>(); + getProcessorMetrics(originalPipeline.getCompoundProcessor(), oldPerProcessMetrics); + getProcessorMetrics(pipeline.getCompoundProcessor(), newPerProcessMetrics); + //Best attempt to populate new processor metrics using a parallel array of the old metrics. This is not ideal since + //the per-processor metrics may get reset when the arrays don't match. However, to get to an ideal model, unique and + //consistent ids per processor and/or semantic equals for each processor will be needed. + if (newPerProcessMetrics.size() == oldPerProcessMetrics.size()) { + Iterator<Tuple<Processor, IngestMetric>> oldMetricsIterator = oldPerProcessMetrics.iterator(); + for (Tuple<Processor, IngestMetric> compositeMetric : newPerProcessMetrics) { + String type = compositeMetric.v1().getType(); + IngestMetric metric = compositeMetric.v2(); + if (oldMetricsIterator.hasNext()) { + Tuple<Processor, IngestMetric> oldCompositeMetric = oldMetricsIterator.next(); + String oldType = oldCompositeMetric.v1().getType(); + IngestMetric oldMetric = oldCompositeMetric.v2(); + if (type.equals(oldType)) { + metric.add(oldMetric); + } + } + } + } } }); } } + /** + * Recursive method to obtain all of the non-failure processors for a given compoundProcessor. Since conditionals are implemented as + * wrappers around the actual processor, always prefer the actual processor's metric over the conditional processor's metric. + * @param compoundProcessor The compound processor to start walking the non-failure processors + * @param processorMetrics The list of {@link Processor} {@link IngestMetric} tuples. + * @return the processorMetrics for all non-failure processors that belong to the original compoundProcessor + */ + private static List<Tuple<Processor, IngestMetric>> getProcessorMetrics(CompoundProcessor compoundProcessor, + List<Tuple<Processor, IngestMetric>> processorMetrics) { + //only surface the top-level non-failure processors; on-failure processor times will be included in the top-level non-failure metrics + for (Tuple<Processor, IngestMetric> processorWithMetric : compoundProcessor.getProcessorsWithMetrics()) { + Processor processor = processorWithMetric.v1(); + IngestMetric metric = processorWithMetric.v2(); + if (processor instanceof CompoundProcessor) { + getProcessorMetrics((CompoundProcessor) processor, processorMetrics); + } else { + //Prefer the conditional's metric since it only includes metrics when the conditional evaluated to true. + if (processor instanceof ConditionalProcessor) { + metric = ((ConditionalProcessor) processor).getMetric(); + } + processorMetrics.add(new Tuple<>(processor, metric)); + } + } + return processorMetrics; + } + private static Pipeline substitutePipeline(String id, ElasticsearchParseException e) { String tag = e.getHeaderKeys().contains("processor_tag") ? e.getHeader("processor_tag").get(0) : null; String type = e.getHeaderKeys().contains("processor_type") ?
e.getHeader("processor_type").get(0) : "unknown"; @@ -371,11 +420,42 @@ public class IngestService implements ClusterStateApplier { } public IngestStats stats() { + IngestStats.Builder statsBuilder = new IngestStats.Builder(); + statsBuilder.addTotalMetrics(totalMetrics); + pipelines.forEach((id, pipeline) -> { + CompoundProcessor rootProcessor = pipeline.getCompoundProcessor(); + statsBuilder.addPipelineMetrics(id, pipeline.getMetrics()); + List<Tuple<Processor, IngestMetric>> processorMetrics = new ArrayList<>(); + getProcessorMetrics(rootProcessor, processorMetrics); + processorMetrics.forEach(t -> { + Processor processor = t.v1(); + IngestMetric processorMetric = t.v2(); + statsBuilder.addProcessorMetrics(id, getProcessorName(processor), processorMetric); + }); + }); + return statsBuilder.build(); + } - Map<String, IngestStats.Stats> statsPerPipeline = - pipelines.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, v -> v.getValue().getMetrics().createStats())); + //package private for testing + static String getProcessorName(Processor processor) { + // conditionals are implemented as wrappers around the real processor, so get the real processor for the correct type for the name + if (processor instanceof ConditionalProcessor) { + processor = ((ConditionalProcessor) processor).getProcessor(); + } + StringBuilder sb = new StringBuilder(5); + sb.append(processor.getType()); - return new IngestStats(totalMetrics.createStats(), statsPerPipeline); + if (processor instanceof PipelineProcessor) { + String pipelineName = ((PipelineProcessor) processor).getPipelineName(); + sb.append(":"); + sb.append(pipelineName); + } + String tag = processor.getTag(); + if (tag != null && !tag.isEmpty()) { + sb.append(":"); + sb.append(tag); + } + return sb.toString(); } private void innerExecute(IndexRequest indexRequest, Pipeline pipeline, Consumer<IndexRequest> itemDroppedHandler) throws Exception { diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestStats.java b/server/src/main/java/org/elasticsearch/ingest/IngestStats.java index c4c1520fd19..7a24cf3ee89 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestStats.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestStats.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -27,17 +28,28 @@ import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; public class IngestStats implements Writeable, ToXContentFragment { private final Stats totalStats; - private final Map<String, Stats> statsPerPipeline; + private final List<PipelineStat> pipelineStats; + private final Map<String, List<ProcessorStat>> processorStats; - public IngestStats(Stats totalStats, Map<String, Stats> statsPerPipeline) { + /** + * @param totalStats - The total stats for Ingest. This is logically the sum of all pipeline stats, + * and pipeline stats are logically the sum of the processor stats. + * @param pipelineStats - The stats for a given ingest pipeline. + * @param processorStats - The per-processor stats for a given pipeline. A map keyed by the pipeline identifier.
+ */ + public IngestStats(Stats totalStats, List<PipelineStat> pipelineStats, Map<String, List<ProcessorStat>> processorStats) { this.totalStats = totalStats; - this.statsPerPipeline = statsPerPipeline; + this.pipelineStats = pipelineStats; + this.processorStats = processorStats; } /** @@ -46,37 +58,47 @@ public class IngestStats implements Writeable, ToXContentFragment { public IngestStats(StreamInput in) throws IOException { this.totalStats = new Stats(in); int size = in.readVInt(); - this.statsPerPipeline = new HashMap<>(size); + this.pipelineStats = new ArrayList<>(size); + this.processorStats = new HashMap<>(size); for (int i = 0; i < size; i++) { - statsPerPipeline.put(in.readString(), new Stats(in)); + String pipelineId = in.readString(); + Stats pipelineStat = new Stats(in); + this.pipelineStats.add(new PipelineStat(pipelineId, pipelineStat)); + if (in.getVersion().onOrAfter(Version.V_6_5_0)) { + int processorsSize = in.readVInt(); + List<ProcessorStat> processorStatsPerPipeline = new ArrayList<>(processorsSize); + for (int j = 0; j < processorsSize; j++) { + String processorName = in.readString(); + Stats processorStat = new Stats(in); + processorStatsPerPipeline.add(new ProcessorStat(processorName, processorStat)); + } + this.processorStats.put(pipelineId, processorStatsPerPipeline); + } } } @Override public void writeTo(StreamOutput out) throws IOException { totalStats.writeTo(out); - out.writeVInt(statsPerPipeline.size()); - for (Map.Entry<String, Stats> entry : statsPerPipeline.entrySet()) { - out.writeString(entry.getKey()); - entry.getValue().writeTo(out); + out.writeVInt(pipelineStats.size()); + for (PipelineStat pipelineStat : pipelineStats) { + out.writeString(pipelineStat.getPipelineId()); + pipelineStat.getStats().writeTo(out); + if (out.getVersion().onOrAfter(Version.V_6_5_0)) { + List<ProcessorStat> processorStatsForPipeline = processorStats.get(pipelineStat.getPipelineId()); + if (processorStatsForPipeline == null) { + out.writeVInt(0); + } else { + out.writeVInt(processorStatsForPipeline.size()); + for (ProcessorStat processorStat : processorStatsForPipeline) { + out.writeString(processorStat.getName()); + processorStat.getStats().writeTo(out); + } + } + } } } - - /** - * @return The accumulated stats for all pipelines - */ - public Stats getTotalStats() { - return totalStats; - } - - /** - * @return The stats on a per pipeline basis - */ - public Map<String, Stats> getStatsPerPipeline() { - return statsPerPipeline; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject("ingest"); @@ -84,9 +106,21 @@ public class IngestStats implements Writeable, ToXContentFragment { totalStats.toXContent(builder, params); builder.endObject(); builder.startObject("pipelines"); - for (Map.Entry<String, Stats> entry : statsPerPipeline.entrySet()) { - builder.startObject(entry.getKey()); - entry.getValue().toXContent(builder, params); + for (PipelineStat pipelineStat : pipelineStats) { + builder.startObject(pipelineStat.getPipelineId()); + pipelineStat.getStats().toXContent(builder, params); + List<ProcessorStat> processorStatsForPipeline = processorStats.get(pipelineStat.getPipelineId()); + builder.startArray("processors"); + if (processorStatsForPipeline != null) { + for (ProcessorStat processorStat : processorStatsForPipeline) { + builder.startObject(); + builder.startObject(processorStat.getName()); + processorStat.getStats().toXContent(builder, params); + builder.endObject(); + builder.endObject(); + } + } + builder.endArray(); builder.endObject(); } builder.endObject(); @@ -94,6 +128,18 @@ public class IngestStats implements
Writeable, ToXContentFragment { return builder; } + public Stats getTotalStats() { + return totalStats; + } + + public List<PipelineStat> getPipelineStats() { + return pipelineStats; + } + + public Map<String, List<ProcessorStat>> getProcessorStats() { + return processorStats; + } + public static class Stats implements Writeable, ToXContentFragment { private final long ingestCount; @@ -134,7 +180,6 @@ public class IngestStats implements Writeable, ToXContentFragment { } /** - * * @return The total time spent of ingest preprocessing in millis. */ public long getIngestTimeInMillis() { @@ -164,4 +209,77 @@ public class IngestStats implements Writeable, ToXContentFragment { return builder; } } + + /** + * Easy conversion from scoped {@link IngestMetric} objects to serializable Stats objects + */ + static class Builder { + private Stats totalStats; + private List<PipelineStat> pipelineStats = new ArrayList<>(); + private Map<String, List<ProcessorStat>> processorStats = new HashMap<>(); + + + Builder addTotalMetrics(IngestMetric totalMetric) { + this.totalStats = totalMetric.createStats(); + return this; + } + + Builder addPipelineMetrics(String pipelineId, IngestMetric pipelineMetric) { + this.pipelineStats.add(new PipelineStat(pipelineId, pipelineMetric.createStats())); + return this; + } + + Builder addProcessorMetrics(String pipelineId, String processorName, IngestMetric metric) { + this.processorStats.computeIfAbsent(pipelineId, k -> new ArrayList<>()) + .add(new ProcessorStat(processorName, metric.createStats())); + return this; + } + + IngestStats build() { + return new IngestStats(totalStats, Collections.unmodifiableList(pipelineStats), + Collections.unmodifiableMap(processorStats)); + } + } + + /** + * Container for pipeline stats. + */ + public static class PipelineStat { + private final String pipelineId; + private final Stats stats; + + public PipelineStat(String pipelineId, Stats stats) { + this.pipelineId = pipelineId; + this.stats = stats; + } + + public String getPipelineId() { + return pipelineId; + } + + public Stats getStats() { + return stats; + } + } + + /** + * Container for processor stats.
+ */ + public static class ProcessorStat { + private final String name; + private final Stats stats; + + public ProcessorStat(String name, Stats stats) { + this.name = name; + this.stats = stats; + } + + public String getName() { + return name; + } + + public Stats getStats() { + return stats; + } + } } diff --git a/server/src/main/java/org/elasticsearch/ingest/Pipeline.java b/server/src/main/java/org/elasticsearch/ingest/Pipeline.java index 8d5f6d6ff7c..fc5311be5cb 100644 --- a/server/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/server/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -22,11 +22,12 @@ package org.elasticsearch.ingest; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; -import java.time.Clock; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.function.LongSupplier; import org.elasticsearch.script.ScriptService; @@ -47,20 +48,21 @@ public final class Pipeline { private final Integer version; private final CompoundProcessor compoundProcessor; private final IngestMetric metrics; - private final Clock clock; + private final LongSupplier relativeTimeProvider; public Pipeline(String id, @Nullable String description, @Nullable Integer version, CompoundProcessor compoundProcessor) { - this(id, description, version, compoundProcessor, Clock.systemUTC()); + this(id, description, version, compoundProcessor, System::nanoTime); } //package private for testing - Pipeline(String id, @Nullable String description, @Nullable Integer version, CompoundProcessor compoundProcessor, Clock clock) { + Pipeline(String id, @Nullable String description, @Nullable Integer version, CompoundProcessor compoundProcessor, + LongSupplier relativeTimeProvider) { this.id = id; this.description = description; this.compoundProcessor = compoundProcessor; this.version = version; this.metrics = new IngestMetric(); - this.clock = clock; + this.relativeTimeProvider = relativeTimeProvider; } public static Pipeline create(String id, Map config, @@ -89,7 +91,7 @@ public final class Pipeline { * Modifies the data of a document to be indexed based on the processor this pipeline holds */ public IngestDocument execute(IngestDocument ingestDocument) throws Exception { - long startTimeInMillis = clock.millis(); + long startTimeInNanos = relativeTimeProvider.getAsLong(); try { metrics.preIngest(); return compoundProcessor.execute(ingestDocument); @@ -97,7 +99,7 @@ public final class Pipeline { metrics.ingestFailed(); throw e; } finally { - long ingestTimeInMillis = clock.millis() - startTimeInMillis; + long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTimeInNanos); metrics.postIngest(ingestTimeInMillis); } } diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineProcessor.java b/server/src/main/java/org/elasticsearch/ingest/PipelineProcessor.java index 918ff6b8aef..b5794a3f768 100644 --- a/server/src/main/java/org/elasticsearch/ingest/PipelineProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineProcessor.java @@ -53,6 +53,10 @@ public class PipelineProcessor extends AbstractProcessor { return TYPE; } + String getPipelineName() { + return pipelineName; + } + public static final class Factory implements Processor.Factory { private final IngestService ingestService; @@ -65,7 +69,7 @@ public class PipelineProcessor extends AbstractProcessor { public PipelineProcessor create(Map 
registry, String processorTag, Map config) throws Exception { String pipeline = - ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "pipeline"); + ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "name"); return new PipelineProcessor(processorTag, pipeline, ingestService); } } diff --git a/server/src/main/java/org/elasticsearch/ingest/TrackingResultProcessor.java b/server/src/main/java/org/elasticsearch/ingest/TrackingResultProcessor.java index 41a984be5ad..4b787151446 100644 --- a/server/src/main/java/org/elasticsearch/ingest/TrackingResultProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/TrackingResultProcessor.java @@ -19,11 +19,11 @@ package org.elasticsearch.ingest; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ingest.SimulateProcessorResult; import java.util.ArrayList; import java.util.List; -import java.util.Set; /** * Processor to be used within Simulate API to keep track of processors executed in pipeline. @@ -42,14 +42,46 @@ public final class TrackingResultProcessor implements Processor { @Override public IngestDocument execute(IngestDocument ingestDocument) throws Exception { + Processor processor = actualProcessor; try { - actualProcessor.execute(ingestDocument); - processorResultList.add(new SimulateProcessorResult(actualProcessor.getTag(), new IngestDocument(ingestDocument))); + if (processor instanceof ConditionalProcessor) { + ConditionalProcessor conditionalProcessor = (ConditionalProcessor) processor; + if (conditionalProcessor.evaluate(ingestDocument) == false) { + return ingestDocument; + } + if (conditionalProcessor.getProcessor() instanceof PipelineProcessor) { + processor = conditionalProcessor.getProcessor(); + } + } + if (processor instanceof PipelineProcessor) { + PipelineProcessor pipelineProcessor = ((PipelineProcessor) processor); + Pipeline pipeline = pipelineProcessor.getPipeline(); + //runtime check for cycles against a copy of the document. 
This is needed to properly handle conditionals around pipelines + try { + IngestDocument ingestDocumentCopy = new IngestDocument(ingestDocument); + ingestDocumentCopy.executePipeline(pipelineProcessor.getPipeline()); + } catch (ElasticsearchException elasticsearchException) { + if (elasticsearchException.getCause().getCause() instanceof IllegalStateException) { + throw elasticsearchException; + } + //else do nothing, let the tracking processors throw the exception while recording the path up to the failure + } catch (Exception e) { + // do nothing, let the tracking processors throw the exception while recording the path up to the failure + } + //now that we know that there are no cycles between pipelines, decorate the processors for this pipeline and execute it + CompoundProcessor verbosePipelineProcessor = decorate(pipeline.getCompoundProcessor(), processorResultList); + Pipeline verbosePipeline = new Pipeline(pipeline.getId(), pipeline.getDescription(), pipeline.getVersion(), + verbosePipelineProcessor); + ingestDocument.executePipeline(verbosePipeline); + } else { + processor.execute(ingestDocument); + processorResultList.add(new SimulateProcessorResult(processor.getTag(), new IngestDocument(ingestDocument))); + } } catch (Exception e) { if (ignoreFailure) { - processorResultList.add(new SimulateProcessorResult(actualProcessor.getTag(), new IngestDocument(ingestDocument), e)); + processorResultList.add(new SimulateProcessorResult(processor.getTag(), new IngestDocument(ingestDocument), e)); } else { - processorResultList.add(new SimulateProcessorResult(actualProcessor.getTag(), e)); + processorResultList.add(new SimulateProcessorResult(processor.getTag(), e)); } throw e; } @@ -66,35 +98,19 @@ public final class TrackingResultProcessor implements Processor { return actualProcessor.getTag(); } - public static CompoundProcessor decorate(CompoundProcessor compoundProcessor, List processorResultList, - Set pipelinesSeen) { + public static CompoundProcessor decorate(CompoundProcessor compoundProcessor, List processorResultList) { List processors = new ArrayList<>(compoundProcessor.getProcessors().size()); for (Processor processor : compoundProcessor.getProcessors()) { - if (processor instanceof PipelineProcessor) { - PipelineProcessor pipelineProcessor = ((PipelineProcessor) processor); - if (pipelinesSeen.add(pipelineProcessor) == false) { - throw new IllegalStateException("Cycle detected for pipeline: " + pipelineProcessor.getPipeline().getId()); - } - processors.add(decorate(pipelineProcessor.getPipeline().getCompoundProcessor(), processorResultList, pipelinesSeen)); - pipelinesSeen.remove(pipelineProcessor); - } else if (processor instanceof CompoundProcessor) { - processors.add(decorate((CompoundProcessor) processor, processorResultList, pipelinesSeen)); + if (processor instanceof CompoundProcessor) { + processors.add(decorate((CompoundProcessor) processor, processorResultList)); } else { processors.add(new TrackingResultProcessor(compoundProcessor.isIgnoreFailure(), processor, processorResultList)); } } List onFailureProcessors = new ArrayList<>(compoundProcessor.getProcessors().size()); for (Processor processor : compoundProcessor.getOnFailureProcessors()) { - if (processor instanceof PipelineProcessor) { - PipelineProcessor pipelineProcessor = ((PipelineProcessor) processor); - if (pipelinesSeen.add(pipelineProcessor) == false) { - throw new IllegalStateException("Cycle detected for pipeline: " + pipelineProcessor.getPipeline().getId()); - } - 
onFailureProcessors.add(decorate(pipelineProcessor.getPipeline().getCompoundProcessor(), processorResultList, - pipelinesSeen)); - pipelinesSeen.remove(pipelineProcessor); - } else if (processor instanceof CompoundProcessor) { - onFailureProcessors.add(decorate((CompoundProcessor) processor, processorResultList, pipelinesSeen)); + if (processor instanceof CompoundProcessor) { + onFailureProcessors.add(decorate((CompoundProcessor) processor, processorResultList)); } else { onFailureProcessors.add(new TrackingResultProcessor(compoundProcessor.isIgnoreFailure(), processor, processorResultList)); } diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 5c8c25cbfdd..62778af8d57 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -133,11 +133,7 @@ public class ExceptionSerializationTests extends ESTestCase { @Override public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { - Path next = pkgPrefix.resolve(dir.getFileName()); - if (ignore.contains(next)) { - return FileVisitResult.SKIP_SUBTREE; - } - pkgPrefix = next; + pkgPrefix = pkgPrefix.resolve(dir.getFileName()); return FileVisitResult.CONTINUE; } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index 3384efcf836..8f51fb08dd2 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -53,7 +53,6 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; public class NodeStatsTests extends ESTestCase { - public void testSerialization() throws IOException { NodeStats nodeStats = createNodeStats(); try (BytesStreamOutput out = new BytesStreamOutput()) { @@ -271,14 +270,29 @@ public class NodeStatsTests extends ESTestCase { assertEquals(totalStats.getIngestCurrent(), deserializedIngestStats.getTotalStats().getIngestCurrent()); assertEquals(totalStats.getIngestFailedCount(), deserializedIngestStats.getTotalStats().getIngestFailedCount()); assertEquals(totalStats.getIngestTimeInMillis(), deserializedIngestStats.getTotalStats().getIngestTimeInMillis()); - assertEquals(ingestStats.getStatsPerPipeline().size(), deserializedIngestStats.getStatsPerPipeline().size()); - for (Map.Entry entry : ingestStats.getStatsPerPipeline().entrySet()) { - IngestStats.Stats stats = entry.getValue(); - IngestStats.Stats deserializedStats = deserializedIngestStats.getStatsPerPipeline().get(entry.getKey()); - assertEquals(stats.getIngestFailedCount(), deserializedStats.getIngestFailedCount()); - assertEquals(stats.getIngestTimeInMillis(), deserializedStats.getIngestTimeInMillis()); - assertEquals(stats.getIngestCurrent(), deserializedStats.getIngestCurrent()); - assertEquals(stats.getIngestCount(), deserializedStats.getIngestCount()); + assertEquals(ingestStats.getPipelineStats().size(), deserializedIngestStats.getPipelineStats().size()); + for (IngestStats.PipelineStat pipelineStat : ingestStats.getPipelineStats()) { + String pipelineId = pipelineStat.getPipelineId(); + IngestStats.Stats deserializedPipelineStats = + getPipelineStats(deserializedIngestStats.getPipelineStats(), 
pipelineId); + assertEquals(pipelineStat.getStats().getIngestFailedCount(), deserializedPipelineStats.getIngestFailedCount()); + assertEquals(pipelineStat.getStats().getIngestTimeInMillis(), deserializedPipelineStats.getIngestTimeInMillis()); + assertEquals(pipelineStat.getStats().getIngestCurrent(), deserializedPipelineStats.getIngestCurrent()); + assertEquals(pipelineStat.getStats().getIngestCount(), deserializedPipelineStats.getIngestCount()); + List<IngestStats.ProcessorStat> processorStats = ingestStats.getProcessorStats().get(pipelineId); + //intentionally validating identical order + Iterator<IngestStats.ProcessorStat> it = deserializedIngestStats.getProcessorStats().get(pipelineId).iterator(); + for (IngestStats.ProcessorStat processorStat : processorStats) { + IngestStats.ProcessorStat deserializedProcessorStat = it.next(); + assertEquals(processorStat.getStats().getIngestFailedCount(), + deserializedProcessorStat.getStats().getIngestFailedCount(); + assertEquals(processorStat.getStats().getIngestTimeInMillis(), + deserializedProcessorStat.getStats().getIngestTimeInMillis()); + assertEquals(processorStat.getStats().getIngestCurrent(), + deserializedProcessorStat.getStats().getIngestCurrent()); + assertEquals(processorStat.getStats().getIngestCount(), deserializedProcessorStat.getStats().getIngestCount()); + } + assertFalse(it.hasNext()); } } AdaptiveSelectionStats adaptiveStats = nodeStats.getAdaptiveSelectionStats(); @@ -429,14 +443,24 @@ public class NodeStatsTests extends ESTestCase { if (frequently()) { IngestStats.Stats totalStats = new IngestStats.Stats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()); + int numPipelines = randomIntBetween(0, 10); + int numProcessors = randomIntBetween(0, 10); + List<IngestStats.PipelineStat> ingestPipelineStats = new ArrayList<>(numPipelines); + Map<String, List<IngestStats.ProcessorStat>> ingestProcessorStats = new HashMap<>(numPipelines); + for (int i = 0; i < numPipelines; i++) { + String pipelineId = randomAlphaOfLengthBetween(3, 10); + ingestPipelineStats.add(new IngestStats.PipelineStat(pipelineId, new IngestStats.Stats + (randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()))); - int numStatsPerPipeline = randomIntBetween(0, 10); - Map<String, IngestStats.Stats> statsPerPipeline = new HashMap<>(); - for (int i = 0; i < numStatsPerPipeline; i++) { - statsPerPipeline.put(randomAlphaOfLengthBetween(3, 10), new IngestStats.Stats(randomNonNegativeLong(), - randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong())); + List<IngestStats.ProcessorStat> processorPerPipeline = new ArrayList<>(numProcessors); + for (int j = 0; j < numProcessors; j++) { + IngestStats.Stats processorStats = new IngestStats.Stats + (randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()); + processorPerPipeline.add(new IngestStats.ProcessorStat(randomAlphaOfLengthBetween(3, 10), processorStats)); + } + ingestProcessorStats.put(pipelineId, processorPerPipeline); } - ingestStats = new IngestStats(totalStats, statsPerPipeline); + ingestStats = new IngestStats(totalStats, ingestPipelineStats, ingestProcessorStats); } AdaptiveSelectionStats adaptiveSelectionStats = null; if (frequently()) { @@ -465,4 +489,8 @@ public class NodeStatsTests extends ESTestCase { fsInfo, transportStats, httpStats, allCircuitBreakerStats, scriptStats, discoveryStats, ingestStats, adaptiveSelectionStats); } + + private IngestStats.Stats getPipelineStats(List<IngestStats.PipelineStat> pipelineStats, String id) { + return pipelineStats.stream().filter(p1 -> p1.getPipelineId().equals(id)).findFirst().map(p2 -> p2.getStats()).orElse(null);
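Note: IngestService.stats() above assembles this nested shape through IngestStats.Builder, and processor names follow the type[:pipelineName][:tag] convention from getProcessorName. A hedged usage sketch; the pipeline id, processor names, and the metric variables (assumed IngestMetric instances accumulated elsewhere) are made up for illustration.

    IngestStats.Builder statsBuilder = new IngestStats.Builder();
    statsBuilder.addTotalMetrics(totalMetric);                        // node-wide totals
    statsBuilder.addPipelineMetrics("logs-pipeline", pipelineMetric); // one entry per pipeline
    statsBuilder.addProcessorMetrics("logs-pipeline", "set:rename-host", setMetric);       // "type:tag"
    statsBuilder.addProcessorMetrics("logs-pipeline", "pipeline:logs-child", childMetric); // "pipeline:<name>"
    IngestStats stats = statsBuilder.build(); // lists and maps are wrapped unmodifiable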
+ } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java index 4f235e52636..b0660929648 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.DateMathExpressionResolver; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -42,7 +41,7 @@ import static org.joda.time.DateTimeZone.UTC; public class DateMathExpressionResolverTests extends ESTestCase { - private final DateMathExpressionResolver expressionResolver = new DateMathExpressionResolver(Settings.EMPTY); + private final DateMathExpressionResolver expressionResolver = new DateMathExpressionResolver(); private final Context context = new Context( ClusterState.builder(new ClusterName("_name")).build(), IndicesOptions.strictExpand() ); @@ -118,37 +117,6 @@ public class DateMathExpressionResolverTests extends ESTestCase { assertThat(result.get(3), equalTo(".logstash-" + DateTimeFormat.forPattern("YYYY.MM").print(new DateTime(context.getStartTime(), UTC).withDayOfMonth(1)))); } - public void testExpression_CustomTimeZoneInSetting() throws Exception { - DateTimeZone timeZone; - int hoursOffset; - int minutesOffset = 0; - if (randomBoolean()) { - hoursOffset = randomIntBetween(-12, 14); - timeZone = DateTimeZone.forOffsetHours(hoursOffset); - } else { - hoursOffset = randomIntBetween(-11, 13); - minutesOffset = randomIntBetween(0, 59); - timeZone = DateTimeZone.forOffsetHoursMinutes(hoursOffset, minutesOffset); - } - DateTime now; - if (hoursOffset >= 0) { - // rounding to next day 00:00 - now = DateTime.now(UTC).plusHours(hoursOffset).plusMinutes(minutesOffset).withHourOfDay(0).withMinuteOfHour(0).withSecondOfMinute(0); - } else { - // rounding to today 00:00 - now = DateTime.now(UTC).withHourOfDay(0).withMinuteOfHour(0).withSecondOfMinute(0); - } - Settings settings = Settings.builder() - .put("date_math_expression_resolver.default_time_zone", timeZone.getID()) - .build(); - DateMathExpressionResolver expressionResolver = new DateMathExpressionResolver(settings); - Context context = new Context(this.context.getState(), this.context.getOptions(), now.getMillis()); - List results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{YYYY.MM.dd}}>")); - assertThat(results.size(), equalTo(1)); - logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, results.get(0)); - assertThat(results.get(0), equalTo(".marvel-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now.withZone(timeZone)))); - } - public void testExpression_CustomTimeZoneInIndexName() throws Exception { DateTimeZone timeZone; int hoursOffset; diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java index 1aaec080307..9585381029f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java +++ 
b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java @@ -51,6 +51,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.indices.InvalidAliasNameException; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; import org.mockito.ArgumentCaptor; @@ -82,7 +83,7 @@ import static org.mockito.Mockito.when; public class IndexCreationTaskTests extends ESTestCase { private final IndicesService indicesService = mock(IndicesService.class); - private final AliasValidator aliasValidator = mock(AliasValidator.class); + private final AliasValidator aliasValidator = new AliasValidator(Settings.EMPTY); private final NamedXContentRegistry xContentRegistry = mock(NamedXContentRegistry.class); private final CreateIndexClusterStateUpdateRequest request = mock(CreateIndexClusterStateUpdateRequest.class); private final Logger logger = mock(Logger.class); @@ -149,6 +150,12 @@ public class IndexCreationTaskTests extends ESTestCase { assertThat(getMappingsFromResponse(), Matchers.hasKey("mapping1")); } + public void testInvalidAliasName() throws Exception { + final String[] invalidAliasNames = new String[] { "-alias1", "+alias2", "_alias3", "a#lias", "al:ias", ".", ".." }; + setupRequestAlias(new Alias(randomFrom(invalidAliasNames))); + expectThrows(InvalidAliasNameException.class, this::executeTask); + } + public void testRequestDataHavePriorityOverTemplateData() throws Exception { final CompressedXContent tplMapping = createMapping("text"); final CompressedXContent reqMapping = createMapping("keyword"); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index a929028a34e..c2950256884 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -66,6 +66,14 @@ public class MetaDataTests extends ESTestCase { assertThat(aliases.size(), equalTo(0)); } { + final GetAliasesRequest request; + if (randomBoolean()) { + request = new GetAliasesRequest(); + } else { + request = new GetAliasesRequest(randomFrom("alias1", "alias2")); + // replacing with empty aliases behaves as if aliases were unspecified at request building + request.replaceAliases(Strings.EMPTY_ARRAY); + } ImmutableOpenMap> aliases = metaData.findAliases(new GetAliasesRequest(), new String[]{"index"}); assertThat(aliases.size(), equalTo(1)); List aliasMetaDataList = aliases.get("index"); @@ -73,12 +81,6 @@ public class MetaDataTests extends ESTestCase { assertThat(aliasMetaDataList.get(0).alias(), equalTo("alias1")); assertThat(aliasMetaDataList.get(1).alias(), equalTo("alias2")); } - { - GetAliasesRequest getAliasesRequest = new GetAliasesRequest("alias1"); - getAliasesRequest.replaceAliases(Strings.EMPTY_ARRAY); - ImmutableOpenMap> aliases = metaData.findAliases(getAliasesRequest, new String[]{"index"}); - assertThat(aliases.size(), equalTo(0)); - } { ImmutableOpenMap> aliases = metaData.findAliases(new GetAliasesRequest("alias*"), new String[]{"index"}); diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java index 3a011251224..1312486ff69 100644 --- 
a/server/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java @@ -156,7 +156,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase {}); - CompoundProcessor compoundProcessor = new CompoundProcessor(processor); + LongSupplier relativeTimeProvider = mock(LongSupplier.class); + when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(1)); + TestProcessor processor = new TestProcessor(ingestDocument ->{ + assertStats(0, ingestDocument.getFieldValue("compoundProcessor", CompoundProcessor.class), 1, 0, 0, 0); + }); + CompoundProcessor compoundProcessor = new CompoundProcessor(relativeTimeProvider, processor); + ingestDocument.setFieldValue("compoundProcessor", compoundProcessor); //ugly hack to assert current count = 1 assertThat(compoundProcessor.getProcessors().size(), equalTo(1)); assertThat(compoundProcessor.getProcessors().get(0), sameInstance(processor)); + assertThat(compoundProcessor.getProcessors().get(0), sameInstance(processor)); assertThat(compoundProcessor.getOnFailureProcessors().isEmpty(), is(true)); compoundProcessor.execute(ingestDocument); + verify(relativeTimeProvider, times(2)).getAsLong(); assertThat(processor.getInvokedCounter(), equalTo(1)); + assertStats(compoundProcessor, 1, 0, 1); + } public void testSingleProcessorWithException() throws Exception { TestProcessor processor = new TestProcessor(ingestDocument -> {throw new RuntimeException("error");}); - CompoundProcessor compoundProcessor = new CompoundProcessor(processor); + LongSupplier relativeTimeProvider = mock(LongSupplier.class); + when(relativeTimeProvider.getAsLong()).thenReturn(0L); + CompoundProcessor compoundProcessor = new CompoundProcessor(relativeTimeProvider, processor); assertThat(compoundProcessor.getProcessors().size(), equalTo(1)); assertThat(compoundProcessor.getProcessors().get(0), sameInstance(processor)); assertThat(compoundProcessor.getOnFailureProcessors().isEmpty(), is(true)); @@ -71,15 +88,22 @@ public class CompoundProcessorTests extends ESTestCase { assertThat(e.getRootCause().getMessage(), equalTo("error")); } assertThat(processor.getInvokedCounter(), equalTo(1)); + assertStats(compoundProcessor, 1, 1, 0); + } public void testIgnoreFailure() throws Exception { TestProcessor processor1 = new TestProcessor(ingestDocument -> {throw new RuntimeException("error");}); TestProcessor processor2 = new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue("field", "value");}); - CompoundProcessor compoundProcessor = new CompoundProcessor(true, Arrays.asList(processor1, processor2), Collections.emptyList()); + LongSupplier relativeTimeProvider = mock(LongSupplier.class); + when(relativeTimeProvider.getAsLong()).thenReturn(0L); + CompoundProcessor compoundProcessor = + new CompoundProcessor(true, Arrays.asList(processor1, processor2), Collections.emptyList(), relativeTimeProvider); compoundProcessor.execute(ingestDocument); assertThat(processor1.getInvokedCounter(), equalTo(1)); + assertStats(0, compoundProcessor, 0, 1, 1, 0); assertThat(processor2.getInvokedCounter(), equalTo(1)); + assertStats(1, compoundProcessor, 0, 1, 0, 0); assertThat(ingestDocument.getFieldValue("field", String.class), equalTo("value")); } @@ -93,11 +117,15 @@ public class CompoundProcessorTests extends ESTestCase { assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("id")); }); + LongSupplier relativeTimeProvider = 
mock(LongSupplier.class); + when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(1)); CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(processor1), - Collections.singletonList(processor2)); + Collections.singletonList(processor2), relativeTimeProvider); compoundProcessor.execute(ingestDocument); + verify(relativeTimeProvider, times(2)).getAsLong(); assertThat(processor1.getInvokedCounter(), equalTo(1)); + assertStats(compoundProcessor, 1, 1, 1); assertThat(processor2.getInvokedCounter(), equalTo(1)); } @@ -118,14 +146,17 @@ public class CompoundProcessorTests extends ESTestCase { assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("second")); assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("id2")); }); + LongSupplier relativeTimeProvider = mock(LongSupplier.class); + when(relativeTimeProvider.getAsLong()).thenReturn(0L); CompoundProcessor compoundOnFailProcessor = new CompoundProcessor(false, Collections.singletonList(processorToFail), - Collections.singletonList(lastProcessor)); + Collections.singletonList(lastProcessor), relativeTimeProvider); CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(processor), - Collections.singletonList(compoundOnFailProcessor)); + Collections.singletonList(compoundOnFailProcessor), relativeTimeProvider); compoundProcessor.execute(ingestDocument); assertThat(processorToFail.getInvokedCounter(), equalTo(1)); assertThat(lastProcessor.getInvokedCounter(), equalTo(1)); + assertStats(compoundProcessor, 1, 1, 0); } public void testCompoundProcessorExceptionFailWithoutOnFailure() throws Exception { @@ -137,21 +168,24 @@ public class CompoundProcessorTests extends ESTestCase { assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("first")); assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("id1")); }); + LongSupplier relativeTimeProvider = mock(LongSupplier.class); + when(relativeTimeProvider.getAsLong()).thenReturn(0L); - CompoundProcessor failCompoundProcessor = new CompoundProcessor(firstProcessor); + CompoundProcessor failCompoundProcessor = new CompoundProcessor(relativeTimeProvider, firstProcessor); CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(failCompoundProcessor), - Collections.singletonList(secondProcessor)); + Collections.singletonList(secondProcessor), relativeTimeProvider); compoundProcessor.execute(ingestDocument); assertThat(firstProcessor.getInvokedCounter(), equalTo(1)); assertThat(secondProcessor.getInvokedCounter(), equalTo(1)); + assertStats(compoundProcessor, 1, 1, 0); } public void testCompoundProcessorExceptionFail() throws Exception { TestProcessor firstProcessor = new TestProcessor("id1", "first", ingestDocument -> {throw new RuntimeException("error");}); TestProcessor failProcessor = - new TestProcessor("tag_fail", "fail", ingestDocument -> {throw new RuntimeException("custom error message");}); + new TestProcessor("tag_fail", "fail", ingestDocument -> {throw new RuntimeException("custom error message");}); TestProcessor secondProcessor = new TestProcessor("id3", "second", ingestDocument -> { Map ingestMetadata = ingestDocument.getIngestMetadata(); assertThat(ingestMetadata.entrySet(), hasSize(3)); @@ -160,21 +194,24 @@ public class CompoundProcessorTests extends ESTestCase { 
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("tag_fail")); }); + LongSupplier relativeTimeProvider = mock(LongSupplier.class); + when(relativeTimeProvider.getAsLong()).thenReturn(0L); CompoundProcessor failCompoundProcessor = new CompoundProcessor(false, Collections.singletonList(firstProcessor), - Collections.singletonList(failProcessor)); + Collections.singletonList(failProcessor), relativeTimeProvider); CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(failCompoundProcessor), - Collections.singletonList(secondProcessor)); + Collections.singletonList(secondProcessor), relativeTimeProvider); compoundProcessor.execute(ingestDocument); assertThat(firstProcessor.getInvokedCounter(), equalTo(1)); assertThat(secondProcessor.getInvokedCounter(), equalTo(1)); + assertStats(compoundProcessor, 1, 1, 0); } public void testCompoundProcessorExceptionFailInOnFailure() throws Exception { TestProcessor firstProcessor = new TestProcessor("id1", "first", ingestDocument -> {throw new RuntimeException("error");}); TestProcessor failProcessor = - new TestProcessor("tag_fail", "fail", ingestDocument -> {throw new RuntimeException("custom error message");}); + new TestProcessor("tag_fail", "fail", ingestDocument -> {throw new RuntimeException("custom error message");}); TestProcessor secondProcessor = new TestProcessor("id3", "second", ingestDocument -> { Map ingestMetadata = ingestDocument.getIngestMetadata(); assertThat(ingestMetadata.entrySet(), hasSize(3)); @@ -183,27 +220,44 @@ public class CompoundProcessorTests extends ESTestCase { assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("tag_fail")); }); + LongSupplier relativeTimeProvider = mock(LongSupplier.class); + when(relativeTimeProvider.getAsLong()).thenReturn(0L); CompoundProcessor failCompoundProcessor = new CompoundProcessor(false, Collections.singletonList(firstProcessor), - Collections.singletonList(new CompoundProcessor(failProcessor))); + Collections.singletonList(new CompoundProcessor(relativeTimeProvider, failProcessor))); CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(failCompoundProcessor), - Collections.singletonList(secondProcessor)); + Collections.singletonList(secondProcessor), relativeTimeProvider); compoundProcessor.execute(ingestDocument); assertThat(firstProcessor.getInvokedCounter(), equalTo(1)); assertThat(secondProcessor.getInvokedCounter(), equalTo(1)); + assertStats(compoundProcessor, 1, 1, 0); } public void testBreakOnFailure() throws Exception { TestProcessor firstProcessor = new TestProcessor("id1", "first", ingestDocument -> {throw new RuntimeException("error1");}); TestProcessor secondProcessor = new TestProcessor("id2", "second", ingestDocument -> {throw new RuntimeException("error2");}); TestProcessor onFailureProcessor = new TestProcessor("id2", "on_failure", ingestDocument -> {}); + LongSupplier relativeTimeProvider = mock(LongSupplier.class); + when(relativeTimeProvider.getAsLong()).thenReturn(0L); CompoundProcessor pipeline = new CompoundProcessor(false, Arrays.asList(firstProcessor, secondProcessor), - Collections.singletonList(onFailureProcessor)); + Collections.singletonList(onFailureProcessor), relativeTimeProvider); pipeline.execute(ingestDocument); assertThat(firstProcessor.getInvokedCounter(), equalTo(1)); assertThat(secondProcessor.getInvokedCounter(), equalTo(0)); assertThat(onFailureProcessor.getInvokedCounter(), equalTo(1)); + 
assertStats(pipeline, 1, 1, 0); + } + private void assertStats(CompoundProcessor compoundProcessor, long count, long failed, long time) { + assertStats(0, compoundProcessor, 0L, count, failed, time); + } + + private void assertStats(int processor, CompoundProcessor compoundProcessor, long current, long count, long failed, long time) { + IngestStats.Stats stats = compoundProcessor.getProcessorsWithMetrics().get(processor).v2().createStats(); + assertThat(stats.getIngestCount(), equalTo(count)); + assertThat(stats.getIngestCurrent(), equalTo(current)); + assertThat(stats.getIngestFailedCount(), equalTo(failed)); + assertThat(stats.getIngestTimeInMillis(), equalTo(time)); } } diff --git a/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java index c7d4dfa4e68..c5548ae5594 100644 --- a/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java @@ -33,12 +33,18 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; import java.util.function.Consumer; +import java.util.function.LongSupplier; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; import static org.hamcrest.core.Is.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class ConditionalProcessorTests extends ESTestCase { @@ -60,6 +66,8 @@ public class ConditionalProcessorTests extends ESTestCase { new HashMap<>(ScriptModule.CORE_CONTEXTS) ); Map document = new HashMap<>(); + LongSupplier relativeTimeProvider = mock(LongSupplier.class); + when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(1), 0L, TimeUnit.MILLISECONDS.toNanos(2)); ConditionalProcessor processor = new ConditionalProcessor( randomAlphaOfLength(10), new Script( @@ -67,7 +75,10 @@ public class ConditionalProcessorTests extends ESTestCase { scriptName, Collections.emptyMap()), scriptService, new Processor() { @Override - public IngestDocument execute(final IngestDocument ingestDocument) throws Exception { + public IngestDocument execute(final IngestDocument ingestDocument){ + if(ingestDocument.hasField("error")){ + throw new RuntimeException("error"); + } ingestDocument.setFieldValue("foo", "bar"); return ingestDocument; } @@ -81,20 +92,37 @@ public class ConditionalProcessorTests extends ESTestCase { public String getTag() { return null; } - }); - - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - ingestDocument.setFieldValue(conditionalField, trueValue); - processor.execute(ingestDocument); - assertThat(ingestDocument.getSourceAndMetadata().get(conditionalField), is(trueValue)); - assertThat(ingestDocument.getSourceAndMetadata().get("foo"), is("bar")); + }, relativeTimeProvider); + //false, never call processor never increments metrics String falseValue = "falsy"; - ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); ingestDocument.setFieldValue(conditionalField, falseValue); processor.execute(ingestDocument); 
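Note: the mocked LongSupplier in these tests is read in start/stop pairs, one pair per timed execution, so consecutive stubbed return values pin the measured ingest time exactly. The stubbing idea in isolation (Mockito, as the tests use it; assumes the usual mock/when static imports):

    LongSupplier relativeTimeProvider = mock(LongSupplier.class);
    // First timed execution sees 0 then 1ms; the second sees 0 then 2ms,
    // so the assertions can expect exact ingestTimeInMillis values.
    when(relativeTimeProvider.getAsLong()).thenReturn(
        0L, TimeUnit.MILLISECONDS.toNanos(1),
        0L, TimeUnit.MILLISECONDS.toNanos(2));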
assertThat(ingestDocument.getSourceAndMetadata().get(conditionalField), is(falseValue)); assertThat(ingestDocument.getSourceAndMetadata(), not(hasKey("foo"))); + assertStats(processor, 0, 0, 0); + + ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + ingestDocument.setFieldValue(conditionalField, falseValue); + ingestDocument.setFieldValue("error", true); + processor.execute(ingestDocument); + assertStats(processor, 0, 0, 0); + + //true, always call processor and increments metrics + ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + ingestDocument.setFieldValue(conditionalField, trueValue); + processor.execute(ingestDocument); + assertThat(ingestDocument.getSourceAndMetadata().get(conditionalField), is(trueValue)); + assertThat(ingestDocument.getSourceAndMetadata().get("foo"), is("bar")); + assertStats(processor, 1, 0, 1); + + ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + ingestDocument.setFieldValue(conditionalField, trueValue); + ingestDocument.setFieldValue("error", true); + IngestDocument finalIngestDocument = ingestDocument; + expectThrows(RuntimeException.class, () -> processor.execute(finalIngestDocument)); + assertStats(processor, 2, 1, 2); } @SuppressWarnings("unchecked") @@ -141,5 +169,14 @@ public class ConditionalProcessorTests extends ESTestCase { Exception e = expectedException.get(); assertThat(e, instanceOf(UnsupportedOperationException.class)); assertEquals("Mutating ingest documents in conditionals is not supported", e.getMessage()); + assertStats(processor, 0, 0, 0); + } + + private static void assertStats(ConditionalProcessor conditionalProcessor, long count, long failed, long time) { + IngestStats.Stats stats = conditionalProcessor.getMetric().createStats(); + assertThat(stats.getIngestCount(), equalTo(count)); + assertThat(stats.getIngestCurrent(), equalTo(0L)); + assertThat(stats.getIngestFailedCount(), equalTo(failed)); + assertThat(stats.getIngestTimeInMillis(), greaterThanOrEqualTo(time)); } } diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 4de39349dc5..3dde7babb0a 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -63,6 +63,7 @@ import java.util.function.Consumer; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -746,16 +747,23 @@ public class IngestServiceTests extends ESTestCase { verify(completionHandler, times(1)).accept(null); } - public void testStats() { + public void testStats() throws Exception { final Processor processor = mock(Processor.class); - IngestService ingestService = createWithProcessors(Collections.singletonMap( - "mock", (factories, tag, config) -> processor)); + final Processor processorFailure = mock(Processor.class); + when(processor.getType()).thenReturn("mock"); + when(processor.getTag()).thenReturn("mockTag"); + when(processorFailure.getType()).thenReturn("failure-mock"); + //avoid returning null and dropping the document + when(processor.execute(any(IngestDocument.class))).thenReturn( RandomDocumentPicks.randomIngestDocument(random())); + 
when(processorFailure.execute(any(IngestDocument.class))).thenThrow(new RuntimeException("error")); + Map<String, Processor.Factory> map = new HashMap<>(2); + map.put("mock", (factories, tag, config) -> processor); + map.put("failure-mock", (factories, tag, config) -> processorFailure); + IngestService ingestService = createWithProcessors(map); + final IngestStats initialStats = ingestService.stats(); - assertThat(initialStats.getStatsPerPipeline().size(), equalTo(0)); - assertThat(initialStats.getTotalStats().getIngestCount(), equalTo(0L)); - assertThat(initialStats.getTotalStats().getIngestCurrent(), equalTo(0L)); - assertThat(initialStats.getTotalStats().getIngestFailedCount(), equalTo(0L)); - assertThat(initialStats.getTotalStats().getIngestTimeInMillis(), equalTo(0L)); + assertThat(initialStats.getPipelineStats().size(), equalTo(0)); + assertStats(initialStats.getTotalStats(), 0, 0, 0); PutPipelineRequest putRequest = new PutPipelineRequest("_id1", new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), XContentType.JSON); @@ -769,7 +777,6 @@ public class IngestServiceTests extends ESTestCase { clusterState = IngestService.innerPut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); - @SuppressWarnings("unchecked") final BiConsumer<IndexRequest, Exception> failureHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") final Consumer<Exception> completionHandler = mock(Consumer.class); @@ -778,18 +785,33 @@ indexRequest.source(randomAlphaOfLength(10), randomAlphaOfLength(10)); ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); final IngestStats afterFirstRequestStats = ingestService.stats(); - assertThat(afterFirstRequestStats.getStatsPerPipeline().size(), equalTo(2)); - assertThat(afterFirstRequestStats.getStatsPerPipeline().get("_id1").getIngestCount(), equalTo(1L)); - assertThat(afterFirstRequestStats.getStatsPerPipeline().get("_id2").getIngestCount(), equalTo(0L)); - assertThat(afterFirstRequestStats.getTotalStats().getIngestCount(), equalTo(1L)); + assertThat(afterFirstRequestStats.getPipelineStats().size(), equalTo(2)); + + afterFirstRequestStats.getProcessorStats().get("_id1").forEach(p -> assertEquals(p.getName(), "mock:mockTag")); + afterFirstRequestStats.getProcessorStats().get("_id2").forEach(p -> assertEquals(p.getName(), "mock:mockTag")); + + //total + assertStats(afterFirstRequestStats.getTotalStats(), 1, 0, 0); + //pipeline + assertPipelineStats(afterFirstRequestStats.getPipelineStats(), "_id1", 1, 0, 0); + assertPipelineStats(afterFirstRequestStats.getPipelineStats(), "_id2", 0, 0, 0); + //processor + assertProcessorStats(0, afterFirstRequestStats, "_id1", 1, 0, 0); + assertProcessorStats(0, afterFirstRequestStats, "_id2", 0, 0, 0); + indexRequest.setPipeline("_id2"); ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); final IngestStats afterSecondRequestStats = ingestService.stats(); - assertThat(afterSecondRequestStats.getStatsPerPipeline().size(), equalTo(2)); - assertThat(afterSecondRequestStats.getStatsPerPipeline().get("_id1").getIngestCount(), equalTo(1L)); - assertThat(afterSecondRequestStats.getStatsPerPipeline().get("_id2").getIngestCount(), equalTo(1L)); - assertThat(afterSecondRequestStats.getTotalStats().getIngestCount(), equalTo(2L)); + assertThat(afterSecondRequestStats.getPipelineStats().size(), equalTo(2)); + //total + 
assertStats(afterSecondRequestStats.getTotalStats(), 2, 0, 0); + //pipeline + assertPipelineStats(afterSecondRequestStats.getPipelineStats(), "_id1", 1, 0, 0); + assertPipelineStats(afterSecondRequestStats.getPipelineStats(), "_id2", 1, 0, 0); + //processor + assertProcessorStats(0, afterSecondRequestStats, "_id1", 1, 0, 0); + assertProcessorStats(0, afterSecondRequestStats, "_id2", 1, 0, 0); //update cluster state and ensure that new stats are added to old stats putRequest = new PutPipelineRequest("_id1", @@ -800,13 +822,66 @@ indexRequest.setPipeline("_id1"); ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); final IngestStats afterThirdRequestStats = ingestService.stats(); - assertThat(afterThirdRequestStats.getStatsPerPipeline().size(), equalTo(2)); - assertThat(afterThirdRequestStats.getStatsPerPipeline().get("_id1").getIngestCount(), equalTo(2L)); - assertThat(afterThirdRequestStats.getStatsPerPipeline().get("_id2").getIngestCount(), equalTo(1L)); - assertThat(afterThirdRequestStats.getTotalStats().getIngestCount(), equalTo(3L)); + assertThat(afterThirdRequestStats.getPipelineStats().size(), equalTo(2)); + //total + assertStats(afterThirdRequestStats.getTotalStats(), 3, 0, 0); + //pipeline + assertPipelineStats(afterThirdRequestStats.getPipelineStats(), "_id1", 2, 0, 0); + assertPipelineStats(afterThirdRequestStats.getPipelineStats(), "_id2", 1, 0, 0); + //The number of processors for the "_id1" pipeline changed, so the per-processor metrics are not carried forward. This is + //due to the parallel arrays used to identify which metrics to carry forward. Without unique ids or semantic equals for each + //processor, parallel arrays are the best option for carrying forward metrics between pipeline changes. However, in some cases, + //like this one, it may not be readily obvious why the metrics were not carried forward.
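Note: the carry-forward rule described in the comment above reduces to a positional walk over two parallel lists. A simplified sketch of that logic, with String types standing in for real processors; the method and names are illustrative, not the IngestService code (assumes java.util.List, org.elasticsearch.common.collect.Tuple, and IngestMetric):

    // Metrics survive a pipeline update only if the flattened processor lists have the
    // same length and the same type at every position; otherwise they restart at zero.
    static void carryForward(List<Tuple<String, IngestMetric>> oldMetrics,
                             List<Tuple<String, IngestMetric>> newMetrics) {
        if (newMetrics.size() != oldMetrics.size()) {
            return; // parallel arrays don't line up: nothing is carried forward
        }
        for (int i = 0; i < newMetrics.size(); i++) {
            if (newMetrics.get(i).v1().equals(oldMetrics.get(i).v1())) { // same type, same slot
                newMetrics.get(i).v2().add(oldMetrics.get(i).v2());      // accumulate the old counts
            }
        }
    }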
+        assertProcessorStats(0, afterThirdRequestStats, "_id1", 1, 0, 0);
+        assertProcessorStats(1, afterThirdRequestStats, "_id1", 1, 0, 0);
+        assertProcessorStats(0, afterThirdRequestStats, "_id2", 1, 0, 0);
+
+        //test a failure, and that the processor stats are added from the old stats
+        putRequest = new PutPipelineRequest("_id1",
+            new BytesArray("{\"processors\": [{\"failure-mock\" : { \"on_failure\": [{\"mock\" : {}}]}}, {\"mock\" : {}}]}"),
+            XContentType.JSON);
+        previousClusterState = clusterState;
+        clusterState = IngestService.innerPut(putRequest, clusterState);
+        ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));
+        indexRequest.setPipeline("_id1");
+        ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {});
+        final IngestStats afterFourthRequestStats = ingestService.stats();
+        assertThat(afterFourthRequestStats.getPipelineStats().size(), equalTo(2));
+        //total
+        assertStats(afterFourthRequestStats.getTotalStats(), 4, 0, 0);
+        //pipeline
+        assertPipelineStats(afterFourthRequestStats.getPipelineStats(), "_id1", 3, 0, 0);
+        assertPipelineStats(afterFourthRequestStats.getPipelineStats(), "_id2", 1, 0, 0);
+        //processor
+        assertProcessorStats(0, afterFourthRequestStats, "_id1", 1, 1, 0); //not carried forward since type changed
+        assertProcessorStats(1, afterFourthRequestStats, "_id1", 2, 0, 0); //carried forward and added from old stats
+        assertProcessorStats(0, afterFourthRequestStats, "_id2", 1, 0, 0);
     }

+    public void testStatName() {
+        Processor processor = mock(Processor.class);
+        String name = randomAlphaOfLength(10);
+        when(processor.getType()).thenReturn(name);
+        assertThat(IngestService.getProcessorName(processor), equalTo(name));
+        String tag = randomAlphaOfLength(10);
+        when(processor.getTag()).thenReturn(tag);
+        assertThat(IngestService.getProcessorName(processor), equalTo(name + ":" + tag));
+
+        ConditionalProcessor conditionalProcessor = mock(ConditionalProcessor.class);
+        when(conditionalProcessor.getProcessor()).thenReturn(processor);
+        assertThat(IngestService.getProcessorName(conditionalProcessor), equalTo(name + ":" + tag));
+
+        PipelineProcessor pipelineProcessor = mock(PipelineProcessor.class);
+        String pipelineName = randomAlphaOfLength(10);
+        when(pipelineProcessor.getPipelineName()).thenReturn(pipelineName);
+        name = PipelineProcessor.TYPE;
+        when(pipelineProcessor.getType()).thenReturn(name);
+        assertThat(IngestService.getProcessorName(pipelineProcessor), equalTo(name + ":" + pipelineName));
+        when(pipelineProcessor.getTag()).thenReturn(tag);
+        assertThat(IngestService.getProcessorName(pipelineProcessor), equalTo(name + ":" + pipelineName + ":" + tag));
+    }
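The naming rules this test pins down can be restated as a sketch (assumed shape only; the real logic lives in IngestService.getProcessorName):

    private static String processorName(Processor processor) {
        // a conditional processor reports the name of the processor it wraps
        if (processor instanceof ConditionalProcessor) {
            processor = ((ConditionalProcessor) processor).getProcessor();
        }
        String name = processor.getType();
        if (processor instanceof PipelineProcessor) {
            name = name + ":" + ((PipelineProcessor) processor).getPipelineName();
        }
        String tag = processor.getTag();
        return tag != null && tag.isEmpty() == false ? name + ":" + tag : name;
    }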
+
     public void testExecuteWithDrop() {
         Map<String, Processor.Factory> factories = new HashMap<>();
         factories.put("drop", new DropProcessor.Factory());
@@ -935,4 +1010,23 @@ public class IngestServiceTests extends ESTestCase {
             return false;
         }
     }
+
+    private void assertProcessorStats(int processor, IngestStats stats, String pipelineId, long count, long failed, long time) {
+        assertStats(stats.getProcessorStats().get(pipelineId).get(processor).getStats(), count, failed, time);
+    }
+
+    private void assertPipelineStats(List<IngestStats.PipelineStat> pipelineStats, String pipelineId, long count, long failed, long time) {
+        assertStats(getPipelineStats(pipelineStats, pipelineId), count, failed, time);
+    }
+
+    private void assertStats(IngestStats.Stats stats, long count, long failed, long time) {
+        assertThat(stats.getIngestCount(), equalTo(count));
+        assertThat(stats.getIngestCurrent(), equalTo(0L));
+        assertThat(stats.getIngestFailedCount(), equalTo(failed));
+        assertThat(stats.getIngestTimeInMillis(), greaterThanOrEqualTo(time));
+    }
+
+    private IngestStats.Stats getPipelineStats(List<IngestStats.PipelineStat> pipelineStats, String id) {
+        return pipelineStats.stream().filter(p1 -> p1.getPipelineId().equals(id)).findFirst().map(p2 -> p2.getStats()).orElse(null);
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java
index 9974dd568a8..04bfcbb92b8 100644
--- a/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java
+++ b/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java
@@ -19,44 +19,70 @@
 package org.elasticsearch.ingest;

+import org.elasticsearch.Version;
+import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.VersionUtils;

 import java.io.IOException;
 import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;

 public class IngestStatsTests extends ESTestCase {

     public void testSerialization() throws IOException {
-        IngestStats.Stats total = new IngestStats.Stats(5, 10, 20, 30);
-        IngestStats.Stats foo = new IngestStats.Stats(50, 100, 200, 300);
-        IngestStats ingestStats = new IngestStats(total, Collections.singletonMap("foo", foo));
-        IngestStats serialize = serialize(ingestStats);
-        assertNotSame(serialize, ingestStats);
-        assertNotSame(serialize.getTotalStats(), total);
-        assertEquals(total.getIngestCount(), serialize.getTotalStats().getIngestCount());
-        assertEquals(total.getIngestFailedCount(), serialize.getTotalStats().getIngestFailedCount());
-        assertEquals(total.getIngestTimeInMillis(), serialize.getTotalStats().getIngestTimeInMillis());
-        assertEquals(total.getIngestCurrent(), serialize.getTotalStats().getIngestCurrent());
+        IngestStats.Stats totalStats = new IngestStats.Stats(50, 100, 200, 300);
+        List<IngestStats.PipelineStat> pipelineStats = createPipelineStats();
+        Map<String, List<IngestStats.ProcessorStat>> processorStats = createProcessorStats(pipelineStats);
+        IngestStats ingestStats = new IngestStats(totalStats, pipelineStats, processorStats);
+        IngestStats serializedStats = serialize(ingestStats);
+        assertIngestStats(ingestStats, serializedStats, true);
+    }

-        assertEquals(ingestStats.getStatsPerPipeline().size(), 1);
-        assertTrue(ingestStats.getStatsPerPipeline().containsKey("foo"));
+    public void testReadLegacyStream() throws IOException {
+        IngestStats.Stats totalStats = new IngestStats.Stats(50, 100, 200, 300);
+        List<IngestStats.PipelineStat> pipelineStats = createPipelineStats();

-        Map<String, IngestStats.Stats> left = ingestStats.getStatsPerPipeline();
-        Map<String, IngestStats.Stats> right = serialize.getStatsPerPipeline();
+        //legacy output logic
+        BytesStreamOutput out = new BytesStreamOutput();
+        out.setVersion(VersionUtils.getPreviousVersion(Version.V_6_5_0));
+        totalStats.writeTo(out);
+        out.writeVInt(pipelineStats.size());
+        for (IngestStats.PipelineStat pipelineStat : pipelineStats) {
+            out.writeString(pipelineStat.getPipelineId());
+            pipelineStat.getStats().writeTo(out);
+        }

-        assertEquals(right.size(), 1);
-        assertTrue(right.containsKey("foo"));
-        assertEquals(left.size(), 1);
-        assertTrue(left.containsKey("foo"));
-        IngestStats.Stats leftStats = left.get("foo");
-        IngestStats.Stats rightStats = right.get("foo");
-        assertEquals(leftStats.getIngestCount(), rightStats.getIngestCount());
-        assertEquals(leftStats.getIngestFailedCount(), rightStats.getIngestFailedCount());
-        assertEquals(leftStats.getIngestTimeInMillis(), rightStats.getIngestTimeInMillis());
-        assertEquals(leftStats.getIngestCurrent(), rightStats.getIngestCurrent());
+        StreamInput in = out.bytes().streamInput();
+        in.setVersion(VersionUtils.getPreviousVersion(Version.V_6_5_0));
+        IngestStats serializedStats = new IngestStats(in);
+        IngestStats expectedStats = new IngestStats(totalStats, pipelineStats, Collections.emptyMap());
+        assertIngestStats(expectedStats, serializedStats, false);
+    }
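For reference, the write side that this legacy test mirrors presumably gates the extra payload on the stream version, along these lines (a sketch, not the verbatim IngestStats.writeTo):

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        totalStats.writeTo(out);
        out.writeVInt(pipelineStats.size());
        for (IngestStats.PipelineStat pipelineStat : pipelineStats) {
            out.writeString(pipelineStat.getPipelineId());
            pipelineStat.getStats().writeTo(out);
            if (out.getVersion().onOrAfter(Version.V_6_5_0)) {
                // 6.5.0+ streams additionally carry this pipeline's processor stats here;
                // older streams stop after the pipeline stats, which is why the legacy
                // round trip above yields an empty processor map.
            }
        }
    }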
+
+    private List<IngestStats.PipelineStat> createPipelineStats() {
+        IngestStats.PipelineStat pipeline1Stats = new IngestStats.PipelineStat("pipeline1", new IngestStats.Stats(3, 3, 3, 3));
+        IngestStats.PipelineStat pipeline2Stats = new IngestStats.PipelineStat("pipeline2", new IngestStats.Stats(47, 97, 197, 297));
+        IngestStats.PipelineStat pipeline3Stats = new IngestStats.PipelineStat("pipeline3", new IngestStats.Stats(0, 0, 0, 0));
+        return Stream.of(pipeline1Stats, pipeline2Stats, pipeline3Stats).collect(Collectors.toList());
+    }
+
+    private Map<String, List<IngestStats.ProcessorStat>> createProcessorStats(List<IngestStats.PipelineStat> pipelineStats) {
+        assert(pipelineStats.size() >= 2);
+        IngestStats.ProcessorStat processor1Stat = new IngestStats.ProcessorStat("processor1", new IngestStats.Stats(1, 1, 1, 1));
+        IngestStats.ProcessorStat processor2Stat = new IngestStats.ProcessorStat("processor2", new IngestStats.Stats(2, 2, 2, 2));
+        IngestStats.ProcessorStat processor3Stat = new IngestStats.ProcessorStat("processor3", new IngestStats.Stats(47, 97, 197, 297));
+        //pipeline1 -> processor1,processor2; pipeline2 -> processor3
+        return MapBuilder.<String, List<IngestStats.ProcessorStat>>newMapBuilder()
+            .put(pipelineStats.get(0).getPipelineId(), Stream.of(processor1Stat, processor2Stat).collect(Collectors.toList()))
+            .put(pipelineStats.get(1).getPipelineId(), Collections.singletonList(processor3Stat))
+            .map();
     }

     private IngestStats serialize(IngestStats stats) throws IOException {
@@ -65,4 +91,48 @@ public class IngestStatsTests extends ESTestCase {
         StreamInput in = out.bytes().streamInput();
         return new IngestStats(in);
     }
+
+    private void assertIngestStats(IngestStats ingestStats, IngestStats serializedStats, boolean expectProcessors) {
+        assertNotSame(ingestStats, serializedStats);
+        assertNotSame(ingestStats.getTotalStats(), serializedStats.getTotalStats());
+        assertNotSame(ingestStats.getPipelineStats(), serializedStats.getPipelineStats());
+        assertNotSame(ingestStats.getProcessorStats(), serializedStats.getProcessorStats());
+
+        assertStats(ingestStats.getTotalStats(), serializedStats.getTotalStats());
+        assertEquals(ingestStats.getPipelineStats().size(), serializedStats.getPipelineStats().size());
+
+        for (IngestStats.PipelineStat serializedPipelineStat : serializedStats.getPipelineStats()) {
+            assertStats(getPipelineStats(ingestStats.getPipelineStats(), serializedPipelineStat.getPipelineId()),
+                serializedPipelineStat.getStats());
+            List<IngestStats.ProcessorStat> serializedProcessorStats =
+                serializedStats.getProcessorStats().get(serializedPipelineStat.getPipelineId());
+            List<IngestStats.ProcessorStat> processorStat = ingestStats.getProcessorStats().get(serializedPipelineStat.getPipelineId());
+            if (expectProcessors) {
+                if (processorStat != null) {
+                    Iterator<IngestStats.ProcessorStat> it = processorStat.iterator();
+                    //intentionally enforcing the identical ordering
+                    for (IngestStats.ProcessorStat serializedProcessorStat : serializedProcessorStats) {
+                        IngestStats.ProcessorStat ps = it.next();
+                        assertEquals(ps.getName(), serializedProcessorStat.getName());
+                        assertStats(ps.getStats(), serializedProcessorStat.getStats());
+                    }
+                    assertFalse(it.hasNext());
+                }
+            } else {
+                //pre 6.5 did not serialize any processor stats
+                assertNull(serializedProcessorStats);
+            }
+        }
+    }
+
+    private void assertStats(IngestStats.Stats fromObject, IngestStats.Stats fromStream) {
+        assertEquals(fromObject.getIngestCount(), fromStream.getIngestCount());
+        assertEquals(fromObject.getIngestFailedCount(), fromStream.getIngestFailedCount());
+        assertEquals(fromObject.getIngestTimeInMillis(), fromStream.getIngestTimeInMillis());
+        assertEquals(fromObject.getIngestCurrent(), fromStream.getIngestCurrent());
+    }
+
+    private IngestStats.Stats getPipelineStats(List<IngestStats.PipelineStat> pipelineStats, String id) {
+        return pipelineStats.stream().filter(p1 -> p1.getPipelineId().equals(id)).findFirst().map(p2 -> p2.getStats()).orElse(null);
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java
index 018ded346d4..0ad88c05ccc 100644
--- a/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java
+++ b/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java
@@ -21,12 +21,13 @@ package org.elasticsearch.ingest;

 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.test.ESTestCase;

-import java.time.Clock;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.function.LongSupplier;

 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.mockito.Mockito.mock;
@@ -62,7 +63,7 @@ public class PipelineProcessorTests extends ESTestCase {
         when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline);
         PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
         Map config = new HashMap<>();
-        config.put("pipeline", pipelineId);
+        config.put("name", pipelineId);
         factory.create(Collections.emptyMap(), null, config).execute(testIngestDocument);
         assertEquals(testIngestDocument, invoked.get());
     }
@@ -72,7 +73,7 @@ public class PipelineProcessorTests extends ESTestCase {
         IngestDocument testIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
         PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
         Map config = new HashMap<>();
-        config.put("pipeline", "missingPipelineId");
+        config.put("name", "missingPipelineId");
         IllegalStateException e = expectThrows(
             IllegalStateException.class,
             () -> factory.create(Collections.emptyMap(), null, config).execute(testIngestDocument)
@@ -88,21 +89,21 @@ public class PipelineProcessorTests extends ESTestCase {
         IngestService ingestService = mock(IngestService.class);
         IngestDocument testIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
         Map outerConfig = new HashMap<>();
-        outerConfig.put("pipeline", innerPipelineId);
+        outerConfig.put("name", innerPipelineId);
         PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
         Pipeline outer = new Pipeline(
             outerPipelineId, null, null, new CompoundProcessor(factory.create(Collections.emptyMap(), null, outerConfig))
         );
         Map innerConfig = new HashMap<>();
-        innerConfig.put("pipeline", outerPipelineId);
+        innerConfig.put("name", outerPipelineId);
         Pipeline inner = new Pipeline(
             innerPipelineId, null, null, new
CompoundProcessor(factory.create(Collections.emptyMap(), null, innerConfig)) ); when(ingestService.getPipeline(outerPipelineId)).thenReturn(outer); when(ingestService.getPipeline(innerPipelineId)).thenReturn(inner); - outerConfig.put("pipeline", innerPipelineId); + outerConfig.put("name", innerPipelineId); ElasticsearchException e = expectThrows( ElasticsearchException.class, () -> factory.create(Collections.emptyMap(), null, outerConfig).execute(testIngestDocument) @@ -117,7 +118,7 @@ public class PipelineProcessorTests extends ESTestCase { IngestService ingestService = mock(IngestService.class); IngestDocument testIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); Map outerConfig = new HashMap<>(); - outerConfig.put("pipeline", innerPipelineId); + outerConfig.put("name", innerPipelineId); PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); Pipeline inner = new Pipeline( innerPipelineId, null, null, new CompoundProcessor() @@ -136,22 +137,22 @@ public class PipelineProcessorTests extends ESTestCase { PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); Map pipeline1ProcessorConfig = new HashMap<>(); - pipeline1ProcessorConfig.put("pipeline", pipeline2Id); + pipeline1ProcessorConfig.put("name", pipeline2Id); PipelineProcessor pipeline1Processor = factory.create(Collections.emptyMap(), null, pipeline1ProcessorConfig); Map pipeline2ProcessorConfig = new HashMap<>(); - pipeline2ProcessorConfig.put("pipeline", pipeline3Id); + pipeline2ProcessorConfig.put("name", pipeline3Id); PipelineProcessor pipeline2Processor = factory.create(Collections.emptyMap(), null, pipeline2ProcessorConfig); - Clock clock = mock(Clock.class); - when(clock.millis()).thenReturn(0L).thenReturn(0L); + LongSupplier relativeTimeProvider = mock(LongSupplier.class); + when(relativeTimeProvider.getAsLong()).thenReturn(0L); Pipeline pipeline1 = new Pipeline( - pipeline1Id, null, null, new CompoundProcessor(pipeline1Processor), clock + pipeline1Id, null, null, new CompoundProcessor(pipeline1Processor), relativeTimeProvider ); String key1 = randomAlphaOfLength(10); - clock = mock(Clock.class); - when(clock.millis()).thenReturn(0L).thenReturn(3L); + relativeTimeProvider = mock(LongSupplier.class); + when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(3)); Pipeline pipeline2 = new Pipeline( pipeline2Id, null, null, new CompoundProcessor(true, Arrays.asList( @@ -160,15 +161,15 @@ public class PipelineProcessorTests extends ESTestCase { }), pipeline2Processor), Collections.emptyList()), - clock + relativeTimeProvider ); - clock = mock(Clock.class); - when(clock.millis()).thenReturn(0L).thenReturn(2L); + relativeTimeProvider = mock(LongSupplier.class); + when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(2)); Pipeline pipeline3 = new Pipeline( pipeline3Id, null, null, new CompoundProcessor( new TestProcessor(ingestDocument -> { throw new RuntimeException("error"); - })), clock + })), relativeTimeProvider ); when(ingestService.getPipeline(pipeline1Id)).thenReturn(pipeline1); when(ingestService.getPipeline(pipeline2Id)).thenReturn(pipeline2); diff --git a/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java index 7a7f9b77372..2c047283ed1 100644 --- a/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java +++ 
b/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java @@ -21,17 +21,22 @@ package org.elasticsearch.ingest; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ingest.SimulateProcessorResult; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.script.MockScriptEngine; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptModule; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptType; import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import org.mockito.Mockito; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; -import java.util.IdentityHashMap; import java.util.List; import java.util.Map; -import java.util.Set; import static org.elasticsearch.ingest.CompoundProcessor.ON_FAILURE_MESSAGE_FIELD; import static org.elasticsearch.ingest.CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD; @@ -39,10 +44,11 @@ import static org.elasticsearch.ingest.CompoundProcessor.ON_FAILURE_PROCESSOR_TY import static org.elasticsearch.ingest.TrackingResultProcessor.decorate; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -50,13 +56,11 @@ public class TrackingResultProcessorTests extends ESTestCase { private IngestDocument ingestDocument; private List resultList; - private Set pipelinesSeen; @Before public void init() { ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); resultList = new ArrayList<>(); - pipelinesSeen = Collections.newSetFromMap(new IdentityHashMap<>()); } public void testActualProcessor() throws Exception { @@ -76,9 +80,9 @@ public class TrackingResultProcessorTests extends ESTestCase { public void testActualCompoundProcessorWithoutOnFailure() throws Exception { RuntimeException exception = new RuntimeException("processor failed"); - TestProcessor testProcessor = new TestProcessor(ingestDocument -> { throw exception; }); + TestProcessor testProcessor = new TestProcessor(ingestDocument -> { throw exception; }); CompoundProcessor actualProcessor = new CompoundProcessor(testProcessor); - CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList, pipelinesSeen); + CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); try { trackingProcessor.execute(ingestDocument); @@ -97,14 +101,14 @@ public class TrackingResultProcessorTests extends ESTestCase { public void testActualCompoundProcessorWithOnFailure() throws Exception { RuntimeException exception = new RuntimeException("fail"); - TestProcessor failProcessor = new TestProcessor("fail", "test", ingestDocument -> { throw exception; }); + TestProcessor failProcessor = new TestProcessor("fail", "test", ingestDocument -> { throw exception; }); TestProcessor onFailureProcessor = new TestProcessor("success", "test", ingestDocument -> {}); CompoundProcessor actualProcessor = new CompoundProcessor(false, Arrays.asList(new CompoundProcessor(false, Arrays.asList(failProcessor, onFailureProcessor), Arrays.asList(onFailureProcessor, failProcessor))), 
Arrays.asList(onFailureProcessor)); - CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList, pipelinesSeen); + CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); trackingProcessor.execute(ingestDocument); SimulateProcessorResult expectedFailResult = new SimulateProcessorResult(failProcessor.getTag(), ingestDocument); @@ -139,10 +143,10 @@ public class TrackingResultProcessorTests extends ESTestCase { public void testActualCompoundProcessorWithIgnoreFailure() throws Exception { RuntimeException exception = new RuntimeException("processor failed"); - TestProcessor testProcessor = new TestProcessor(ingestDocument -> { throw exception; }); + TestProcessor testProcessor = new TestProcessor(ingestDocument -> { throw exception; }); CompoundProcessor actualProcessor = new CompoundProcessor(true, Collections.singletonList(testProcessor), Collections.emptyList()); - CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList, pipelinesSeen); + CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); trackingProcessor.execute(ingestDocument); @@ -154,11 +158,50 @@ public class TrackingResultProcessorTests extends ESTestCase { assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedResult.getProcessorTag())); } + public void testActualCompoundProcessorWithFalseConditional() throws Exception { + String key1 = randomAlphaOfLength(10); + String key2 = randomAlphaOfLength(10); + String key3 = randomAlphaOfLength(10); + + String scriptName = "conditionalScript"; + ScriptService scriptService = new ScriptService(Settings.builder().build(), Collections.singletonMap(Script.DEFAULT_SCRIPT_LANG, + new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, Collections.singletonMap(scriptName, ctx -> false), Collections.emptyMap())), + new HashMap<>(ScriptModule.CORE_CONTEXTS) + ); + + CompoundProcessor compoundProcessor = new CompoundProcessor( + new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue(key1, randomInt()); }), + new ConditionalProcessor( + randomAlphaOfLength(10), + new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Collections.emptyMap()), scriptService, + new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue(key2, randomInt()); })), + new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key3, randomInt()); })); + + CompoundProcessor trackingProcessor = decorate(compoundProcessor, resultList); + trackingProcessor.execute(ingestDocument); + SimulateProcessorResult expectedResult = new SimulateProcessorResult(compoundProcessor.getTag(), ingestDocument); + + //the step for key 2 is never executed due to conditional and thus not part of the result set + assertThat(resultList.size(), equalTo(2)); + + assertTrue(resultList.get(0).getIngestDocument().hasField(key1)); + assertFalse(resultList.get(0).getIngestDocument().hasField(key2)); + assertFalse(resultList.get(0).getIngestDocument().hasField(key3)); + + assertTrue(resultList.get(1).getIngestDocument().hasField(key1)); + assertFalse(resultList.get(1).getIngestDocument().hasField(key2)); + assertTrue(resultList.get(1).getIngestDocument().hasField(key3)); + + assertThat(resultList.get(1).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); + assertThat(resultList.get(1).getFailure(), nullValue()); + assertThat(resultList.get(1).getProcessorTag(), nullValue()); + } + public void testActualPipelineProcessor() throws Exception { String pipelineId = "pipeline1"; IngestService ingestService = 
mock(IngestService.class); Map pipelineConfig = new HashMap<>(); - pipelineConfig.put("pipeline", pipelineId); + pipelineConfig.put("name", pipelineId); PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); String key1 = randomAlphaOfLength(10); @@ -176,13 +219,13 @@ public class TrackingResultProcessorTests extends ESTestCase { PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig); CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor); - CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList, pipelinesSeen); + CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); trackingProcessor.execute(ingestDocument); SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); - verify(ingestService).getPipeline(pipelineId); + verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId); assertThat(resultList.size(), equalTo(3)); assertTrue(resultList.get(0).getIngestDocument().hasField(key1)); @@ -198,13 +241,149 @@ public class TrackingResultProcessorTests extends ESTestCase { assertThat(resultList.get(2).getProcessorTag(), nullValue()); } + public void testActualPipelineProcessorWithTrueConditional() throws Exception { + String pipelineId1 = "pipeline1"; + String pipelineId2 = "pipeline2"; + IngestService ingestService = mock(IngestService.class); + Map pipelineConfig0 = new HashMap<>(); + pipelineConfig0.put("name", pipelineId1); + Map pipelineConfig1 = new HashMap<>(); + pipelineConfig1.put("name", pipelineId1); + Map pipelineConfig2 = new HashMap<>(); + pipelineConfig2.put("name", pipelineId2); + PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); + + String key1 = randomAlphaOfLength(10); + String key2 = randomAlphaOfLength(10); + String key3 = randomAlphaOfLength(10); + + String scriptName = "conditionalScript"; + + ScriptService scriptService = new ScriptService(Settings.builder().build(), Collections.singletonMap(Script.DEFAULT_SCRIPT_LANG, + new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, Collections.singletonMap(scriptName, ctx -> true), Collections.emptyMap())), + new HashMap<>(ScriptModule.CORE_CONTEXTS) + ); + + Pipeline pipeline1 = new Pipeline( + pipelineId1, null, null, new CompoundProcessor( + new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue(key1, randomInt()); }), + new ConditionalProcessor( + randomAlphaOfLength(10), + new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Collections.emptyMap()), scriptService, + factory.create(Collections.emptyMap(), null, pipelineConfig2)), + new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue(key3, randomInt()); }) + ) + ); + + Pipeline pipeline2 = new Pipeline( + pipelineId2, null, null, new CompoundProcessor( + new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key2, randomInt()); }))); + + when(ingestService.getPipeline(pipelineId1)).thenReturn(pipeline1); + when(ingestService.getPipeline(pipelineId2)).thenReturn(pipeline2); + + PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig0); + CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor); + + CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); + + trackingProcessor.execute(ingestDocument); + + + SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); + + 
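(Three simulate results are expected here: pipeline1's first processor sets key1, pipeline2's processor runs via the true conditional and sets key2, and pipeline1's final processor sets key3.)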
verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId1); + verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId2); + assertThat(resultList.size(), equalTo(3)); + + assertTrue(resultList.get(0).getIngestDocument().hasField(key1)); + assertFalse(resultList.get(0).getIngestDocument().hasField(key2)); + assertFalse(resultList.get(0).getIngestDocument().hasField(key3)); + + assertTrue(resultList.get(1).getIngestDocument().hasField(key1)); + assertTrue(resultList.get(1).getIngestDocument().hasField(key2)); + assertFalse(resultList.get(1).getIngestDocument().hasField(key3)); + + assertThat(resultList.get(2).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); + assertThat(resultList.get(2).getFailure(), nullValue()); + assertThat(resultList.get(2).getProcessorTag(), nullValue()); + } + + public void testActualPipelineProcessorWithFalseConditional() throws Exception { + String pipelineId1 = "pipeline1"; + String pipelineId2 = "pipeline2"; + IngestService ingestService = mock(IngestService.class); + Map pipelineConfig0 = new HashMap<>(); + pipelineConfig0.put("name", pipelineId1); + Map pipelineConfig1 = new HashMap<>(); + pipelineConfig1.put("name", pipelineId1); + Map pipelineConfig2 = new HashMap<>(); + pipelineConfig2.put("name", pipelineId2); + PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); + + String key1 = randomAlphaOfLength(10); + String key2 = randomAlphaOfLength(10); + String key3 = randomAlphaOfLength(10); + + String scriptName = "conditionalScript"; + + ScriptService scriptService = new ScriptService(Settings.builder().build(), Collections.singletonMap(Script.DEFAULT_SCRIPT_LANG, + new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, Collections.singletonMap(scriptName, ctx -> false), Collections.emptyMap())), + new HashMap<>(ScriptModule.CORE_CONTEXTS) + ); + + Pipeline pipeline1 = new Pipeline( + pipelineId1, null, null, new CompoundProcessor( + new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue(key1, randomInt()); }), + new ConditionalProcessor( + randomAlphaOfLength(10), + new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Collections.emptyMap()), scriptService, + factory.create(Collections.emptyMap(), null, pipelineConfig2)), + new TestProcessor(ingestDocument -> {ingestDocument.setFieldValue(key3, randomInt()); }) + ) + ); + + Pipeline pipeline2 = new Pipeline( + pipelineId2, null, null, new CompoundProcessor( + new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key2, randomInt()); }))); + + when(ingestService.getPipeline(pipelineId1)).thenReturn(pipeline1); + when(ingestService.getPipeline(pipelineId2)).thenReturn(pipeline2); + + PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig0); + CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor); + + CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); + + trackingProcessor.execute(ingestDocument); + + + SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); + + verify(ingestService, Mockito.atLeast(1)).getPipeline(pipelineId1); + verify(ingestService, Mockito.never()).getPipeline(pipelineId2); + assertThat(resultList.size(), equalTo(2)); + + assertTrue(resultList.get(0).getIngestDocument().hasField(key1)); + assertFalse(resultList.get(0).getIngestDocument().hasField(key2)); + assertFalse(resultList.get(0).getIngestDocument().hasField(key3)); + + 
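(The second and last result is pipeline1's final processor: the false conditional kept pipeline2 from ever being resolved or executed, so key2 is never set and no simulate entry is produced for it.)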
assertTrue(resultList.get(1).getIngestDocument().hasField(key1)); + assertFalse(resultList.get(1).getIngestDocument().hasField(key2)); + assertTrue(resultList.get(1).getIngestDocument().hasField(key3)); + + assertThat(resultList.get(1).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); + assertThat(resultList.get(1).getFailure(), nullValue()); + assertThat(resultList.get(1).getProcessorTag(), nullValue()); + } + public void testActualPipelineProcessorWithHandledFailure() throws Exception { RuntimeException exception = new RuntimeException("processor failed"); String pipelineId = "pipeline1"; IngestService ingestService = mock(IngestService.class); Map pipelineConfig = new HashMap<>(); - pipelineConfig.put("pipeline", pipelineId); + pipelineConfig.put("name", pipelineId); PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); String key1 = randomAlphaOfLength(10); @@ -226,13 +405,13 @@ public class TrackingResultProcessorTests extends ESTestCase { PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig); CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor); - CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList, pipelinesSeen); + CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); trackingProcessor.execute(ingestDocument); SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); - verify(ingestService).getPipeline(pipelineId); + verify(ingestService, Mockito.atLeast(2)).getPipeline(pipelineId); assertThat(resultList.size(), equalTo(4)); assertTrue(resultList.get(0).getIngestDocument().hasField(key1)); @@ -253,50 +432,61 @@ public class TrackingResultProcessorTests extends ESTestCase { } public void testActualPipelineProcessorWithCycle() throws Exception { - String pipelineId = "pipeline1"; + String pipelineId1 = "pipeline1"; + String pipelineId2 = "pipeline2"; IngestService ingestService = mock(IngestService.class); - Map pipelineConfig = new HashMap<>(); - pipelineConfig.put("pipeline", pipelineId); + Map pipelineConfig0 = new HashMap<>(); + pipelineConfig0.put("name", pipelineId1); + Map pipelineConfig1 = new HashMap<>(); + pipelineConfig1.put("name", pipelineId1); + Map pipelineConfig2 = new HashMap<>(); + pipelineConfig2.put("name", pipelineId2); PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); - PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig); - Pipeline pipeline = new Pipeline( - pipelineId, null, null, new CompoundProcessor(pipelineProcessor) - ); - when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline); + Pipeline pipeline1 = new Pipeline( + pipelineId1, null, null, new CompoundProcessor(factory.create(Collections.emptyMap(), null, pipelineConfig2))); + Pipeline pipeline2 = new Pipeline( + pipelineId2, null, null, new CompoundProcessor(factory.create(Collections.emptyMap(), null, pipelineConfig1))); + + when(ingestService.getPipeline(pipelineId1)).thenReturn(pipeline1); + when(ingestService.getPipeline(pipelineId2)).thenReturn(pipeline2); + + PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig0); CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor); - IllegalStateException exception = expectThrows(IllegalStateException.class, - () -> decorate(actualProcessor, resultList, pipelinesSeen)); - 
assertThat(exception.getMessage(), equalTo("Cycle detected for pipeline: pipeline1")); - } + CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); + ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> trackingProcessor.execute(ingestDocument)); + assertThat(exception.getCause(), instanceOf(IllegalArgumentException.class)); + assertThat(exception.getCause().getCause(), instanceOf(IllegalStateException.class)); + assertThat(exception.getMessage(), containsString("Cycle detected for pipeline: pipeline1")); + } public void testActualPipelineProcessorRepeatedInvocation() throws Exception { String pipelineId = "pipeline1"; IngestService ingestService = mock(IngestService.class); Map pipelineConfig = new HashMap<>(); - pipelineConfig.put("pipeline", pipelineId); + pipelineConfig.put("name", pipelineId); PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); String key1 = randomAlphaOfLength(10); PipelineProcessor pipelineProcessor = factory.create(Collections.emptyMap(), null, pipelineConfig); Pipeline pipeline = new Pipeline( pipelineId, null, null, new CompoundProcessor( - new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key1, randomInt()); })) + new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key1, randomInt()); })) ); when(ingestService.getPipeline(pipelineId)).thenReturn(pipeline); CompoundProcessor actualProcessor = new CompoundProcessor(pipelineProcessor, pipelineProcessor); - CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList, pipelinesSeen); + CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); trackingProcessor.execute(ingestDocument); SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); - verify(ingestService, times(2)).getPipeline(pipelineId); + verify(ingestService, Mockito.atLeast(2)).getPipeline(pipelineId); assertThat(resultList.size(), equalTo(2)); assertThat(resultList.get(0).getIngestDocument(), not(equalTo(expectedResult.getIngestDocument()))); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 6952cfed7bf..74a7d1bc3ce 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -68,6 +68,7 @@ import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.cert.CertificateException; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -449,7 +450,7 @@ public abstract class ESRestTestCase extends ESTestCase { } } - private void wipeRollupJobs() throws IOException { + private void wipeRollupJobs() throws IOException, InterruptedException { Response response = adminClient().performRequest(new Request("GET", "/_xpack/rollup/job/_all")); Map jobs = entityAsMap(response); @SuppressWarnings("unchecked") @@ -460,6 +461,29 @@ public abstract class ESRestTestCase extends ESTestCase { return; } + for (Map jobConfig : jobConfigs) { + @SuppressWarnings("unchecked") + String jobId = (String) ((Map) jobConfig.get("config")).get("id"); + Request request = new Request("POST", "/_xpack/rollup/job/" + jobId + "/_stop"); + request.addParameter("ignore", "404"); + logger.debug("stopping rollup job [{}]", jobId); + 
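(The ignore=404 parameter set above makes the low-level REST client treat an already-stopped or missing job as success, keeping this cleanup loop idempotent.)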
adminClient().performRequest(request); + } + + // TODO this is temporary until StopJob API gains the ability to block until stopped + awaitBusy(() -> { + Request request = new Request("GET", "/_xpack/rollup/job/_all"); + try { + Response jobsResponse = adminClient().performRequest(request); + String body = EntityUtils.toString(jobsResponse.getEntity()); + logger.error(body); + // If the body contains any of the non-stopped states, at least one job is not finished yet + return Arrays.stream(new String[]{"started", "aborting", "stopping", "indexing"}).noneMatch(body::contains); + } catch (IOException e) { + return false; + } + }, 10, TimeUnit.SECONDS); + for (Map jobConfig : jobConfigs) { @SuppressWarnings("unchecked") String jobId = (String) ((Map) jobConfig.get("config")).get("id"); diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java index 7c8036190e8..bc3d846343a 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java @@ -50,8 +50,8 @@ public class CcrMultiClusterLicenseIT extends ESRestTestCase { public void testAutoFollow() throws Exception { assumeFalse("windows is the worst", Constants.WINDOWS); if (runningAgainstLeaderCluster == false) { - final Request request = new Request("PUT", "/_ccr/auto_follow/leader_cluster"); - request.setJsonEntity("{\"leader_index_patterns\":[\"*\"]}"); + final Request request = new Request("PUT", "/_ccr/auto_follow/test_pattern"); + request.setJsonEntity("{\"leader_index_patterns\":[\"*\"], \"leader_cluster\": \"leader_cluster\"}"); client().performRequest(request); // parse the logs and ensure that the auto-coordinator skipped coordination on the leader cluster @@ -64,7 +64,7 @@ public class CcrMultiClusterLicenseIT extends ESRestTestCase { while (it.hasNext()) { final String line = it.next(); if (line.matches(".*\\[WARN\\s*\\]\\[o\\.e\\.x\\.c\\.a\\.AutoFollowCoordinator\\s*\\] \\[node-0\\] " + - "failure occurred while fetching cluster state in leader cluster \\[leader_cluster\\]")) { + "failure occurred while fetching cluster state for auto follow pattern \\[test_pattern\\]")) { warn = true; break; } diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java index 872c995faf5..6d5ca4559fe 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java @@ -146,14 +146,14 @@ public class FollowIndexSecurityIT extends ESRestTestCase { String disallowedIndex = "logs-us-20190101"; { - Request request = new Request("PUT", "/_ccr/auto_follow/leader_cluster"); - request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"]}"); + Request request = new Request("PUT", "/_ccr/auto_follow/test_pattern"); + request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"], \"leader_cluster\": \"leader_cluster\"}"); Exception e = 
expectThrows(ResponseException.class, () -> assertOK(client().performRequest(request))); assertThat(e.getMessage(), containsString("insufficient privileges to follow index [logs-*]")); } - Request request = new Request("PUT", "/_ccr/auto_follow/leader_cluster"); - request.setJsonEntity("{\"leader_index_patterns\": [\"logs-eu-*\"]}"); + Request request = new Request("PUT", "/_ccr/auto_follow/test_pattern"); + request.setJsonEntity("{\"leader_index_patterns\": [\"logs-eu-*\"], \"leader_cluster\": \"leader_cluster\"}"); assertOK(client().performRequest(request)); try (RestClient leaderClient = buildLeaderClient()) { @@ -185,7 +185,7 @@ public class FollowIndexSecurityIT extends ESRestTestCase { }); // Cleanup by deleting auto follow pattern and pause following: - request = new Request("DELETE", "/_ccr/auto_follow/leader_cluster"); + request = new Request("DELETE", "/_ccr/auto_follow/test_pattern"); assertOK(client().performRequest(request)); pauseFollow(allowedIndex); } diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index 9bdeaa2a024..8e50b3697f6 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ -103,8 +103,8 @@ public class FollowIndexIT extends ESRestTestCase { public void testAutoFollowPatterns() throws Exception { assumeFalse("Test should only run when both clusters are running", runningAgainstLeaderCluster); - Request request = new Request("PUT", "/_ccr/auto_follow/leader_cluster"); - request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"]}"); + Request request = new Request("PUT", "/_ccr/auto_follow/test_pattern"); + request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"], \"leader_cluster\": \"leader_cluster\"}"); assertOK(client().performRequest(request)); try (RestClient leaderClient = buildLeaderClient()) { diff --git a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml index 21569ed5d05..357fc7e1f56 100644 --- a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml +++ b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml @@ -21,29 +21,32 @@ - do: ccr.put_auto_follow_pattern: - leader_cluster: local + name: my_pattern body: + leader_cluster: local leader_index_patterns: ['logs-*'] max_concurrent_read_batches: 2 - is_true: acknowledged - do: ccr.get_auto_follow_pattern: - leader_cluster: local - - match: { local.leader_index_patterns: ['logs-*'] } - - match: { local.max_concurrent_read_batches: 2 } + name: my_pattern + - match: { my_pattern.leader_cluster: 'local' } + - match: { my_pattern.leader_index_patterns: ['logs-*'] } + - match: { my_pattern.max_concurrent_read_batches: 2 } - do: ccr.get_auto_follow_pattern: {} - - match: { local.leader_index_patterns: ['logs-*'] } - - match: { local.max_concurrent_read_batches: 2 } + - match: { my_pattern.leader_cluster: 'local' } + - match: { my_pattern.leader_index_patterns: ['logs-*'] } + - match: { my_pattern.max_concurrent_read_batches: 2 } - do: ccr.delete_auto_follow_pattern: - leader_cluster: local + name: my_pattern - is_true: acknowledged - do: catch: missing ccr.get_auto_follow_pattern: - leader_cluster: local + name: 
my_pattern
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java
index 7ac40e682db..a18ec3bf6c4 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java
@@ -47,6 +47,7 @@ import java.util.TreeMap;
 import java.util.function.BiConsumer;
 import java.util.function.Consumer;
 import java.util.function.Function;
+import java.util.stream.Collectors;

 /**
  * A component that runs only on the elected master node and follows leader indices automatically
@@ -105,19 +106,19 @@ public class AutoFollowCoordinator implements ClusterStateApplier {
     synchronized void updateStats(List results) {
         for (AutoFollowResult result : results) {
             if (result.clusterStateFetchException != null) {
-                recentAutoFollowErrors.put(result.clusterAlias,
+                recentAutoFollowErrors.put(result.autoFollowPatternName,
                     new ElasticsearchException(result.clusterStateFetchException));
                 numberOfFailedRemoteClusterStateRequests++;
-                LOGGER.warn(new ParameterizedMessage("failure occurred while fetching cluster state in leader cluster [{}]",
-                    result.clusterAlias), result.clusterStateFetchException);
+                LOGGER.warn(new ParameterizedMessage("failure occurred while fetching cluster state for auto follow pattern [{}]",
+                    result.autoFollowPatternName), result.clusterStateFetchException);
             } else {
                 for (Map.Entry entry : result.autoFollowExecutionResults.entrySet()) {
                     if (entry.getValue() != null) {
                         numberOfFailedIndicesAutoFollowed++;
-                        recentAutoFollowErrors.put(result.clusterAlias + ":" + entry.getKey().getName(),
+                        recentAutoFollowErrors.put(result.autoFollowPatternName + ":" + entry.getKey().getName(),
                             ExceptionsHelper.convertToElastic(entry.getValue()));
-                        LOGGER.warn(new ParameterizedMessage("failure occurred while auto following index [{}] in leader cluster [{}]",
-                            entry.getKey(), result.clusterAlias), entry.getValue());
+                        LOGGER.warn(new ParameterizedMessage("failure occurred while auto following index [{}] for auto follow " +
+                            "pattern [{}]", entry.getKey(), result.autoFollowPatternName), entry.getValue());
                     } else {
                         numberOfSuccessfulIndicesAutoFollowed++;
                     }
@@ -243,34 +244,45 @@ public class AutoFollowCoordinator implements ClusterStateApplier {
             int i = 0;
             for (Map.Entry entry : autoFollowMetadata.getPatterns().entrySet()) {
                 final int slot = i;
-                final String clusterAlias = entry.getKey();
+                final String autoFollowPatternName = entry.getKey();
                 final AutoFollowPattern autoFollowPattern = entry.getValue();
+                final String leaderCluster = autoFollowPattern.getLeaderCluster();

-                Map headers = autoFollowMetadata.getHeaders().get(clusterAlias);
-                getLeaderClusterState(headers, clusterAlias, (leaderClusterState, e) -> {
+                Map headers = autoFollowMetadata.getHeaders().get(autoFollowPatternName);
+                getLeaderClusterState(headers, leaderCluster, (leaderClusterState, e) -> {
                     if (leaderClusterState != null) {
                         assert e == null;
-                        final List followedIndices = autoFollowMetadata.getFollowedLeaderIndexUUIDs().get(clusterAlias);
-                        final List leaderIndicesToFollow = getLeaderIndicesToFollow(clusterAlias, autoFollowPattern,
+                        final List followedIndices = autoFollowMetadata.getFollowedLeaderIndexUUIDs().get(autoFollowPatternName);
+                        final List leaderIndicesToFollow = getLeaderIndicesToFollow(leaderCluster, autoFollowPattern,
                             leaderClusterState, followerClusterState, followedIndices);
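(Since patterns are now keyed by their own name rather than by the leader cluster alias, several auto-follow patterns may target the same leader cluster; overlapping matches between such patterns are detected below and surfaced as errors.)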
                        if (leaderIndicesToFollow.isEmpty()) {
-                            finalise(slot, new AutoFollowResult(clusterAlias));
+                            finalise(slot, new AutoFollowResult(autoFollowPatternName));
                         } else {
+                            List<Tuple<String, AutoFollowPattern>> patternsForTheSameLeaderCluster = autoFollowMetadata.getPatterns()
+                                .entrySet().stream()
+                                .filter(item -> autoFollowPatternName.equals(item.getKey()) == false)
+                                .filter(item -> leaderCluster.equals(item.getValue().getLeaderCluster()))
+                                .map(item -> new Tuple<>(item.getKey(), item.getValue()))
+                                .collect(Collectors.toList());
+
                             Consumer resultHandler = result -> finalise(slot, result);
-                            checkAutoFollowPattern(clusterAlias, autoFollowPattern, leaderIndicesToFollow, headers, resultHandler);
+                            checkAutoFollowPattern(autoFollowPatternName, leaderCluster, autoFollowPattern, leaderIndicesToFollow, headers,
+                                patternsForTheSameLeaderCluster, resultHandler);
                         }
                     } else {
-                        finalise(slot, new AutoFollowResult(clusterAlias, e));
+                        finalise(slot, new AutoFollowResult(autoFollowPatternName, e));
                     }
                 });
                 i++;
             }
         }

-        private void checkAutoFollowPattern(String clusterAlias,
+        private void checkAutoFollowPattern(String autoFollowPatternName,
+                                            String clusterAlias,
                                             AutoFollowPattern autoFollowPattern,
                                             List leaderIndicesToFollow,
                                             Map headers,
+                                            List<Tuple<String, AutoFollowPattern>> patternsForTheSameLeaderCluster,
                                             Consumer resultHandler) {

             final CountDown leaderIndicesCountDown = new CountDown(leaderIndicesToFollow.size());
@@ -278,16 +290,31 @@ public class AutoFollowCoordinator implements ClusterStateApplier {
             for (int i = 0; i < leaderIndicesToFollow.size(); i++) {
                 final Index indexToFollow = leaderIndicesToFollow.get(i);
                 final int slot = i;
-                followLeaderIndex(clusterAlias, indexToFollow, autoFollowPattern, headers, error -> {
-                    results.set(slot, new Tuple<>(indexToFollow, error));
+
+                List<String> otherMatchingPatterns = patternsForTheSameLeaderCluster.stream()
+                    .filter(otherPattern -> otherPattern.v2().match(indexToFollow.getName()))
+                    .map(Tuple::v1)
+                    .collect(Collectors.toList());
+                if (otherMatchingPatterns.size() != 0) {
+                    results.set(slot, new Tuple<>(indexToFollow, new ElasticsearchException("index to follow [" + indexToFollow.getName() +
+                        "] for pattern [" + autoFollowPatternName + "] matches with other patterns " + otherMatchingPatterns)));
                     if (leaderIndicesCountDown.countDown()) {
-                        resultHandler.accept(new AutoFollowResult(clusterAlias, results.asList()));
+                        resultHandler.accept(new AutoFollowResult(autoFollowPatternName, results.asList()));
                     }
-                });
+                } else {
+                    followLeaderIndex(autoFollowPatternName, clusterAlias, indexToFollow, autoFollowPattern, headers, error -> {
+                        results.set(slot, new Tuple<>(indexToFollow, error));
+                        if (leaderIndicesCountDown.countDown()) {
+                            resultHandler.accept(new AutoFollowResult(autoFollowPatternName, results.asList()));
+                        }
+                    });
+                }
             }
         }

-        private void followLeaderIndex(String clusterAlias,
+        private void followLeaderIndex(String autoFollowPatternName,
+                                       String clusterAlias,
                                        Index indexToFollow,
                                        AutoFollowPattern pattern,
                                        Map headers,
@@ -313,7 +340,7 @@ public class AutoFollowCoordinator implements ClusterStateApplier {

                 // This function updates the auto follow metadata in the cluster to record that the leader index has been followed:
                 // (so that we do not try to follow it in subsequent auto follow runs)
-                Function function = recordLeaderIndexAsFollowFunction(clusterAlias, indexToFollow);
+                Function function = recordLeaderIndexAsFollowFunction(autoFollowPatternName, indexToFollow);
                 // The coordinator always runs on the elected master node, so we can update cluster state here:
                 updateAutoFollowMetadata(function,
onResult); }; @@ -356,12 +383,12 @@ public class AutoFollowCoordinator implements ClusterStateApplier { } } - static Function recordLeaderIndexAsFollowFunction(String clusterAlias, + static Function recordLeaderIndexAsFollowFunction(String name, Index indexToFollow) { return currentState -> { AutoFollowMetadata currentAutoFollowMetadata = currentState.metaData().custom(AutoFollowMetadata.TYPE); Map> newFollowedIndexUUIDS = new HashMap<>(currentAutoFollowMetadata.getFollowedLeaderIndexUUIDs()); - newFollowedIndexUUIDS.compute(clusterAlias, (key, existingUUIDs) -> { + newFollowedIndexUUIDS.compute(name, (key, existingUUIDs) -> { assert existingUUIDs != null; List newUUIDs = new ArrayList<>(existingUUIDs); newUUIDs.add(indexToFollow.getUUID()); @@ -405,12 +432,12 @@ public class AutoFollowCoordinator implements ClusterStateApplier { static class AutoFollowResult { - final String clusterAlias; + final String autoFollowPatternName; final Exception clusterStateFetchException; final Map autoFollowExecutionResults; - AutoFollowResult(String clusterAlias, List> results) { - this.clusterAlias = clusterAlias; + AutoFollowResult(String autoFollowPatternName, List> results) { + this.autoFollowPatternName = autoFollowPatternName; Map autoFollowExecutionResults = new HashMap<>(); for (Tuple result : results) { @@ -421,14 +448,14 @@ public class AutoFollowCoordinator implements ClusterStateApplier { this.autoFollowExecutionResults = Collections.unmodifiableMap(autoFollowExecutionResults); } - AutoFollowResult(String clusterAlias, Exception e) { - this.clusterAlias = clusterAlias; + AutoFollowResult(String autoFollowPatternName, Exception e) { + this.autoFollowPatternName = autoFollowPatternName; this.clusterStateFetchException = e; this.autoFollowExecutionResults = Collections.emptyMap(); } - AutoFollowResult(String clusterAlias) { - this(clusterAlias, (Exception) null); + AutoFollowResult(String autoFollowPatternName) { + this(autoFollowPatternName, (Exception) null); } } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java index 4c82c90b2d5..611d3197869 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java @@ -39,6 +39,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Objects; +import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -67,6 +68,8 @@ public class ShardChangesAction extends Action { private TimeValue pollTimeout = TransportResumeFollowAction.DEFAULT_POLL_TIMEOUT; private ByteSizeValue maxBatchSize = TransportResumeFollowAction.DEFAULT_MAX_BATCH_SIZE; + private long relativeStartNanos; + public Request(ShardId shardId, String expectedHistoryUUID) { super(shardId.getIndexName()); this.shardId = shardId; @@ -142,6 +145,9 @@ public class ShardChangesAction extends Action { expectedHistoryUUID = in.readString(); pollTimeout = in.readTimeValue(); maxBatchSize = new ByteSizeValue(in); + + // Starting the clock in order to know how much time is spent on fetching operations: + relativeStartNanos = System.nanoTime(); } @Override @@ -220,6 +226,12 @@ public class ShardChangesAction extends Action { return operations; } + private long tookInMillis; + + public long 
getTookInMillis() {
+            return tookInMillis;
+        }
+
         Response() {
         }
@@ -228,13 +240,15 @@ public class ShardChangesAction extends Action {
                 final long globalCheckpoint,
                 final long maxSeqNo,
                 final long maxSeqNoOfUpdatesOrDeletes,
-                final Translog.Operation[] operations) {
+                final Translog.Operation[] operations,
+                final long tookInMillis) {
             this.mappingVersion = mappingVersion;
             this.globalCheckpoint = globalCheckpoint;
             this.maxSeqNo = maxSeqNo;
             this.maxSeqNoOfUpdatesOrDeletes = maxSeqNoOfUpdatesOrDeletes;
             this.operations = operations;
+            this.tookInMillis = tookInMillis;
         }

         @Override
@@ -245,6 +259,7 @@ public class ShardChangesAction extends Action {
             maxSeqNo = in.readZLong();
             maxSeqNoOfUpdatesOrDeletes = in.readZLong();
             operations = in.readArray(Translog.Operation::readOperation, Translog.Operation[]::new);
+            tookInMillis = in.readVLong();
         }

         @Override
@@ -255,6 +270,7 @@ public class ShardChangesAction extends Action {
             out.writeZLong(maxSeqNo);
             out.writeZLong(maxSeqNoOfUpdatesOrDeletes);
             out.writeArray(Translog.Operation::writeOperation, operations);
+            out.writeVLong(tookInMillis);
         }

         @Override
@@ -266,12 +282,14 @@ public class ShardChangesAction extends Action {
                 globalCheckpoint == that.globalCheckpoint &&
                 maxSeqNo == that.maxSeqNo &&
                 maxSeqNoOfUpdatesOrDeletes == that.maxSeqNoOfUpdatesOrDeletes &&
-                Arrays.equals(operations, that.operations);
+                Arrays.equals(operations, that.operations) &&
+                tookInMillis == that.tookInMillis;
         }

         @Override
         public int hashCode() {
-            return Objects.hash(mappingVersion, globalCheckpoint, maxSeqNo, maxSeqNoOfUpdatesOrDeletes, Arrays.hashCode(operations));
+            return Objects.hash(mappingVersion, globalCheckpoint, maxSeqNo, maxSeqNoOfUpdatesOrDeletes,
+                Arrays.hashCode(operations), tookInMillis);
         }
     }
@@ -308,7 +326,7 @@ public class ShardChangesAction extends Action {
                 request.getMaxBatchSize());
             // must capture after snapshotting operations to ensure this MUS is at least the highest MUS of any of these operations.
final long maxSeqNoOfUpdatesOrDeletes = indexShard.getMaxSeqNoOfUpdatesOrDeletes(); - return getResponse(mappingVersion, seqNoStats, maxSeqNoOfUpdatesOrDeletes, operations); + return getResponse(mappingVersion, seqNoStats, maxSeqNoOfUpdatesOrDeletes, operations, request.relativeStartNanos); } @Override @@ -373,7 +391,8 @@ public class ShardChangesAction extends Action { clusterService.state().metaData().index(shardId.getIndex()).getMappingVersion(); final SeqNoStats latestSeqNoStats = indexShard.seqNoStats(); final long maxSeqNoOfUpdatesOrDeletes = indexShard.getMaxSeqNoOfUpdatesOrDeletes(); - listener.onResponse(getResponse(mappingVersion, latestSeqNoStats, maxSeqNoOfUpdatesOrDeletes, EMPTY_OPERATIONS_ARRAY)); + listener.onResponse(getResponse(mappingVersion, latestSeqNoStats, maxSeqNoOfUpdatesOrDeletes, EMPTY_OPERATIONS_ARRAY, + request.relativeStartNanos)); } catch (final Exception caught) { caught.addSuppressed(e); listener.onFailure(caught); @@ -459,8 +478,11 @@ public class ShardChangesAction extends Action { } static Response getResponse(final long mappingVersion, final SeqNoStats seqNoStats, - final long maxSeqNoOfUpdates, final Translog.Operation[] operations) { - return new Response(mappingVersion, seqNoStats.getGlobalCheckpoint(), seqNoStats.getMaxSeqNo(), maxSeqNoOfUpdates, operations); + final long maxSeqNoOfUpdates, final Translog.Operation[] operations, long relativeStartNanos) { + long tookInNanos = System.nanoTime() - relativeStartNanos; + long tookInMillis = TimeUnit.NANOSECONDS.toMillis(tookInNanos); + return new Response(mappingVersion, seqNoStats.getGlobalCheckpoint(), seqNoStats.getMaxSeqNo(), maxSeqNoOfUpdates, + operations, tookInMillis); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java index 55d246fea4b..b156a41896a 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java @@ -71,6 +71,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { private int numConcurrentReads = 0; private int numConcurrentWrites = 0; private long currentMappingVersion = 0; + private long totalFetchTookTimeMillis = 0; private long totalFetchTimeMillis = 0; private long numberOfSuccessfulFetches = 0; private long numberOfFailedFetches = 0; @@ -238,6 +239,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { fetchExceptions.remove(from); if (response.getOperations().length > 0) { // do not count polls against fetch stats + totalFetchTookTimeMillis += response.getTookInMillis(); totalFetchTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime); numberOfSuccessfulFetches++; operationsReceived += response.getOperations().length; @@ -449,6 +451,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { buffer.size(), currentMappingVersion, totalFetchTimeMillis, + totalFetchTookTimeMillis, numberOfSuccessfulFetches, numberOfFailedFetches, operationsReceived, diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternAction.java index 2d0407824d9..5d5885294f3 100644 --- 
a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternAction.java @@ -54,7 +54,7 @@ public class TransportDeleteAutoFollowPatternAction extends protected void masterOperation(DeleteAutoFollowPatternAction.Request request, ClusterState state, ActionListener listener) throws Exception { - clusterService.submitStateUpdateTask("put-auto-follow-pattern-" + request.getLeaderCluster(), + clusterService.submitStateUpdateTask("put-auto-follow-pattern-" + request.getName(), new AckedClusterStateUpdateTask(request, listener) { @Override @@ -72,23 +72,23 @@ public class TransportDeleteAutoFollowPatternAction extends static ClusterState innerDelete(DeleteAutoFollowPatternAction.Request request, ClusterState currentState) { AutoFollowMetadata currentAutoFollowMetadata = currentState.metaData().custom(AutoFollowMetadata.TYPE); if (currentAutoFollowMetadata == null) { - throw new ResourceNotFoundException("no auto-follow patterns for cluster alias [{}] found", - request.getLeaderCluster()); + throw new ResourceNotFoundException("auto-follow pattern [{}] is missing", + request.getName()); } Map patterns = currentAutoFollowMetadata.getPatterns(); - AutoFollowPattern autoFollowPatternToRemove = patterns.get(request.getLeaderCluster()); + AutoFollowPattern autoFollowPatternToRemove = patterns.get(request.getName()); if (autoFollowPatternToRemove == null) { - throw new ResourceNotFoundException("no auto-follow patterns for cluster alias [{}] found", - request.getLeaderCluster()); + throw new ResourceNotFoundException("auto-follow pattern [{}] is missing", + request.getName()); } final Map patternsCopy = new HashMap<>(patterns); final Map> followedLeaderIndexUUIDSCopy = new HashMap<>(currentAutoFollowMetadata.getFollowedLeaderIndexUUIDs()); final Map> headers = new HashMap<>(currentAutoFollowMetadata.getHeaders()); - patternsCopy.remove(request.getLeaderCluster()); - followedLeaderIndexUUIDSCopy.remove(request.getLeaderCluster()); - headers.remove(request.getLeaderCluster()); + patternsCopy.remove(request.getName()); + followedLeaderIndexUUIDSCopy.remove(request.getName()); + headers.remove(request.getName()); AutoFollowMetadata newAutoFollowMetadata = new AutoFollowMetadata(patternsCopy, followedLeaderIndexUUIDSCopy, headers); ClusterState.Builder newState = ClusterState.builder(currentState); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternAction.java index bd4885a4f75..9f738026ef6 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternAction.java @@ -55,7 +55,7 @@ public class TransportGetAutoFollowPatternAction protected void masterOperation(GetAutoFollowPatternAction.Request request, ClusterState state, ActionListener listener) throws Exception { - Map autoFollowPatterns = getAutoFollowPattern(state.metaData(), request.getLeaderCluster()); + Map autoFollowPatterns = getAutoFollowPattern(state.metaData(), request.getName()); listener.onResponse(new GetAutoFollowPatternAction.Response(autoFollowPatterns)); } @@ -64,20 +64,20 @@ public class TransportGetAutoFollowPatternAction return 
state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); } - static Map getAutoFollowPattern(MetaData metaData, String leaderClusterAlias) { + static Map getAutoFollowPattern(MetaData metaData, String name) { AutoFollowMetadata autoFollowMetadata = metaData.custom(AutoFollowMetadata.TYPE); if (autoFollowMetadata == null) { - throw new ResourceNotFoundException("no auto-follow patterns for cluster alias [{}] found", leaderClusterAlias); + throw new ResourceNotFoundException("auto-follow pattern [{}] is missing", name); } - if (leaderClusterAlias == null) { + if (name == null) { return autoFollowMetadata.getPatterns(); } - AutoFollowPattern autoFollowPattern = autoFollowMetadata.getPatterns().get(leaderClusterAlias); + AutoFollowPattern autoFollowPattern = autoFollowMetadata.getPatterns().get(name); if (autoFollowPattern == null) { - throw new ResourceNotFoundException("no auto-follow patterns for cluster alias [{}] found", leaderClusterAlias); + throw new ResourceNotFoundException("auto-follow pattern [{}] is missing", name); } - return Collections.singletonMap(leaderClusterAlias, autoFollowPattern); + return Collections.singletonMap(name, autoFollowPattern); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java index 19f05f575d9..8ac28e23fda 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java @@ -135,14 +135,14 @@ public class TransportPutAutoFollowPatternAction extends headers = new HashMap<>(); } - AutoFollowPattern previousPattern = patterns.get(request.getLeaderCluster()); + AutoFollowPattern previousPattern = patterns.get(request.getName()); final List followedIndexUUIDs; - if (followedLeaderIndices.containsKey(request.getLeaderCluster())) { - followedIndexUUIDs = new ArrayList<>(followedLeaderIndices.get(request.getLeaderCluster())); + if (followedLeaderIndices.containsKey(request.getName())) { + followedIndexUUIDs = new ArrayList<>(followedLeaderIndices.get(request.getName())); } else { followedIndexUUIDs = new ArrayList<>(); } - followedLeaderIndices.put(request.getLeaderCluster(), followedIndexUUIDs); + followedLeaderIndices.put(request.getName(), followedIndexUUIDs); // Mark existing leader indices as already auto followed: if (previousPattern != null) { markExistingIndicesAsAutoFollowedForNewPatterns(request.getLeaderIndexPatterns(), leaderClusterState.metaData(), @@ -153,10 +153,11 @@ public class TransportPutAutoFollowPatternAction extends } if (filteredHeaders != null) { - headers.put(request.getLeaderCluster(), filteredHeaders); + headers.put(request.getName(), filteredHeaders); } AutoFollowPattern autoFollowPattern = new AutoFollowPattern( + request.getLeaderCluster(), request.getLeaderIndexPatterns(), request.getFollowIndexNamePattern(), request.getMaxBatchOperationCount(), @@ -166,7 +167,7 @@ public class TransportPutAutoFollowPatternAction extends request.getMaxWriteBufferSize(), request.getMaxRetryDelay(), request.getPollTimeout()); - patterns.put(request.getLeaderCluster(), autoFollowPattern); + patterns.put(request.getName(), autoFollowPattern); ClusterState.Builder newState = ClusterState.builder(localState); newState.metaData(MetaData.builder(localState.getMetaData()) 
.putCustom(AutoFollowMetadata.TYPE, new AutoFollowMetadata(patterns, followedLeaderIndices, headers)) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index e2a562a5186..13d173ed815 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -168,6 +169,7 @@ public final class TransportPutFollowAction settingsBuilder.put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()); settingsBuilder.put(IndexMetaData.SETTING_INDEX_PROVIDED_NAME, followIndex); settingsBuilder.put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true); + settingsBuilder.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true); imdBuilder.settings(settingsBuilder); // Copy mappings from leader IMD to follow IMD diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java index bbace5b70df..569e2d2cacf 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java @@ -240,6 +240,9 @@ public class TransportResumeFollowAction extends HandledTransportAction client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java index 62aae8536d1..40858f7f326 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java @@ -21,7 +21,7 @@ public class RestGetAutoFollowPatternAction extends BaseRestHandler { public RestGetAutoFollowPatternAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.GET, "/_ccr/auto_follow/{leader_cluster}", this); + controller.registerHandler(RestRequest.Method.GET, "/_ccr/auto_follow/{name}", this); controller.registerHandler(RestRequest.Method.GET, "/_ccr/auto_follow", this); } @@ -33,7 +33,7 @@ public class RestGetAutoFollowPatternAction extends BaseRestHandler { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { Request request = new Request(); - request.setLeaderCluster(restRequest.param("leader_cluster")); + request.setName(restRequest.param("name")); return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java index 04d9872ff76..957312ff78d 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java @@ -22,7 +22,7 @@ public class RestPutAutoFollowPatternAction extends BaseRestHandler { public RestPutAutoFollowPatternAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.PUT, "/_ccr/auto_follow/{leader_cluster}", this); + controller.registerHandler(RestRequest.Method.PUT, "/_ccr/auto_follow/{name}", this); } @Override @@ -38,7 +38,7 @@ public class RestPutAutoFollowPatternAction extends BaseRestHandler { static Request createRequest(RestRequest restRequest) throws IOException { try (XContentParser parser = restRequest.contentOrSourceParamParser()) { - return Request.fromXContent(parser, restRequest.param("leader_cluster")); + return Request.fromXContent(parser, restRequest.param("name")); } } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java index 5c181a99c29..1f2cbc4961b 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.ccr; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -26,6 +27,7 @@ import java.util.Arrays; import java.util.Collections; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; public class AutoFollowIT extends CcrIntegTestCase { @@ -45,7 +47,12 @@ public class AutoFollowIT extends CcrIntegTestCase { createLeaderIndex("logs-201812", leaderIndexSettings); // Enabling auto following: - putAutoFollowPatterns("logs-*", "transactions-*"); + if (randomBoolean()) { + putAutoFollowPatterns("my-pattern", new String[] {"logs-*", "transactions-*"}); + } else { + putAutoFollowPatterns("my-pattern1", new String[] {"logs-*"}); + putAutoFollowPatterns("my-pattern2", new String[] {"transactions-*"}); + } createLeaderIndex("metrics-201901", leaderIndexSettings); @@ -76,7 +83,7 @@ public class AutoFollowIT extends CcrIntegTestCase { .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0) .build(); - putAutoFollowPatterns("logs-*"); + putAutoFollowPatterns("my-pattern", new String[] {"logs-*"}); int numIndices = randomIntBetween(4, 32); for (int i = 0; i < numIndices; i++) { createLeaderIndex("logs-" + i, leaderIndexSettings); @@ -90,7 +97,7 @@ public class AutoFollowIT extends CcrIntegTestCase { deleteAutoFollowPatternSetting(); createLeaderIndex("logs-does-not-count", leaderIndexSettings); - putAutoFollowPatterns("logs-*"); + putAutoFollowPatterns("my-pattern", new String[] {"logs-*"}); int i = numIndices; numIndices = numIndices + randomIntBetween(4, 32); for (; i < numIndices; i++) { @@ -113,6 +120,7 @@ public class AutoFollowIT extends CcrIntegTestCase { // Enabling auto following: PutAutoFollowPatternAction.Request request = new 
PutAutoFollowPatternAction.Request(); + request.setName("my-pattern"); request.setLeaderCluster("leader_cluster"); request.setLeaderIndexPatterns(Collections.singletonList("logs-*")); // Need to set this, because following an index in the same cluster @@ -173,8 +181,53 @@ public class AutoFollowIT extends CcrIntegTestCase { }); } - private void putAutoFollowPatterns(String... patterns) { + public void testConflictingPatterns() throws Exception { + Settings leaderIndexSettings = Settings.builder() + .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) + .put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) + .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0) + .build(); + + // Enabling auto following: + putAutoFollowPatterns("my-pattern1", new String[] {"logs-*"}); + putAutoFollowPatterns("my-pattern2", new String[] {"logs-2018*"}); + + createLeaderIndex("logs-201701", leaderIndexSettings); + assertBusy(() -> { + AutoFollowStats autoFollowStats = getAutoFollowStats(); + assertThat(autoFollowStats.getNumberOfSuccessfulFollowIndices(), equalTo(1L)); + assertThat(autoFollowStats.getNumberOfFailedFollowIndices(), equalTo(0L)); + assertThat(autoFollowStats.getNumberOfFailedRemoteClusterStateRequests(), equalTo(0L)); + }); + IndicesExistsRequest request = new IndicesExistsRequest("copy-logs-201701"); + assertTrue(followerClient().admin().indices().exists(request).actionGet().isExists()); + + createLeaderIndex("logs-201801", leaderIndexSettings); + assertBusy(() -> { + AutoFollowStats autoFollowStats = getAutoFollowStats(); + assertThat(autoFollowStats.getNumberOfSuccessfulFollowIndices(), equalTo(1L)); + assertThat(autoFollowStats.getNumberOfFailedFollowIndices(), greaterThanOrEqualTo(1L)); + assertThat(autoFollowStats.getNumberOfFailedRemoteClusterStateRequests(), equalTo(0L)); + + assertThat(autoFollowStats.getRecentAutoFollowErrors().size(), equalTo(2)); + ElasticsearchException autoFollowError1 = autoFollowStats.getRecentAutoFollowErrors().get("my-pattern1:logs-201801"); + assertThat(autoFollowError1, notNullValue()); + assertThat(autoFollowError1.getRootCause().getMessage(), equalTo("index to follow [logs-201801] for pattern [my-pattern1] " + + "matches with other patterns [my-pattern2]")); + + ElasticsearchException autoFollowError2 = autoFollowStats.getRecentAutoFollowErrors().get("my-pattern2:logs-201801"); + assertThat(autoFollowError2, notNullValue()); + assertThat(autoFollowError2.getRootCause().getMessage(), equalTo("index to follow [logs-201801] for pattern [my-pattern2] " + + "matches with other patterns [my-pattern1]")); + }); + + request = new IndicesExistsRequest("copy-logs-201801"); + assertFalse(followerClient().admin().indices().exists(request).actionGet().isExists()); + } + + private void putAutoFollowPatterns(String name, String[] patterns) { PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); + request.setName(name); request.setLeaderCluster("leader_cluster"); request.setLeaderIndexPatterns(Arrays.asList(patterns)); // Need to set this, because following an index in the same cluster @@ -184,7 +237,7 @@ public class AutoFollowIT extends CcrIntegTestCase { private void deleteAutoFollowPatternSetting() { DeleteAutoFollowPatternAction.Request request = new DeleteAutoFollowPatternAction.Request(); - request.setLeaderCluster("leader_cluster"); + request.setName("my-pattern"); assertTrue(followerClient().execute(DeleteAutoFollowPatternAction.INSTANCE, request).actionGet().isAcknowledged()); } diff --git 
a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowMetadataTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowMetadataTests.java index 89bac141ee3..67071bd1be5 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowMetadataTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowMetadataTests.java @@ -41,6 +41,7 @@ public class AutoFollowMetadataTests extends AbstractSerializingTestCase leaderPatterns = Arrays.asList(generateRandomStringArray(4, 4, false)); AutoFollowMetadata.AutoFollowPattern autoFollowPattern = new AutoFollowMetadata.AutoFollowPattern( + randomAlphaOfLength(4), leaderPatterns, randomAlphaOfLength(4), randomIntBetween(0, Integer.MAX_VALUE), diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java index 26e29eeacec..ab14f2dfb8e 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java @@ -118,6 +118,7 @@ public class CcrLicenseIT extends CcrSingleNodeTestCase { public void testThatPutAutoFollowPatternsIsUnavailableWithNonCompliantLicense() throws InterruptedException { final CountDownLatch latch = new CountDownLatch(1); final PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); + request.setName("name"); request.setLeaderCluster("leader"); request.setLeaderIndexPatterns(Collections.singletonList("*")); client().execute( @@ -147,8 +148,8 @@ public class CcrLicenseIT extends CcrSingleNodeTestCase { @Override public ClusterState execute(ClusterState currentState) throws Exception { - AutoFollowPattern autoFollowPattern = - new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); + AutoFollowPattern autoFollowPattern = new AutoFollowPattern("test_alias", Collections.singletonList("logs-*"), + null, null, null, null, null, null, null, null); AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata( Collections.singletonMap("test_alias", autoFollowPattern), Collections.emptyMap(), diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java index ca6d2747c6c..17bb6c8d70d 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java @@ -683,6 +683,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { () -> followerClient().execute(ResumeFollowAction.INSTANCE, followRequest.getFollowRequest()).actionGet()); assertThat(e.getMessage(), equalTo("unknown cluster alias [another_cluster]")); PutAutoFollowPatternAction.Request putAutoFollowRequest = new PutAutoFollowPatternAction.Request(); + putAutoFollowRequest.setName("name"); putAutoFollowRequest.setLeaderCluster("another_cluster"); putAutoFollowRequest.setLeaderIndexPatterns(Collections.singletonList("logs-*")); e = expectThrows(IllegalArgumentException.class, diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java index 69fe1fcdd85..3f4c70f0165 100644 --- 
a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java @@ -56,7 +56,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { .build(); AutoFollowPattern autoFollowPattern = - new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); + new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); Map patterns = new HashMap<>(); patterns.put("remote", autoFollowPattern); Map> followedLeaderIndexUUIDS = new HashMap<>(); @@ -120,7 +120,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { when(client.getRemoteClusterClient(anyString())).thenReturn(client); AutoFollowPattern autoFollowPattern = - new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); + new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); Map patterns = new HashMap<>(); patterns.put("remote", autoFollowPattern); Map> followedLeaderIndexUUIDS = new HashMap<>(); @@ -178,7 +178,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { .build(); AutoFollowPattern autoFollowPattern = - new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); + new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); Map patterns = new HashMap<>(); patterns.put("remote", autoFollowPattern); Map> followedLeaderIndexUUIDS = new HashMap<>(); @@ -241,7 +241,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { .build(); AutoFollowPattern autoFollowPattern = - new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); + new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); Map patterns = new HashMap<>(); patterns.put("remote", autoFollowPattern); Map> followedLeaderIndexUUIDS = new HashMap<>(); @@ -295,7 +295,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { public void testGetLeaderIndicesToFollow() { AutoFollowPattern autoFollowPattern = - new AutoFollowPattern(Collections.singletonList("metrics-*"), null, null, null, null, null, null, null, null); + new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), null, null, null, null, null, null, null, null); Map> headers = new HashMap<>(); ClusterState followerState = ClusterState.builder(new ClusterName("remote")) .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, @@ -342,15 +342,15 @@ public class AutoFollowCoordinatorTests extends ESTestCase { } public void testGetFollowerIndexName() { - AutoFollowPattern autoFollowPattern = new AutoFollowPattern(Collections.singletonList("metrics-*"), null, null, + AutoFollowPattern autoFollowPattern = new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), null, null, null, null, null, null, null, null); assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("metrics-0")); - autoFollowPattern = new AutoFollowPattern(Collections.singletonList("metrics-*"), "eu-metrics-0", null, null, + autoFollowPattern = new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), "eu-metrics-0", null, null, null, null, null, null, null); 
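
The `getFollowerIndexName` cases in this test pin down the follower-index naming rule: a null `follow_index_pattern` keeps the leader's index name, a literal pattern is used verbatim, and every `{{leader_index}}` placeholder is replaced with the leader index name. A hedged sketch of that rule under those assumptions (the real `AutoFollower` implementation may differ in detail):

----------------------------------------
final class FollowerIndexNames {

    // null pattern -> keep the leader's name; otherwise substitute the
    // {{leader_index}} placeholder (a literal pattern passes through as-is).
    static String followerIndexName(String followIndexPattern, String leaderIndexName) {
        if (followIndexPattern == null) {
            return leaderIndexName;
        }
        return followIndexPattern.replace("{{leader_index}}", leaderIndexName);
    }

    public static void main(String[] args) {
        System.out.println(followerIndexName(null, "metrics-0"));                  // metrics-0
        System.out.println(followerIndexName("eu-metrics-0", "metrics-0"));        // eu-metrics-0
        System.out.println(followerIndexName("eu-{{leader_index}}", "metrics-0")); // eu-metrics-0
    }
}
----------------------------------------
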
assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("eu-metrics-0")); - autoFollowPattern = new AutoFollowPattern(Collections.singletonList("metrics-*"), "eu-{{leader_index}}", null, + autoFollowPattern = new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), "eu-{{leader_index}}", null, null, null, null, null, null, null); assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("eu-metrics-0")); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/DeleteAutoFollowPatternRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/DeleteAutoFollowPatternRequestTests.java index 832b6bd4e46..b993132bcfa 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/DeleteAutoFollowPatternRequestTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/DeleteAutoFollowPatternRequestTests.java @@ -18,7 +18,7 @@ public class DeleteAutoFollowPatternRequestTests extends AbstractStreamableTestC @Override protected DeleteAutoFollowPatternAction.Request createTestInstance() { DeleteAutoFollowPatternAction.Request request = new DeleteAutoFollowPatternAction.Request(); - request.setLeaderCluster(randomAlphaOfLength(4)); + request.setName(randomAlphaOfLength(4)); return request; } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternRequestTests.java index 086b322ed48..5284dd0fa61 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternRequestTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternRequestTests.java @@ -20,7 +20,7 @@ public class GetAutoFollowPatternRequestTests extends AbstractWireSerializingTes protected GetAutoFollowPatternAction.Request createTestInstance() { GetAutoFollowPatternAction.Request request = new GetAutoFollowPatternAction.Request(); if (randomBoolean()) { - request.setLeaderCluster(randomAlphaOfLength(4)); + request.setName(randomAlphaOfLength(4)); } return request; } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java index f9850c4e2ed..e67509f7ee8 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java @@ -29,6 +29,7 @@ public class GetAutoFollowPatternResponseTests extends AbstractStreamableTestCas Map patterns = new HashMap<>(numPatterns); for (int i = 0; i < numPatterns; i++) { AutoFollowPattern autoFollowPattern = new AutoFollowPattern( + "remote", Collections.singletonList(randomAlphaOfLength(4)), randomAlphaOfLength(4), randomIntBetween(0, Integer.MAX_VALUE), diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java index 5efbf643d1e..2cefc163ee9 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java +++ 
b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java @@ -41,6 +41,7 @@ public class PutAutoFollowPatternRequestTests extends AbstractStreamableXContent @Override protected PutAutoFollowPatternAction.Request createTestInstance() { PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); + request.setName(randomAlphaOfLength(4)); request.setLeaderCluster(randomAlphaOfLength(4)); request.setLeaderIndexPatterns(Arrays.asList(generateRandomStringArray(4, 4, false))); if (randomBoolean()) { @@ -74,6 +75,11 @@ public class PutAutoFollowPatternRequestTests extends AbstractStreamableXContent PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); ActionRequestValidationException validationException = request.validate(); assertThat(validationException, notNullValue()); + assertThat(validationException.getMessage(), containsString("[name] is missing")); + + request.setName("name"); + validationException = request.validate(); + assertThat(validationException, notNullValue()); assertThat(validationException.getMessage(), containsString("[leader_cluster] is missing")); request.setLeaderCluster("_alias"); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java index a99e930188c..b9ac4fee3d2 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java @@ -26,7 +26,8 @@ public class ShardChangesResponseTests extends AbstractStreamableTestCase seqNoStats.getGlobalCheckpoint()) { handler.accept(ShardChangesAction.getResponse(1L, seqNoStats, - maxSeqNoOfUpdatesOrDeletes, ShardChangesAction.EMPTY_OPERATIONS_ARRAY)); + maxSeqNoOfUpdatesOrDeletes, ShardChangesAction.EMPTY_OPERATIONS_ARRAY, 1L)); return; } Translog.Operation[] ops = ShardChangesAction.getOperations(indexShard, seqNoStats.getGlobalCheckpoint(), from, @@ -440,7 +440,8 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest seqNoStats.getGlobalCheckpoint(), seqNoStats.getMaxSeqNo(), maxSeqNoOfUpdatesOrDeletes, - ops + ops, + 1L ); handler.accept(response); return; diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsResponsesTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsResponsesTests.java index 81f862e5cf0..c93da38666e 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsResponsesTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsResponsesTests.java @@ -49,6 +49,7 @@ public class StatsResponsesTests extends AbstractStreamableTestCase> existingAlreadyFollowedIndexUUIDS = new HashMap<>(); Map> existingHeaders = new HashMap<>(); - Map existingAutoFollowPatterns = new HashMap<>(); + Map existingAutoFollowPatterns = new HashMap<>(); { List existingPatterns = new ArrayList<>(); existingPatterns.add("transactions-*"); - existingAutoFollowPatterns.put("eu_cluster", - new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null)); + existingAutoFollowPatterns.put("name1", + new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null)); List existingUUIDS = new ArrayList<>(); 
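
The validation assertions above surface errors field by field: an empty request reports `[name] is missing`, and once a name is set the next `validate()` call still fails with `[leader_cluster] is missing`. A rough sketch of that accumulate-style validation, simplified to plain strings (Elasticsearch builds an `ActionRequestValidationException` via `addValidationError` instead):

----------------------------------------
import java.util.ArrayList;
import java.util.List;

final class RequestValidationSketch {
    String name;
    String leaderCluster;

    // Every unset field contributes one message, so the caller sees the full
    // list of remaining problems, which is what the test's containsString
    // assertions rely on.
    List<String> validate() {
        List<String> errors = new ArrayList<>();
        if (name == null) {
            errors.add("[name] is missing");
        }
        if (leaderCluster == null) {
            errors.add("[leader_cluster] is missing");
        }
        return errors;
    }

    public static void main(String[] args) {
        RequestValidationSketch request = new RequestValidationSketch();
        System.out.println(request.validate()); // [[name] is missing, [leader_cluster] is missing]
        request.name = "name";
        System.out.println(request.validate()); // [[leader_cluster] is missing]
    }
}
----------------------------------------
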
existingUUIDS.add("_val"); - existingAlreadyFollowedIndexUUIDS.put("eu_cluster", existingUUIDS); - existingHeaders.put("eu_cluster", Collections.singletonMap("key", "val")); + existingAlreadyFollowedIndexUUIDS.put("name1", existingUUIDS); + existingHeaders.put("name1", Collections.singletonMap("key", "val")); } { List existingPatterns = new ArrayList<>(); existingPatterns.add("logs-*"); - existingAutoFollowPatterns.put("asia_cluster", - new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null)); + existingAutoFollowPatterns.put("name2", + new AutoFollowPattern("asia_cluster", existingPatterns, null, null, null, null, null, null, null, null)); List existingUUIDS = new ArrayList<>(); existingUUIDS.add("_val"); - existingAlreadyFollowedIndexUUIDS.put("asia_cluster", existingUUIDS); - existingHeaders.put("asia_cluster", Collections.singletonMap("key", "val")); + existingAlreadyFollowedIndexUUIDS.put("name2", existingUUIDS); + existingHeaders.put("name2", Collections.singletonMap("key", "val")); } ClusterState clusterState = ClusterState.builder(new ClusterName("us_cluster")) .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, @@ -56,27 +57,28 @@ public class TransportDeleteAutoFollowPatternActionTests extends ESTestCase { .build(); Request request = new Request(); - request.setLeaderCluster("eu_cluster"); + request.setName("name1"); AutoFollowMetadata result = TransportDeleteAutoFollowPatternAction.innerDelete(request, clusterState) .getMetaData() .custom(AutoFollowMetadata.TYPE); assertThat(result.getPatterns().size(), equalTo(1)); - assertThat(result.getPatterns().get("asia_cluster"), notNullValue()); + assertThat(result.getPatterns().get("name2"), notNullValue()); + assertThat(result.getPatterns().get("name2").getLeaderCluster(), equalTo("asia_cluster")); assertThat(result.getFollowedLeaderIndexUUIDs().size(), equalTo(1)); - assertThat(result.getFollowedLeaderIndexUUIDs().get("asia_cluster"), notNullValue()); + assertThat(result.getFollowedLeaderIndexUUIDs().get("name2"), notNullValue()); assertThat(result.getHeaders().size(), equalTo(1)); - assertThat(result.getHeaders().get("asia_cluster"), notNullValue()); + assertThat(result.getHeaders().get("name2"), notNullValue()); } public void testInnerDeleteDoesNotExist() { Map> existingAlreadyFollowedIndexUUIDS = new HashMap<>(); - Map existingAutoFollowPatterns = new HashMap<>(); + Map existingAutoFollowPatterns = new HashMap<>(); Map> existingHeaders = new HashMap<>(); { List existingPatterns = new ArrayList<>(); existingPatterns.add("transactions-*"); - existingAutoFollowPatterns.put("eu_cluster", - new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null)); + existingAutoFollowPatterns.put("name1", + new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null)); existingHeaders.put("key", Collections.singletonMap("key", "val")); } ClusterState clusterState = ClusterState.builder(new ClusterName("us_cluster")) @@ -85,10 +87,10 @@ public class TransportDeleteAutoFollowPatternActionTests extends ESTestCase { .build(); Request request = new Request(); - request.setLeaderCluster("asia_cluster"); + request.setName("name2"); Exception e = expectThrows(ResourceNotFoundException.class, () -> TransportDeleteAutoFollowPatternAction.innerDelete(request, clusterState)); - assertThat(e.getMessage(), equalTo("no auto-follow patterns for cluster alias [asia_cluster] found")); + assertThat(e.getMessage(), 
equalTo("auto-follow pattern [name2] is missing")); } public void testInnerDeleteNoAutoFollowMetadata() { @@ -97,10 +99,10 @@ public class TransportDeleteAutoFollowPatternActionTests extends ESTestCase { .build(); Request request = new Request(); - request.setLeaderCluster("asia_cluster"); + request.setName("name1"); Exception e = expectThrows(ResourceNotFoundException.class, () -> TransportDeleteAutoFollowPatternAction.innerDelete(request, clusterState)); - assertThat(e.getMessage(), equalTo("no auto-follow patterns for cluster alias [asia_cluster] found")); + assertThat(e.getMessage(), equalTo("auto-follow pattern [name1] is missing")); } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java index 187e404abbb..ffc2d115091 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java @@ -23,22 +23,22 @@ public class TransportGetAutoFollowPatternActionTests extends ESTestCase { public void testGetAutoFollowPattern() { Map patterns = new HashMap<>(); - patterns.put("test_alias1", - new AutoFollowPattern(Collections.singletonList("index-*"), null, null, null, null, null, null, null, null)); - patterns.put("test_alias2", - new AutoFollowPattern(Collections.singletonList("index-*"), null, null, null, null, null, null, null, null)); + patterns.put("name1", + new AutoFollowPattern("test_alias1", Collections.singletonList("index-*"), null, null, null, null, null, null, null, null)); + patterns.put("name2", + new AutoFollowPattern("test_alias1", Collections.singletonList("index-*"), null, null, null, null, null, null, null, null)); MetaData metaData = MetaData.builder() .putCustom(AutoFollowMetadata.TYPE, new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap())) .build(); - Map result = TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "test_alias1"); + Map result = TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "name1"); assertThat(result.size(), equalTo(1)); - assertThat(result, hasEntry("test_alias1", patterns.get("test_alias1"))); + assertThat(result, hasEntry("name1", patterns.get("name1"))); result = TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, null); assertThat(result.size(), equalTo(2)); - assertThat(result, hasEntry("test_alias1", patterns.get("test_alias1"))); - assertThat(result, hasEntry("test_alias2", patterns.get("test_alias2"))); + assertThat(result, hasEntry("name1", patterns.get("name1"))); + assertThat(result, hasEntry("name2", patterns.get("name2"))); expectThrows(ResourceNotFoundException.class, () -> TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "another_alias")); @@ -51,13 +51,13 @@ public class TransportGetAutoFollowPatternActionTests extends ESTestCase { .putCustom(AutoFollowMetadata.TYPE, autoFollowMetadata) .build(); expectThrows(ResourceNotFoundException.class, - () -> TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "test_alias")); + () -> TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "name1")); } public void testGetAutoFollowPatternNoAutoFollowMetadata() { MetaData metaData = MetaData.builder().build(); expectThrows(ResourceNotFoundException.class, - () -> 
TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "test_alias")); + () -> TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "name1")); } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java index 562a5a339f4..7c4368d317f 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; import org.elasticsearch.xpack.core.ccr.action.PutAutoFollowPatternAction; import java.util.ArrayList; @@ -28,6 +29,7 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase { public void testInnerPut() { PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); + request.setName("name1"); request.setLeaderCluster("eu_cluster"); request.setLeaderIndexPatterns(Collections.singletonList("logs-*")); @@ -43,14 +45,16 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase { AutoFollowMetadata autoFollowMetadata = result.metaData().custom(AutoFollowMetadata.TYPE); assertThat(autoFollowMetadata, notNullValue()); assertThat(autoFollowMetadata.getPatterns().size(), equalTo(1)); - assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().size(), equalTo(1)); - assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().get(0), equalTo("logs-*")); + assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderCluster(), equalTo("eu_cluster")); + assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().size(), equalTo(1)); + assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().get(0), equalTo("logs-*")); assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().size(), equalTo(1)); - assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().get("eu_cluster").size(), equalTo(0)); + assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().get("name1").size(), equalTo(0)); } public void testInnerPut_existingLeaderIndices() { PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); + request.setName("name1"); request.setLeaderCluster("eu_cluster"); request.setLeaderIndexPatterns(Collections.singletonList("logs-*")); @@ -82,28 +86,30 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase { AutoFollowMetadata autoFollowMetadata = result.metaData().custom(AutoFollowMetadata.TYPE); assertThat(autoFollowMetadata, notNullValue()); assertThat(autoFollowMetadata.getPatterns().size(), equalTo(1)); - assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().size(), equalTo(1)); - assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().get(0), equalTo("logs-*")); + assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderCluster(), equalTo("eu_cluster")); + 
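
The assertions in these `innerPut` tests encode a bookkeeping contract: the pattern, its followed-leader-index UUID list, and its request headers are three parallel maps that must all be keyed by the same pattern name, and re-putting an existing name must preserve the UUIDs already recorded under it. A simplified sketch of that keyed upsert, with plain collections standing in for `AutoFollowMetadata` (illustrative only):

----------------------------------------
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class PatternUpsertSketch {

    final Map<String, List<String>> patterns = new HashMap<>();
    final Map<String, List<String>> followedIndexUUIDs = new HashMap<>();
    final Map<String, Map<String, String>> headers = new HashMap<>();

    void putPattern(String name, List<String> leaderIndexPatterns, Map<String, String> requestHeaders) {
        // A re-put keeps the UUIDs already followed under this name; a brand
        // new name starts with an empty list (testInnerPut asserts size 0).
        followedIndexUUIDs.computeIfAbsent(name, k -> new ArrayList<>());
        patterns.put(name, new ArrayList<>(leaderIndexPatterns));
        if (requestHeaders != null) {
            headers.put(name, requestHeaders);
        }
    }

    public static void main(String[] args) {
        PatternUpsertSketch metadata = new PatternUpsertSketch();
        metadata.putPattern("name1", Arrays.asList("logs-*"), null);
        metadata.followedIndexUUIDs.get("name1").add("some-leader-index-uuid");
        metadata.putPattern("name1", Arrays.asList("logs-*", "transactions-*"), null);
        System.out.println(metadata.followedIndexUUIDs.get("name1")); // the UUID survives the re-put
    }
}
----------------------------------------
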
assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().size(), equalTo(1)); + assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().get(0), equalTo("logs-*")); assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().size(), equalTo(1)); - assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().get("eu_cluster").size(), equalTo(numMatchingLeaderIndices)); + assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().get("name1").size(), equalTo(numMatchingLeaderIndices)); } public void testInnerPut_existingLeaderIndicesAndAutoFollowMetadata() { PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); + request.setName("name1"); request.setLeaderCluster("eu_cluster"); request.setLeaderIndexPatterns(Arrays.asList("logs-*", "transactions-*")); - Map existingAutoFollowPatterns = new HashMap<>(); + Map existingAutoFollowPatterns = new HashMap<>(); List existingPatterns = new ArrayList<>(); existingPatterns.add("transactions-*"); - existingAutoFollowPatterns.put("eu_cluster", - new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null)); + existingAutoFollowPatterns.put("name1", + new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null)); Map> existingAlreadyFollowedIndexUUIDS = new HashMap<>(); List existingUUIDS = new ArrayList<>(); existingUUIDS.add("_val"); - existingAlreadyFollowedIndexUUIDS.put("eu_cluster", existingUUIDS); + existingAlreadyFollowedIndexUUIDS.put("name1", existingUUIDS); Map> existingHeaders = new HashMap<>(); - existingHeaders.put("eu_cluster", Collections.singletonMap("key", "val")); + existingHeaders.put("name1", Collections.singletonMap("key", "val")); ClusterState localState = ClusterState.builder(new ClusterName("us_cluster")) .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, @@ -127,13 +133,14 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase { AutoFollowMetadata autoFollowMetadata = result.metaData().custom(AutoFollowMetadata.TYPE); assertThat(autoFollowMetadata, notNullValue()); assertThat(autoFollowMetadata.getPatterns().size(), equalTo(1)); - assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().size(), equalTo(2)); - assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().get(0), equalTo("logs-*")); - assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().get(1), equalTo("transactions-*")); + assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderCluster(), equalTo("eu_cluster")); + assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().size(), equalTo(2)); + assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().get(0), equalTo("logs-*")); + assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().get(1), equalTo("transactions-*")); assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().size(), equalTo(1)); - assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().get("eu_cluster").size(), equalTo(numLeaderIndices + 1)); + assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().get("name1").size(), equalTo(numLeaderIndices + 1)); assertThat(autoFollowMetadata.getHeaders().size(), equalTo(1)); - assertThat(autoFollowMetadata.getHeaders().get("eu_cluster"), notNullValue()); + assertThat(autoFollowMetadata.getHeaders().get("name1"), 
notNullValue()); } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java index a90bd0a4366..01f22723d14 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java @@ -85,11 +85,20 @@ public class TransportResumeFollowActionTests extends ESTestCase { Exception e = expectThrows(IllegalArgumentException.class, () -> validate(request, leaderIMD, followIMD, UUIDs, null)); assertThat(e.getMessage(), equalTo("leader index [leader_cluster:index1] does not have soft deletes enabled")); } + { + // should fail because the follower index does not have soft deletes enabled + IndexMetaData leaderIMD = createIMD("index1", 5, Settings.builder() + .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), null); + IndexMetaData followIMD = createIMD("index2", 5, Settings.EMPTY, customMetaData); + Exception e = expectThrows(IllegalArgumentException.class, () -> validate(request, leaderIMD, followIMD, UUIDs, null)); + assertThat(e.getMessage(), equalTo("follower index [index2] does not have soft deletes enabled")); + } { // should fail because the number of primary shards between leader and follow index are not equal IndexMetaData leaderIMD = createIMD("index1", 5, Settings.builder() .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), null); - IndexMetaData followIMD = createIMD("index2", 4, Settings.EMPTY, customMetaData); + IndexMetaData followIMD = createIMD("index2", 4, + Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), customMetaData); Exception e = expectThrows(IllegalArgumentException.class, () -> validate(request, leaderIMD, followIMD, UUIDs, null)); assertThat(e.getMessage(), equalTo("leader index primary shards [5] does not match with the number of shards of the follow index [4]")); @@ -98,8 +107,8 @@ public class TransportResumeFollowActionTests extends ESTestCase { // should fail, because leader index is closed IndexMetaData leaderIMD = createIMD("index1", State.CLOSE, "{}", 5, Settings.builder() .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), null); - IndexMetaData followIMD = createIMD("index2", State.OPEN, "{}", 5, Settings.builder() - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), customMetaData); + IndexMetaData followIMD = createIMD("index2", State.OPEN, "{}", 5, + Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), customMetaData); Exception e = expectThrows(IllegalArgumentException.class, () -> validate(request, leaderIMD, followIMD, UUIDs, null)); assertThat(e.getMessage(), equalTo("leader and follow index must be open")); } @@ -107,7 +116,8 @@ public class TransportResumeFollowActionTests extends ESTestCase { // should fail, because index.xpack.ccr.following_index setting has not been enabled in leader index IndexMetaData leaderIMD = createIMD("index1", 1, Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), null); - IndexMetaData followIMD = createIMD("index2", 1, Settings.EMPTY, customMetaData); + IndexMetaData followIMD = createIMD("index2", 1, + Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), 
             customMetaData);
         MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), Settings.EMPTY, "index2");
         mapperService.updateMapping(null, followIMD);
         Exception e = expectThrows(IllegalArgumentException.class,
@@ -120,7 +130,8 @@ public class TransportResumeFollowActionTests extends ESTestCase {
         IndexMetaData leaderIMD = createIMD("index1", State.OPEN, "{\"properties\": {\"field\": {\"type\": \"keyword\"}}}", 5,
             Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), null);
         IndexMetaData followIMD = createIMD("index2", State.OPEN, "{\"properties\": {\"field\": {\"type\": \"text\"}}}", 5,
-            Settings.builder().put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true).build(), customMetaData);
+            Settings.builder().put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true)
+                .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true).build(), customMetaData);
         MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), Settings.EMPTY, "index2");
         mapperService.updateMapping(null, followIMD);
         Exception e = expectThrows(IllegalArgumentException.class, () -> validate(request, leaderIMD, followIMD, UUIDs, mapperService));
@@ -135,6 +146,7 @@ public class TransportResumeFollowActionTests extends ESTestCase {
             .put("index.analysis.analyzer.my_analyzer.tokenizer", "whitespace").build(), null);
         IndexMetaData followIMD = createIMD("index2", State.OPEN, mapping, 5, Settings.builder()
             .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true)
+            .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
             .put("index.analysis.analyzer.my_analyzer.type", "custom")
             .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard").build(), customMetaData);
         Exception e = expectThrows(IllegalArgumentException.class, () -> validate(request, leaderIMD, followIMD, UUIDs, null));
@@ -144,8 +156,8 @@ public class TransportResumeFollowActionTests extends ESTestCase {
         // should fail because the following index does not have the following_index settings
         IndexMetaData leaderIMD = createIMD("index1", 5,
             Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), null);
-        Settings followingIndexSettings = randomBoolean() ? Settings.EMPTY :
-            Settings.builder().put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), false).build();
+        Settings followingIndexSettings = Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
+            .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), false).build();
         IndexMetaData followIMD = createIMD("index2", 5, followingIndexSettings, customMetaData);
         MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(),
             followingIndexSettings, "index2");
@@ -160,6 +172,7 @@ public class TransportResumeFollowActionTests extends ESTestCase {
         IndexMetaData leaderIMD = createIMD("index1", 5, Settings.builder()
             .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true").build(), null);
         IndexMetaData followIMD = createIMD("index2", 5, Settings.builder()
+            .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
             .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true).build(), customMetaData);
         MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), Settings.EMPTY, "index2");
         mapperService.updateMapping(null, followIMD);
@@ -174,6 +187,7 @@ public class TransportResumeFollowActionTests extends ESTestCase {
             .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard").build(), null);
         IndexMetaData followIMD = createIMD("index2", State.OPEN, mapping, 5, Settings.builder()
             .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true)
+            .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
             .put("index.analysis.analyzer.my_analyzer.type", "custom")
             .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard").build(), customMetaData);
         MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(),
@@ -191,6 +205,7 @@ public class TransportResumeFollowActionTests extends ESTestCase {
             .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard").build(), null);
         IndexMetaData followIMD = createIMD("index2", State.OPEN, mapping, 5, Settings.builder()
             .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true)
+            .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
             .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "10s")
             .put("index.analysis.analyzer.my_analyzer.type", "custom")
             .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard").build(), customMetaData);
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsTests.java
index 1cbfe4cec5a..856b6da2f9d 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsTests.java
@@ -41,7 +41,8 @@ public class BulkShardOperationsTests extends IndexShardTestCase {
 
     // test that we use the primary term on the follower when applying operations from the leader
     public void testPrimaryTermFromFollower() throws IOException {
-        final Settings settings = Settings.builder().put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true).build();
+        final Settings settings = Settings.builder().put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true)
+            .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true).build();
         final IndexShard followerPrimary = newStartedShard(true, settings, new FollowingEngineFactory());
 
         // we use this primary on the operations yet we expect the applied operations to have the primary term of the follower
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java
index e14b7513035..9428ca89785 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java
@@ -14,6 +14,7 @@ import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.SourceToParse;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.shard.IndexShardTestCase;
@@ -31,6 +32,7 @@ public class FollowEngineIndexShardTests extends IndexShardTestCase {
     public void testDoNotFillGaps() throws Exception {
         Settings settings = Settings.builder()
             .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true)
+            .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
             .build();
         final IndexShard indexShard = newStartedShard(false, settings, new FollowingEngineFactory());
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java
index 9e62eb6cfa1..de5291f2b20 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java
@@ -127,6 +127,7 @@ public class FollowingEngineTests extends ESTestCase {
                 .put("index.number_of_replicas", 0)
                 .put("index.version.created", Version.CURRENT)
                 .put("index.xpack.ccr.following_index", true)
+                .put("index.soft_deletes.enabled", true)
                 .build();
         final IndexMetaData indexMetaData = IndexMetaData.builder(index.getName()).settings(settings).build();
         final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings);
@@ -152,6 +153,7 @@ public class FollowingEngineTests extends ESTestCase {
                 .put("index.number_of_replicas", 0)
                 .put("index.version.created", Version.CURRENT)
                 .put("index.xpack.ccr.following_index", true)
+                .put("index.soft_deletes.enabled", true)
                 .build();
         final IndexMetaData indexMetaData = IndexMetaData.builder(index.getName()).settings(settings).build();
         final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings);
@@ -186,6 +188,7 @@ public class FollowingEngineTests extends ESTestCase {
                 .put("index.number_of_replicas", 0)
                 .put("index.version.created", Version.CURRENT)
                 .put("index.xpack.ccr.following_index", true)
+                .put("index.soft_deletes.enabled", true)
                 .build();
         final IndexMetaData indexMetaData = IndexMetaData.builder(index.getName()).settings(settings).build();
         final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings);
@@ -216,6 +219,7 @@ public class FollowingEngineTests extends ESTestCase {
                 .put("index.number_of_replicas", 0)
                 .put("index.version.created", Version.CURRENT)
                 .put("index.xpack.ccr.following_index", true)
+                .put("index.soft_deletes.enabled", true)
                 .build();
         final IndexMetaData indexMetaData = IndexMetaData.builder(index.getName()).settings(settings).build();
         final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings);
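
Every hunk in the five CCR test files above makes the same change: a follower index now has to enable soft deletes explicitly alongside the following_index marker. A minimal sketch of the resulting follower settings, assuming only that the two setting keys are the ones spelled out in the FollowingEngineTests hunks:

----
import org.elasticsearch.common.settings.Settings;

class FollowerIndexSettingsSketch {
    // the string keys mirror CcrSettings.CCR_FOLLOWING_INDEX_SETTING and
    // IndexSettings.INDEX_SOFT_DELETES_SETTING used in the hunks above
    static Settings followerIndexSettings() {
        return Settings.builder()
                .put("index.xpack.ccr.following_index", true)
                .put("index.soft_deletes.enabled", true)
                .build();
    }
}
----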
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java
index f96032d2181..219bf7187ba 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java
@@ -94,6 +94,7 @@ public class FollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase<FollowStatsMonitoringDoc> {
         Map<String, Object> template =
             XContentHelper.convertToMap(XContentType.JSON.xContent(), MonitoringTemplateUtils.loadTemplate("es"), false);
         Map<?, ?> followStatsMapping = (Map<?, ?>) XContentMapValues.extractValue("mappings.doc.properties.ccr_stats.properties", template);
-        assertThat(serializedStatus.size(), equalTo(followStatsMapping.size()));
         for (Map.Entry<String, Object> entry : serializedStatus.entrySet()) {
             String fieldName = entry.getKey();
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java
index e063a85d0b1..8f01c56c3f0 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java
@@ -175,6 +175,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
 
     public static class AutoFollowPattern implements Writeable, ToXContentObject {
 
+        public static final ParseField LEADER_CLUSTER_FIELD = new ParseField("leader_cluster");
         public static final ParseField LEADER_PATTERNS_FIELD = new ParseField("leader_index_patterns");
         public static final ParseField FOLLOW_PATTERN_FIELD = new ParseField("follow_index_pattern");
         public static final ParseField MAX_BATCH_OPERATION_COUNT = new ParseField("max_batch_operation_count");
@@ -188,10 +189,12 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
         @SuppressWarnings("unchecked")
         private static final ConstructingObjectParser<AutoFollowPattern, Void> PARSER =
             new ConstructingObjectParser<>("auto_follow_pattern",
-                args -> new AutoFollowPattern((List<String>) args[0], (String) args[1], (Integer) args[2], (Integer) args[3],
-                    (ByteSizeValue) args[4], (Integer) args[5], (Integer) args[6], (TimeValue) args[7], (TimeValue) args[8]));
+                args -> new AutoFollowPattern((String) args[0], (List<String>) args[1], (String) args[2], (Integer) args[3],
+                    (Integer) args[4], (ByteSizeValue) args[5], (Integer) args[6], (Integer) args[7], (TimeValue) args[8],
+                    (TimeValue) args[9]));
 
         static {
+            PARSER.declareString(ConstructingObjectParser.constructorArg(), LEADER_CLUSTER_FIELD);
             PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), LEADER_PATTERNS_FIELD);
             PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FOLLOW_PATTERN_FIELD);
             PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_BATCH_OPERATION_COUNT);
@@ -211,6 +214,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
                 POLL_TIMEOUT, ObjectParser.ValueType.STRING);
         }
 
+        private final String leaderCluster;
         private final List<String> leaderIndexPatterns;
         private final String followIndexPattern;
         private final Integer maxBatchOperationCount;
@@ -221,7 +225,8 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
         private final TimeValue maxRetryDelay;
         private final TimeValue pollTimeout;
 
-        public AutoFollowPattern(List<String> leaderIndexPatterns,
+        public AutoFollowPattern(String leaderCluster,
+                                 List<String> leaderIndexPatterns,
                                  String followIndexPattern,
                                  Integer maxBatchOperationCount,
                                  Integer maxConcurrentReadBatches,
@@ -230,6 +235,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
                                  Integer maxWriteBufferSize,
                                  TimeValue maxRetryDelay,
                                  TimeValue pollTimeout) {
+            this.leaderCluster = leaderCluster;
             this.leaderIndexPatterns = leaderIndexPatterns;
             this.followIndexPattern = followIndexPattern;
             this.maxBatchOperationCount = maxBatchOperationCount;
@@ -242,6 +248,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
         }
 
         public AutoFollowPattern(StreamInput in) throws IOException {
+            leaderCluster = in.readString();
             leaderIndexPatterns = in.readList(StreamInput::readString);
             followIndexPattern = in.readOptionalString();
             maxBatchOperationCount = in.readOptionalVInt();
@@ -261,6 +268,10 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
             return Regex.simpleMatch(leaderIndexPatterns, indexName);
         }
 
+        public String getLeaderCluster() {
+            return leaderCluster;
+        }
+
         public List<String> getLeaderIndexPatterns() {
             return leaderIndexPatterns;
         }
@@ -299,6 +310,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
 
         @Override
         public void writeTo(StreamOutput out) throws IOException {
+            out.writeString(leaderCluster);
             out.writeStringList(leaderIndexPatterns);
             out.writeOptionalString(followIndexPattern);
             out.writeOptionalVInt(maxBatchOperationCount);
@@ -312,6 +324,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
 
         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.field(LEADER_CLUSTER_FIELD.getPreferredName(), leaderCluster);
             builder.array(LEADER_PATTERNS_FIELD.getPreferredName(), leaderIndexPatterns.toArray(new String[0]));
             if (followIndexPattern != null) {
                 builder.field(FOLLOW_PATTERN_FIELD.getPreferredName(), followIndexPattern);
@@ -350,7 +363,8 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
             if (this == o) return true;
             if (o == null || getClass() != o.getClass()) return false;
             AutoFollowPattern that = (AutoFollowPattern) o;
-            return Objects.equals(leaderIndexPatterns, that.leaderIndexPatterns) &&
+            return Objects.equals(leaderCluster, that.leaderCluster) &&
+                Objects.equals(leaderIndexPatterns, that.leaderIndexPatterns) &&
                 Objects.equals(followIndexPattern, that.followIndexPattern) &&
                 Objects.equals(maxBatchOperationCount, that.maxBatchOperationCount) &&
                 Objects.equals(maxConcurrentReadBatches, that.maxConcurrentReadBatches) &&
@@ -364,6 +378,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> i
         @Override
         public int hashCode() {
             return Objects.hash(
+                leaderCluster,
                 leaderIndexPatterns,
                 followIndexPattern,
                 maxBatchOperationCount,
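
The new leaderCluster field is written first in writeTo and read first in the StreamInput constructor; the transport wire format is purely positional, so both sides must agree on the order. A round-trip sketch of that invariant using the stream classes from the hunks above (the values are made up):

----
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;

class WireOrderSketch {
    static void roundTrip() throws IOException {
        BytesStreamOutput out = new BytesStreamOutput();
        out.writeString("leader");                                 // leaderCluster, serialized first
        out.writeStringList(Arrays.asList("logs-*", "metrics-*")); // leaderIndexPatterns, second
        StreamInput in = out.bytes().streamInput();
        String leaderCluster = in.readString();                    // must be deserialized first, too
        List<String> patterns = in.readList(StreamInput::readString);
    }
}
----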
= new ParseField("mapping_version"); private static final ParseField TOTAL_FETCH_TIME_MILLIS_FIELD = new ParseField("total_fetch_time_millis"); + private static final ParseField TOTAL_FETCH_LEADER_TIME_MILLIS_FIELD = new ParseField("total_fetch_leader_time_millis"); private static final ParseField NUMBER_OF_SUCCESSFUL_FETCHES_FIELD = new ParseField("number_of_successful_fetches"); private static final ParseField NUMBER_OF_FAILED_FETCHES_FIELD = new ParseField("number_of_failed_fetches"); private static final ParseField OPERATIONS_RECEIVED_FIELD = new ParseField("operations_received"); @@ -87,12 +88,13 @@ public class ShardFollowNodeTaskStatus implements Task.Status { (long) args[19], (long) args[20], (long) args[21], + (long) args[22], new TreeMap<>( - ((List>>) args[22]) + ((List>>) args[23]) .stream() .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))), - (long) args[23], - (ElasticsearchException) args[24])); + (long) args[24], + (ElasticsearchException) args[25])); public static final String FETCH_EXCEPTIONS_ENTRY_PARSER_NAME = "shard-follow-node-task-status-fetch-exceptions-entry"; @@ -116,6 +118,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { STATUS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), NUMBER_OF_QUEUED_WRITES_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), MAPPING_VERSION_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_FETCH_TIME_MILLIS_FIELD); + STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_FETCH_LEADER_TIME_MILLIS_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_SUCCESSFUL_FETCHES_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_FETCHES_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), OPERATIONS_RECEIVED_FIELD); @@ -228,6 +231,12 @@ public class ShardFollowNodeTaskStatus implements Task.Status { return totalFetchTimeMillis; } + private final long totalFetchLeaderTimeMillis; + + public long totalFetchLeaderTimeMillis() { + return totalFetchLeaderTimeMillis; + } + private final long numberOfSuccessfulFetches; public long numberOfSuccessfulFetches() { @@ -309,6 +318,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { final int numberOfQueuedWrites, final long mappingVersion, final long totalFetchTimeMillis, + final long totalFetchLeaderTimeMillis, final long numberOfSuccessfulFetches, final long numberOfFailedFetches, final long operationsReceived, @@ -334,6 +344,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { this.numberOfQueuedWrites = numberOfQueuedWrites; this.mappingVersion = mappingVersion; this.totalFetchTimeMillis = totalFetchTimeMillis; + this.totalFetchLeaderTimeMillis = totalFetchLeaderTimeMillis; this.numberOfSuccessfulFetches = numberOfSuccessfulFetches; this.numberOfFailedFetches = numberOfFailedFetches; this.operationsReceived = operationsReceived; @@ -362,6 +373,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { this.numberOfQueuedWrites = in.readVInt(); this.mappingVersion = in.readVLong(); this.totalFetchTimeMillis = in.readVLong(); + this.totalFetchLeaderTimeMillis = in.readVLong(); this.numberOfSuccessfulFetches = in.readVLong(); this.numberOfFailedFetches = in.readVLong(); this.operationsReceived = in.readVLong(); @@ -397,6 +409,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { out.writeVInt(numberOfQueuedWrites); 
out.writeVLong(mappingVersion); out.writeVLong(totalFetchTimeMillis); + out.writeVLong(totalFetchLeaderTimeMillis); out.writeVLong(numberOfSuccessfulFetches); out.writeVLong(numberOfFailedFetches); out.writeVLong(operationsReceived); @@ -444,6 +457,10 @@ public class ShardFollowNodeTaskStatus implements Task.Status { TOTAL_FETCH_TIME_MILLIS_FIELD.getPreferredName(), "total_fetch_time", new TimeValue(totalFetchTimeMillis, TimeUnit.MILLISECONDS)); + builder.humanReadableField( + TOTAL_FETCH_LEADER_TIME_MILLIS_FIELD.getPreferredName(), + "total_fetch_leader_time", + new TimeValue(totalFetchLeaderTimeMillis, TimeUnit.MILLISECONDS)); builder.field(NUMBER_OF_SUCCESSFUL_FETCHES_FIELD.getPreferredName(), numberOfSuccessfulFetches); builder.field(NUMBER_OF_FAILED_FETCHES_FIELD.getPreferredName(), numberOfFailedFetches); builder.field(OPERATIONS_RECEIVED_FIELD.getPreferredName(), operationsReceived); @@ -516,6 +533,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { numberOfQueuedWrites == that.numberOfQueuedWrites && mappingVersion == that.mappingVersion && totalFetchTimeMillis == that.totalFetchTimeMillis && + totalFetchLeaderTimeMillis == that.totalFetchLeaderTimeMillis && numberOfSuccessfulFetches == that.numberOfSuccessfulFetches && numberOfFailedFetches == that.numberOfFailedFetches && operationsReceived == that.operationsReceived && @@ -552,6 +570,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { numberOfQueuedWrites, mappingVersion, totalFetchTimeMillis, + totalFetchLeaderTimeMillis, numberOfSuccessfulFetches, numberOfFailedFetches, operationsReceived, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java index b0275c6a555..7b389ec6cf1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java @@ -33,35 +33,35 @@ public class DeleteAutoFollowPatternAction extends Action public static class Request extends AcknowledgedRequest { - private String leaderCluster; + private String name; @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; - if (leaderCluster == null) { - validationException = addValidationError("leaderCluster is missing", validationException); + if (name == null) { + validationException = addValidationError("name is missing", validationException); } return validationException; } - public String getLeaderCluster() { - return leaderCluster; + public String getName() { + return name; } - public void setLeaderCluster(String leaderCluster) { - this.leaderCluster = leaderCluster; + public void setName(String name) { + this.name = name; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - leaderCluster = in.readString(); + name = in.readString(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(leaderCluster); + out.writeString(name); } @Override @@ -69,12 +69,12 @@ public class DeleteAutoFollowPatternAction extends Action if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Request request = (Request) o; - return Objects.equals(leaderCluster, request.leaderCluster); + return Objects.equals(name, 
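
The status object reports the new counter through humanReadableField, so the raw millisecond field is always emitted while the pretty total_fetch_leader_time sibling only appears when the caller opts in. A hedged sketch of that behavior (the rendered output in the comments is approximate):

----
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

class HumanReadableFieldSketch {
    static String render(boolean pretty) throws IOException {
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.humanReadable(pretty); // opt in to the human-readable sibling field
        builder.startObject();
        builder.humanReadableField("total_fetch_leader_time_millis", "total_fetch_leader_time",
                new TimeValue(1500, TimeUnit.MILLISECONDS));
        builder.endObject();
        // pretty == true  -> {"total_fetch_leader_time":"1.5s","total_fetch_leader_time_millis":1500}
        // pretty == false -> {"total_fetch_leader_time_millis":1500}
        return Strings.toString(builder);
    }
}
----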
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java
index b0275c6a555..7b389ec6cf1 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java
@@ -33,35 +33,35 @@ public class DeleteAutoFollowPatternAction extends Action<AcknowledgedResponse>
 
     public static class Request extends AcknowledgedRequest<Request> {
 
-        private String leaderCluster;
+        private String name;
 
         @Override
         public ActionRequestValidationException validate() {
             ActionRequestValidationException validationException = null;
-            if (leaderCluster == null) {
-                validationException = addValidationError("leaderCluster is missing", validationException);
+            if (name == null) {
+                validationException = addValidationError("name is missing", validationException);
             }
             return validationException;
         }
 
-        public String getLeaderCluster() {
-            return leaderCluster;
+        public String getName() {
+            return name;
         }
 
-        public void setLeaderCluster(String leaderCluster) {
-            this.leaderCluster = leaderCluster;
+        public void setName(String name) {
+            this.name = name;
         }
 
         @Override
         public void readFrom(StreamInput in) throws IOException {
             super.readFrom(in);
-            leaderCluster = in.readString();
+            name = in.readString();
         }
 
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
-            out.writeString(leaderCluster);
+            out.writeString(name);
         }
 
         @Override
@@ -69,12 +69,12 @@ public class DeleteAutoFollowPatternAction extends Action<AcknowledgedResponse>
             if (this == o) return true;
             if (o == null || getClass() != o.getClass()) return false;
             Request request = (Request) o;
-            return Objects.equals(leaderCluster, request.leaderCluster);
+            return Objects.equals(name, request.name);
         }
 
         @Override
         public int hashCode() {
-            return Objects.hash(leaderCluster);
+            return Objects.hash(name);
         }
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java
index 2379abf59c9..a226118e17b 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java
@@ -36,14 +36,14 @@ public class GetAutoFollowPatternAction extends Action
 
-        private String leaderCluster;
+        private String name;
 
         public Request() {
         }
 
         public Request(StreamInput in) throws IOException {
             super(in);
-            this.leaderCluster = in.readOptionalString();
+            this.name = in.readOptionalString();
         }
 
         @Override
@@ -51,18 +51,18 @@ public class GetAutoFollowPatternAction extends Action
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java
     private static final ObjectParser<Request, Void> PARSER = new ObjectParser<>("put_auto_follow_pattern_request", Request::new);
+    private static final ParseField NAME_FIELD = new ParseField("name");
+
     static {
-        PARSER.declareString(Request::setLeaderCluster, LEADER_CLUSTER_FIELD);
+        PARSER.declareString(Request::setName, NAME_FIELD);
+        PARSER.declareString(Request::setLeaderCluster, AutoFollowPattern.LEADER_CLUSTER_FIELD);
         PARSER.declareStringArray(Request::setLeaderIndexPatterns, AutoFollowPattern.LEADER_PATTERNS_FIELD);
         PARSER.declareString(Request::setFollowIndexNamePattern, AutoFollowPattern.FOLLOW_PATTERN_FIELD);
         PARSER.declareInt(Request::setMaxBatchOperationCount, AutoFollowPattern.MAX_BATCH_OPERATION_COUNT);
@@ -67,20 +70,21 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
             AutoFollowPattern.POLL_TIMEOUT, ObjectParser.ValueType.STRING);
     }
 
-    public static Request fromXContent(XContentParser parser, String remoteClusterAlias) throws IOException {
+    public static Request fromXContent(XContentParser parser, String name) throws IOException {
         Request request = PARSER.parse(parser, null);
-        if (remoteClusterAlias != null) {
-            if (request.leaderCluster == null) {
-                request.leaderCluster = remoteClusterAlias;
+        if (name != null) {
+            if (request.name == null) {
+                request.name = name;
             } else {
-                if (request.leaderCluster.equals(remoteClusterAlias) == false) {
-                    throw new IllegalArgumentException("provided leaderCluster is not equal");
+                if (request.name.equals(name) == false) {
+                    throw new IllegalArgumentException("provided name is not equal");
                 }
             }
         }
         return request;
     }
 
+    private String name;
     private String leaderCluster;
     private List<String> leaderIndexPatterns;
     private String followIndexNamePattern;
@@ -96,8 +100,11 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
 
     @Override
     public ActionRequestValidationException validate() {
         ActionRequestValidationException validationException = null;
+        if (name == null) {
+            validationException = addValidationError("[" + NAME_FIELD.getPreferredName() + "] is missing", validationException);
+        }
         if (leaderCluster == null) {
-            validationException = addValidationError("[" + LEADER_CLUSTER_FIELD.getPreferredName() +
+            validationException = addValidationError("[" + AutoFollowPattern.LEADER_CLUSTER_FIELD.getPreferredName() +
                 "] is missing", validationException);
         }
         if (leaderIndexPatterns == null || leaderIndexPatterns.isEmpty()) {
@@ -120,6 +127,14 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
         return validationException;
     }
 
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
     public String getLeaderCluster() {
         return leaderCluster;
     }
@@ -203,6 +218,7 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
+        name = in.readString();
         leaderCluster = in.readString();
         leaderIndexPatterns = in.readList(StreamInput::readString);
         followIndexNamePattern = in.readOptionalString();
@@ -218,6 +234,7 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
+        out.writeString(name);
         out.writeString(leaderCluster);
         out.writeStringList(leaderIndexPatterns);
         out.writeOptionalString(followIndexNamePattern);
@@ -234,7 +251,8 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
         {
-            builder.field(LEADER_CLUSTER_FIELD.getPreferredName(), leaderCluster);
+            builder.field(NAME_FIELD.getPreferredName(), name);
+            builder.field(AutoFollowPattern.LEADER_CLUSTER_FIELD.getPreferredName(), leaderCluster);
             builder.field(AutoFollowPattern.LEADER_PATTERNS_FIELD.getPreferredName(), leaderIndexPatterns);
             if (followIndexNamePattern != null) {
                 builder.field(AutoFollowPattern.FOLLOW_PATTERN_FIELD.getPreferredName(), followIndexNamePattern);
@@ -270,7 +288,8 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
         if (this == o) return true;
         if (o == null || getClass() != o.getClass()) return false;
         Request request = (Request) o;
-        return Objects.equals(leaderCluster, request.leaderCluster) &&
+        return Objects.equals(name, request.name) &&
+            Objects.equals(leaderCluster, request.leaderCluster) &&
             Objects.equals(leaderIndexPatterns, request.leaderIndexPatterns) &&
             Objects.equals(followIndexNamePattern, request.followIndexNamePattern) &&
             Objects.equals(maxBatchOperationCount, request.maxBatchOperationCount) &&
@@ -285,7 +304,8 @@ public class PutAutoFollowPatternAction extends Action<AcknowledgedResponse> {
     @Override
     public int hashCode() {
         return Objects.hash(
-            leaderCluster,
+            name,
+            leaderCluster,
             leaderIndexPatterns,
             followIndexNamePattern,
             maxBatchOperationCount,
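
After this change an auto-follow pattern is addressed by its own name, and the remote cluster alias moves into the request body as leader_cluster. A hedged sketch of building a valid request programmatically, using only the setters the parser above binds (the values are made up):

----
import java.util.Collections;
import org.elasticsearch.xpack.core.ccr.action.PutAutoFollowPatternAction;

class PutAutoFollowPatternSketch {
    static PutAutoFollowPatternAction.Request newRequest() {
        PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request();
        request.setName("my-pattern");       // new: the pattern's own identifier
        request.setLeaderCluster("leader");  // the remote cluster alias, now a body field
        request.setLeaderIndexPatterns(Collections.singletonList("logs-*"));
        assert request.validate() == null;   // name, leader_cluster and the patterns are all required
        return request;
    }
}
----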
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/DeleteRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/DeleteRollupJobAction.java
index df2c70c7653..f1c6213cd70 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/DeleteRollupJobAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/DeleteRollupJobAction.java
@@ -7,22 +7,29 @@ package org.elasticsearch.xpack.core.rollup.action;
 
 import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.action.support.master.AcknowledgedRequest;
-import org.elasticsearch.action.support.master.AcknowledgedResponse;
-import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
+import org.elasticsearch.action.FailedNodeException;
+import org.elasticsearch.action.TaskOperationFailure;
+import org.elasticsearch.action.support.tasks.BaseTasksRequest;
+import org.elasticsearch.action.support.tasks.BaseTasksResponse;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ToXContentFragment;
+import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
 import org.elasticsearch.xpack.core.rollup.RollupField;
 
 import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
 import java.util.Objects;
 
-public class DeleteRollupJobAction extends Action<AcknowledgedResponse> {
+public class DeleteRollupJobAction extends Action<DeleteRollupJobAction.Response> {
 
     public static final DeleteRollupJobAction INSTANCE = new DeleteRollupJobAction();
     public static final String NAME = "cluster:admin/xpack/rollup/delete";
@@ -32,11 +39,11 @@ public class DeleteRollupJobAction extends Action<AcknowledgedResponse> {
     }
 
     @Override
-    public AcknowledgedResponse newResponse() {
-        return new AcknowledgedResponse();
+    public Response newResponse() {
+        return new Response();
     }
 
-    public static class Request extends AcknowledgedRequest<Request> implements ToXContent {
+    public static class Request extends BaseTasksRequest<Request> implements ToXContentFragment {
         private String id;
 
         public Request(String id) {
@@ -45,6 +52,11 @@ public class DeleteRollupJobAction extends Action<AcknowledgedResponse> {
 
         public Request() {}
 
+        @Override
+        public boolean match(Task task) {
+            return task.getDescription().equals(RollupField.NAME + "_" + id);
+        }
+
         public String getId() {
             return id;
         }
@@ -90,10 +102,74 @@ public class DeleteRollupJobAction extends Action<AcknowledgedResponse> {
         }
     }
 
-    public static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, AcknowledgedResponse, RequestBuilder> {
-
+    public static class RequestBuilder extends ActionRequestBuilder<Request, Response> {
         protected RequestBuilder(ElasticsearchClient client, DeleteRollupJobAction action) {
-            super(client, action, new Request());
+            super(client, action, new DeleteRollupJobAction.Request());
+        }
+    }
+
+    public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject {
+
+        private boolean acknowledged;
+
+        public Response(StreamInput in) throws IOException {
+            super(Collections.emptyList(), Collections.emptyList());
+            readFrom(in);
+        }
+
+        public Response(boolean acknowledged, List<TaskOperationFailure> taskFailures, List<FailedNodeException> nodeFailures) {
+            super(taskFailures, nodeFailures);
+            this.acknowledged = acknowledged;
+        }
+
+        public Response(boolean acknowledged) {
+            super(Collections.emptyList(), Collections.emptyList());
+            this.acknowledged = acknowledged;
+        }
+
+        public Response() {
+            super(Collections.emptyList(), Collections.emptyList());
+            this.acknowledged = false;
+        }
+
+        public boolean isDeleted() {
+            return acknowledged;
+        }
+
+        @Override
+        public void readFrom(StreamInput in) throws IOException {
+            super.readFrom(in);
+            acknowledged = in.readBoolean();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeBoolean(acknowledged);
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+            {
+                toXContentCommon(builder, params);
+                builder.field("acknowledged", acknowledged);
+            }
+            builder.endObject();
+            return builder;
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+            DeleteRollupJobAction.Response response = (DeleteRollupJobAction.Response) o;
+            return super.equals(o) && acknowledged == response.acknowledged;
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(super.hashCode(), acknowledged);
         }
     }
 }
diff --git a/x-pack/plugin/core/src/main/resources/monitoring-es.json b/x-pack/plugin/core/src/main/resources/monitoring-es.json
index d55cdd690be..791a0ea02c3 100644
--- a/x-pack/plugin/core/src/main/resources/monitoring-es.json
+++ b/x-pack/plugin/core/src/main/resources/monitoring-es.json
@@ -971,6 +971,9 @@
           "total_fetch_time_millis": {
             "type": "long"
           },
+          "total_fetch_leader_time_millis": {
+            "type": "long"
+          },
           "number_of_successful_fetches": {
             "type": "long"
           },
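
The delete-rollup response is no longer a bare AcknowledgedResponse; it is a BaseTasksResponse that still renders an acknowledged flag. A hedged sketch of what the Response defined above serializes to:

----
import java.io.IOException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.xpack.core.rollup.action.DeleteRollupJobAction;

class DeleteRollupResponseSketch {
    static String render() throws IOException {
        DeleteRollupJobAction.Response response = new DeleteRollupJobAction.Response(true);
        XContentBuilder builder = XContentFactory.jsonBuilder();
        response.toXContent(builder, ToXContent.EMPTY_PARAMS);
        // roughly {"acknowledged":true}, plus task/node failure arrays when present
        return Strings.toString(builder);
    }
}
----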
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportDeleteRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportDeleteRollupJobAction.java
index 97b4483b1ff..5cdc40df4d6 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportDeleteRollupJobAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportDeleteRollupJobAction.java
@@ -5,103 +5,101 @@
  */
 package org.elasticsearch.xpack.rollup.action;
 
-import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionListenerResponseHandler;
+import org.elasticsearch.action.FailedNodeException;
+import org.elasticsearch.action.TaskOperationFailure;
 import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.action.support.master.AcknowledgedResponse;
-import org.elasticsearch.action.support.master.TransportMasterNodeAction;
+import org.elasticsearch.action.support.tasks.TransportTasksAction;
 import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.block.ClusterBlockException;
-import org.elasticsearch.cluster.block.ClusterBlockLevel;
-import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.discovery.MasterNotDiscoveredException;
 import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
-import org.elasticsearch.persistent.PersistentTasksService;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.core.indexing.IndexerState;
 import org.elasticsearch.xpack.core.rollup.action.DeleteRollupJobAction;
-import org.elasticsearch.xpack.core.rollup.job.RollupJob;
+import org.elasticsearch.xpack.core.rollup.job.RollupJobStatus;
+import org.elasticsearch.xpack.rollup.job.RollupJobTask;
 
-import java.util.Objects;
-import java.util.concurrent.TimeUnit;
+import java.io.IOException;
+import java.util.List;
 
-public class TransportDeleteRollupJobAction
-        extends TransportMasterNodeAction<DeleteRollupJobAction.Request, AcknowledgedResponse> {
-
-    private final PersistentTasksService persistentTasksService;
+public class TransportDeleteRollupJobAction extends TransportTasksAction<RollupJobTask, DeleteRollupJobAction.Request,
+        DeleteRollupJobAction.Response, DeleteRollupJobAction.Response> {
 
     @Inject
-    public TransportDeleteRollupJobAction(Settings settings, TransportService transportService, ThreadPool threadPool,
-                                          ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
-                                          PersistentTasksService persistentTasksService, ClusterService clusterService) {
-        super(settings, DeleteRollupJobAction.NAME, transportService, clusterService, threadPool, actionFilters,
-            indexNameExpressionResolver, DeleteRollupJobAction.Request::new);
-        this.persistentTasksService = persistentTasksService;
+    public TransportDeleteRollupJobAction(Settings settings, TransportService transportService,
+                                          ActionFilters actionFilters, ClusterService clusterService) {
+        super(settings, DeleteRollupJobAction.NAME, clusterService, transportService, actionFilters,
+            DeleteRollupJobAction.Request::new, DeleteRollupJobAction.Response::new, ThreadPool.Names.SAME);
     }
 
     @Override
-    protected String executor() {
-        return ThreadPool.Names.SAME;
-    }
+    protected void doExecute(Task task, DeleteRollupJobAction.Request request, ActionListener<DeleteRollupJobAction.Response> listener) {
+        final ClusterState state = clusterService.state();
+        final DiscoveryNodes nodes = state.nodes();
 
-    @Override
-    protected AcknowledgedResponse newResponse() {
-        return new AcknowledgedResponse();
-    }
-
-    @Override
-    protected void masterOperation(DeleteRollupJobAction.Request request, ClusterState state,
-                                   ActionListener<AcknowledgedResponse> listener) throws Exception {
-
-        String jobId = request.getId();
-        TimeValue timeout = new TimeValue(60, TimeUnit.SECONDS); // TODO make this a config option
-
-        // Step 1. Cancel the persistent task
-        persistentTasksService.sendRemoveRequest(jobId, new ActionListener<PersistentTasksCustomMetaData.PersistentTask<?>>() {
-            @Override
-            public void onResponse(PersistentTasksCustomMetaData.PersistentTask<?> persistentTask) {
-                logger.debug("Request to cancel Task for Rollup job [" + jobId + "] successful.");
-
-                // Step 2. Wait for the task to finish cancellation internally
-                persistentTasksService.waitForPersistentTaskCondition(jobId, Objects::isNull, timeout,
-                    new PersistentTasksService.WaitForPersistentTaskListener<RollupJob>() {
-                        @Override
-                        public void onResponse(PersistentTasksCustomMetaData.PersistentTask<?> task) {
-                            logger.debug("Task for Rollup job [" + jobId + "] successfully canceled.");
-                            listener.onResponse(new AcknowledgedResponse(true));
-                        }
-
-                        @Override
-                        public void onFailure(Exception e) {
-                            logger.error("Error while cancelling task for Rollup job [" + jobId
-                                + "]." + e);
-                            listener.onFailure(e);
-                        }
-
-                        @Override
-                        public void onTimeout(TimeValue timeout) {
-                            String msg = "Stopping of Rollup job [" + jobId + "] timed out after [" + timeout + "].";
-                            logger.warn(msg);
-                            listener.onFailure(new ElasticsearchException(msg));
-                        }
-                    });
+        if (nodes.isLocalNodeElectedMaster()) {
+            PersistentTasksCustomMetaData pTasksMeta = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE);
+            if (pTasksMeta != null && pTasksMeta.getTask(request.getId()) != null) {
+                super.doExecute(task, request, listener);
+            } else {
+                // If we couldn't find the job in the persistent task CS, it means it was deleted prior to this call,
+                // no need to go looking for the allocated task
+                listener.onFailure(new ResourceNotFoundException("the task with id [" + request.getId() + "] doesn't exist"));
             }
 
-            @Override
-            public void onFailure(Exception e) {
-                logger.error("Error while requesting to cancel task for Rollup job [" + jobId
-                    + "]" + e);
-                listener.onFailure(e);
+        } else {
+            // Delegates DeleteJob to elected master node, so it becomes the coordinating node.
+            // Non-master nodes may have a stale cluster state that shows jobs which are cancelled
+            // on the master, which makes testing difficult.
+            if (nodes.getMasterNode() == null) {
+                listener.onFailure(new MasterNotDiscoveredException("no known master nodes"));
+            } else {
+                transportService.sendRequest(nodes.getMasterNode(), actionName, request,
+                    new ActionListenerResponseHandler<>(listener, DeleteRollupJobAction.Response::new));
             }
-        });
-
+        }
     }
 
     @Override
-    protected ClusterBlockException checkBlock(DeleteRollupJobAction.Request request, ClusterState state) {
-        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
+    protected void taskOperation(DeleteRollupJobAction.Request request, RollupJobTask jobTask,
+                                 ActionListener<DeleteRollupJobAction.Response> listener) {
+
+        assert jobTask.getConfig().getId().equals(request.getId());
+        IndexerState state = ((RollupJobStatus) jobTask.getStatus()).getIndexerState();
+        if (state.equals(IndexerState.STOPPED) ) {
+            jobTask.onCancelled();
+            listener.onResponse(new DeleteRollupJobAction.Response(true));
+        } else {
+            listener.onFailure(new IllegalStateException("Could not delete job [" + request.getId() + "] because " +
+                "indexer state is [" + state + "].  Job must be [" + IndexerState.STOPPED + "] before deletion."));
+        }
+    }
+
+    @Override
+    protected DeleteRollupJobAction.Response newResponse(DeleteRollupJobAction.Request request, List<DeleteRollupJobAction.Response> tasks,
+                                                         List<TaskOperationFailure> taskOperationFailures,
+                                                         List<FailedNodeException> failedNodeExceptions) {
+        // There should theoretically only be one task running the rollup job
+        // If there are more, in production it should be ok as long as they all acknowledge shutting down.
+        // But in testing we'd like to know there were more than one hence the assert
+        assert tasks.size() + taskOperationFailures.size() == 1;
+        boolean cancelled = tasks.size() > 0 && tasks.stream().allMatch(DeleteRollupJobAction.Response::isDeleted);
+        return new DeleteRollupJobAction.Response(cancelled, taskOperationFailures, failedNodeExceptions);
+    }
+
+    @Override
+    protected DeleteRollupJobAction.Response readTaskResponse(StreamInput in) throws IOException {
+        DeleteRollupJobAction.Response response = new DeleteRollupJobAction.Response();
+        response.readFrom(in);
+        return response;
     }
 }
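
taskOperation above refuses to delete a job whose indexer is not STOPPED, so a caller is expected to stop the job first and delete second. A hypothetical client-side sequence; StopRollupJobAction and its Request(String) constructor are assumed to mirror the delete action and are not part of this diff:

----
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.core.rollup.action.DeleteRollupJobAction;
import org.elasticsearch.xpack.core.rollup.action.StopRollupJobAction;

class StopThenDeleteSketch {
    // hypothetical: assumes StopRollupJobAction.Request takes the job id like Delete's does
    static void stopThenDelete(Client client, Logger logger, String jobId) {
        client.execute(StopRollupJobAction.INSTANCE, new StopRollupJobAction.Request(jobId), ActionListener.wrap(
                stopped -> client.execute(DeleteRollupJobAction.INSTANCE, new DeleteRollupJobAction.Request(jobId),
                        ActionListener.wrap(
                                deleted -> logger.info("deleted: " + deleted.isDeleted()),
                                e -> logger.error("delete failed", e))),
                e -> logger.error("stop failed", e)));
    }
}
----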
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java
index d16f47b1a35..3fbe77b64b4 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java
@@ -349,7 +349,7 @@ public class RollupJobTask extends AllocatedPersistentTask implements SchedulerE
      * shut down from the inside.
      */
     @Override
-    protected synchronized void onCancelled() {
+    public synchronized void onCancelled() {
         logger.info("Received cancellation request for Rollup job [" + job.getConfig().getId() + "], state: [" + indexer.getState() + "]");
         if (indexer.abort()) {
             // there is no background job running, we can shutdown safely
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java
index 140b7d9b769..77d39a45ac5 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java
@@ -12,6 +12,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.rest.action.RestToXContentListener;
 import org.elasticsearch.xpack.core.rollup.action.DeleteRollupJobAction;
 import org.elasticsearch.xpack.rollup.Rollup;
@@ -31,7 +32,16 @@ public class RestDeleteRollupJobAction extends BaseRestHandler {
         String id = restRequest.param(ID.getPreferredName());
         DeleteRollupJobAction.Request request = new DeleteRollupJobAction.Request(id);
 
-        return channel -> client.execute(DeleteRollupJobAction.INSTANCE, request, new RestToXContentListener<>(channel));
+        return channel -> client.execute(DeleteRollupJobAction.INSTANCE, request,
+            new RestToXContentListener<DeleteRollupJobAction.Response>(channel) {
+                @Override
+                protected RestStatus getStatus(DeleteRollupJobAction.Response response) {
+                    if (response.getNodeFailures().size() > 0 || response.getTaskFailures().size() > 0) {
+                        return RestStatus.INTERNAL_SERVER_ERROR;
+                    }
+                    return RestStatus.OK;
+                }
+            });
     }
 
     @Override
diff --git a/x-pack/plugin/security/cli/build.gradle b/x-pack/plugin/security/cli/build.gradle
index 377d10ec7f2..9c76f1758a3 100644
--- a/x-pack/plugin/security/cli/build.gradle
+++ b/x-pack/plugin/security/cli/build.gradle
@@ -1,4 +1,4 @@
-import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask
+import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis
 
 apply plugin: 'elasticsearch.build'
 
@@ -26,7 +26,7 @@ if (project.inFipsJvm) {
   test.enabled = false
  // Forbidden APIs non-portable checks fail because bouncy castle classes being used from the FIPS JDK since those are
  // not part of the Java specification - all of this is as designed, so we have to relax this check for FIPS.
-  tasks.withType(ForbiddenApisCliTask) {
+  tasks.withType(CheckForbiddenApis) {
     bundledSignatures -= "jdk-non-portable"
   }
   // FIPS JVM includes many classes from bouncycastle which count as jar hell for the third party audit,
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java
index cac9baf1512..26fa8405ccf 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java
@@ -46,9 +46,9 @@ import static org.elasticsearch.xpack.core.security.authz.IndicesAndAliasesResol
 
 class IndicesAndAliasesResolver {
 
-    //`*,-*` what we replace indices with if we need Elasticsearch to return empty responses without throwing exception
-    private static final String[] NO_INDICES_ARRAY = new String[] { "*", "-*" };
-    static final List<String> NO_INDICES_LIST = Arrays.asList(NO_INDICES_ARRAY);
+    //`*,-*` what we replace indices and aliases with if we need Elasticsearch to return empty responses without throwing exception
+    static final String[] NO_INDICES_OR_ALIASES_ARRAY = new String[] { "*", "-*" };
+    static final List<String> NO_INDICES_OR_ALIASES_LIST = Arrays.asList(NO_INDICES_OR_ALIASES_ARRAY);
 
     private final IndexNameExpressionResolver nameExpressionResolver;
     private final RemoteClusterResolver remoteClusterResolver;
@@ -165,7 +165,7 @@ class IndicesAndAliasesResolver {
                     //this is how we tell es core to return an empty response, we can let the request through being sure
                     //that the '-*' wildcard expression will be resolved to no indices. We can't let empty indices through
                    //as that would be resolved to _all by es core.
-                    replaceable.indices(NO_INDICES_ARRAY);
+                    replaceable.indices(NO_INDICES_OR_ALIASES_ARRAY);
                     indicesReplacedWithNoIndices = true;
                     resolvedIndicesBuilder.addLocal(NO_INDEX_PLACEHOLDER);
                 } else {
@@ -176,8 +176,6 @@ class IndicesAndAliasesResolver {
             }
         } else {
             if (containsWildcards(indicesRequest)) {
-                //an alias can still contain '*' in its name as of 5.0. Such aliases cannot be referred to when using
-                //the security plugin, otherwise the following exception gets thrown
                 throw new IllegalStateException("There are no external requests known to support wildcards that don't support replacing " +
                     "their indices");
             }
@@ -198,8 +196,6 @@ class IndicesAndAliasesResolver {
             if (aliasesRequest.expandAliasesWildcards()) {
                 List<String> aliases = replaceWildcardsWithAuthorizedAliases(aliasesRequest.aliases(),
                     loadAuthorizedAliases(authorizedIndices.get(), metaData));
-                //it may be that we replace aliases with an empty array, in case there are no authorized aliases for the action.
-                //MetaData#findAliases will return nothing when some alias was originally requested, which was replaced with empty.
                 aliasesRequest.replaceAliases(aliases.toArray(new String[aliases.size()]));
             }
             if (indicesReplacedWithNoIndices) {
@@ -213,6 +209,13 @@ class IndicesAndAliasesResolver {
             } else {
                 resolvedIndicesBuilder.addLocal(aliasesRequest.aliases());
             }
+            // if no aliases are authorized, then fill in an expression that
+            // MetaData#findAliases evaluates to the empty alias list. You cannot put
+            // "nothing" (the empty list) explicitly because this is resolved by es core to
+            // _all
+            if (aliasesRequest.aliases().length == 0) {
+                aliasesRequest.replaceAliases(NO_INDICES_OR_ALIASES_ARRAY);
+            }
         }
         return resolvedIndicesBuilder.build();
     }
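
The `*,-*` expression is the resolver's way of asking es core for a guaranteed-empty result: an empty indices or aliases array would be rewritten to _all, whereas `*` selects everything and `-*` then removes it. A small sketch of the request-side effect, with SearchRequest standing in for any request whose indices can be replaced:

----
import org.elasticsearch.action.search.SearchRequest;

class NoIndicesExpressionSketch {
    // "*" matches everything, "-*" subtracts everything: es core resolves the
    // pair to "no indices" instead of defaulting the empty array to _all
    static SearchRequest emptyButValid() {
        SearchRequest request = new SearchRequest();
        request.indices("*", "-*");
        return request;
    }
}
----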
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
index 47cf458e19a..17562b13f0d 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
@@ -818,7 +818,7 @@ public class AuthorizationServiceTests extends ESTestCase {
         final SearchRequest searchRequest = new SearchRequest("_all");
         authorize(authentication, SearchAction.NAME, searchRequest);
         assertEquals(2, searchRequest.indices().length);
-        assertEquals(IndicesAndAliasesResolver.NO_INDICES_LIST, Arrays.asList(searchRequest.indices()));
+        assertEquals(IndicesAndAliasesResolver.NO_INDICES_OR_ALIASES_LIST, Arrays.asList(searchRequest.indices()));
     }
 
     public void testGrantedNonXPackUserCanExecuteMonitoringOperationsAgainstSecurityIndex() {
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java
index f9d91527942..4dc0909552c 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java
@@ -778,11 +778,11 @@ public class IndicesAndAliasesResolverTests extends ESTestCase {
 
     public void testResolveAliasesWildcardsIndicesAliasesRequestDeleteActionsNoAuthorizedIndices() {
         IndicesAliasesRequest request = new IndicesAliasesRequest();
         request.addAliasAction(AliasActions.remove().index("foo*").alias("foo*"));
-        //no authorized aliases match bar*, hence aliases are replaced with empty string for that action
+        //no authorized aliases match bar*, hence aliases are replaced with no-aliases-expression for that action
         request.addAliasAction(AliasActions.remove().index("*bar").alias("bar*"));
         resolveIndices(request, buildAuthorizedIndices(user, IndicesAliasesAction.NAME));
         assertThat(request.getAliasActions().get(0).aliases(), arrayContainingInAnyOrder("foofoobar", "foobarfoo"));
-        assertThat(request.getAliasActions().get(1).aliases().length, equalTo(0));
+        assertThat(request.getAliasActions().get(1).aliases(), arrayContaining(IndicesAndAliasesResolver.NO_INDICES_OR_ALIASES_ARRAY));
     }
 
     public void testResolveWildcardsIndicesAliasesRequestAddAndDeleteActions() {
@@ -1084,11 +1084,11 @@ public class IndicesAndAliasesResolverTests extends ESTestCase {
 
     public void testResolveAliasesWildcardsGetAliasesRequestNoAuthorizedIndices() {
         GetAliasesRequest request = new GetAliasesRequest();
-        //no authorized aliases match bar*, hence aliases are replaced with empty array
+        //no authorized aliases match bar*, hence aliases are replaced with the no-aliases-expression
         request.aliases("bar*");
         request.indices("*bar");
         resolveIndices(request, buildAuthorizedIndices(user, GetAliasesAction.NAME));
-        assertThat(request.aliases().length, equalTo(0));
+        assertThat(request.aliases(), arrayContaining(IndicesAndAliasesResolver.NO_INDICES_OR_ALIASES_ARRAY));
     }
 
     public void testResolveAliasesExclusionWildcardsGetAliasesRequest() {
@@ -1112,10 +1112,11 @@ public class IndicesAndAliasesResolverTests extends ESTestCase {
             request.aliases("_all");
         }
         request.indices("non_existing");
-        //current user is not authorized for any index, foo* resolves to no indices, aliases are replaced with empty array
+        //current user is not authorized for any index, aliases are replaced with the no-aliases-expression
         ResolvedIndices resolvedIndices = resolveIndices(request, buildAuthorizedIndices(userNoIndices, GetAliasesAction.NAME));
         assertThat(resolvedIndices.getLocal(), contains("non_existing"));
-        assertThat(request.aliases().length, equalTo(0));
+        assertThat(Arrays.asList(request.indices()), contains("non_existing"));
+        assertThat(request.aliases(), arrayContaining(IndicesAndAliasesResolver.NO_INDICES_OR_ALIASES_ARRAY));
     }
 
     /**
@@ -1372,7 +1373,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase {
         final List<String> localIndices = resolvedIndices.getLocal();
         assertEquals(1, localIndices.size());
         assertEquals(IndicesAndAliasesResolverField.NO_INDEX_PLACEHOLDER, localIndices.iterator().next());
-        assertEquals(IndicesAndAliasesResolver.NO_INDICES_LIST, Arrays.asList(request.indices()));
+        assertEquals(IndicesAndAliasesResolver.NO_INDICES_OR_ALIASES_LIST, Arrays.asList(request.indices()));
         assertEquals(0, resolvedIndices.getRemote().size());
     }
 
diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfigurationTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfigurationTests.java
index c7f2b50ace0..1497545d893 100644
--- a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfigurationTests.java
+++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfigurationTests.java
@@ -73,15 +73,40 @@ public class JdbcConfigurationTests extends ESTestCase {
         assertThat(ci.debugOut(), is("jdbc.out"));
     }
 
-    public void testHttpWithSSLEnabled() throws Exception {
+    public void testHttpWithSSLEnabledFromProperty() throws Exception {
         JdbcConfiguration ci = ci("jdbc:es://test?ssl=true");
         assertThat(ci.baseUri().toString(), is("https://test:9200/"));
     }
+
+    public void testHttpWithSSLEnabledFromPropertyAndDisabledFromProtocol() throws Exception {
+        JdbcConfiguration ci = ci("jdbc:es://http://test?ssl=true");
+        assertThat(ci.baseUri().toString(), is("https://test:9200/"));
+    }
+
+    public void testHttpWithSSLEnabledFromProtocol() throws Exception {
+        JdbcConfiguration ci = ci("jdbc:es://https://test:9200");
+        assertThat(ci.baseUri().toString(), is("https://test:9200/"));
+    }
+
+    public void testHttpWithSSLEnabledFromProtocolAndProperty() throws Exception {
+        JdbcConfiguration ci = ci("jdbc:es://https://test:9200?ssl=true");
+        assertThat(ci.baseUri().toString(), is("https://test:9200/"));
+    }
 
-    public void testHttpWithSSLDisabled() throws Exception {
+    public void testHttpWithSSLDisabledFromProperty() throws Exception {
         JdbcConfiguration ci = ci("jdbc:es://test?ssl=false");
         assertThat(ci.baseUri().toString(), is("http://test:9200/"));
     }
+
+    public void testHttpWithSSLDisabledFromPropertyAndProtocol() throws Exception {
+        JdbcConfiguration ci = ci("jdbc:es://http://test?ssl=false");
+        assertThat(ci.baseUri().toString(), is("http://test:9200/"));
+    }
+
+    public void testHttpWithSSLDisabledFromPropertyAndEnabledFromProtocol() throws Exception {
+        Exception e = expectThrows(JdbcSQLException.class, () -> ci("jdbc:es://https://test?ssl=false"));
+        assertEquals("Cannot enable SSL: HTTPS protocol being used in the URL and SSL disabled in properties", e.getMessage());
+    }
 
     public void testTimoutOverride() throws Exception {
         Properties properties = new Properties();
diff --git a/x-pack/plugin/sql/sql-cli/build.gradle b/x-pack/plugin/sql/sql-cli/build.gradle
index 0b2559c6a84..f0022040b49 100644
--- a/x-pack/plugin/sql/sql-cli/build.gradle
+++ b/x-pack/plugin/sql/sql-cli/build.gradle
@@ -1,4 +1,4 @@
-import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask
+import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis
 
 /*
  * This project is named sql-cli because it is in the "org.elasticsearch.plugin"
@@ -75,7 +75,7 @@ artifacts {
 }
 
 
-tasks.withType(ForbiddenApisCliTask) {
+tasks.withType(CheckForbiddenApis) {
   signaturesFiles += files('src/forbidden/cli-signatures.txt')
 }
 
"https" : "http", null, uri.getHost(), uri.getPort(), uri.getPath(), uri.getQuery(), + uri.getFragment()); } catch (URISyntaxException ex) { throw new ClientException("Cannot parse process baseURI [" + connectionString + "] " + ex.getMessage()); } diff --git a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/SslConfig.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/SslConfig.java index fecfb44492c..9e89f7b848c 100644 --- a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/SslConfig.java +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/SslConfig.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.client; import java.io.IOException; import java.io.InputStream; +import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; @@ -62,8 +63,19 @@ public class SslConfig { private final SSLContext sslContext; - SslConfig(Properties settings) { - enabled = StringUtils.parseBoolean(settings.getProperty(SSL, SSL_DEFAULT)); + SslConfig(Properties settings, URI baseURI) { + boolean isSchemaPresent = baseURI.getScheme() != null; + boolean isSSLPropertyPresent = settings.getProperty(SSL) != null; + boolean isHttpsScheme = "https".equals(baseURI.getScheme()); + + if (!isSSLPropertyPresent && !isSchemaPresent) { + enabled = StringUtils.parseBoolean(SSL_DEFAULT); + } else { + if (isSSLPropertyPresent && isHttpsScheme && !StringUtils.parseBoolean(settings.getProperty(SSL))) { + throw new ClientException("Cannot enable SSL: HTTPS protocol being used in the URL and SSL disabled in properties"); + } + enabled = isHttpsScheme || StringUtils.parseBoolean(settings.getProperty(SSL, SSL_DEFAULT)); + } protocol = settings.getProperty(SSL_PROTOCOL, SSL_PROTOCOL_DEFAULT); keystoreLocation = settings.getProperty(SSL_KEYSTORE_LOCATION, SSL_KEYSTORE_LOCATION_DEFAULT); keystorePass = settings.getProperty(SSL_KEYSTORE_PASS, SSL_KEYSTORE_PASS_DEFAULT); diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java index 05fb192f8d1..1c9cf6ac925 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java @@ -9,6 +9,7 @@ import java.sql.JDBCType; import java.sql.SQLType; import java.sql.Timestamp; import java.util.Arrays; +import java.util.HashMap; import java.util.Locale; import java.util.Map; import java.util.stream.Collectors; @@ -35,8 +36,8 @@ public enum DataType { SCALED_FLOAT(JDBCType.FLOAT, Double.class, Double.BYTES, 19, 25, false, true, true), KEYWORD( JDBCType.VARCHAR, String.class, Integer.MAX_VALUE, 256, 0), TEXT( JDBCType.VARCHAR, String.class, Integer.MAX_VALUE, Integer.MAX_VALUE, 0, false, false, false), - OBJECT( JDBCType.STRUCT, null, -1, 0, 0), - NESTED( JDBCType.STRUCT, null, -1, 0, 0), + OBJECT( JDBCType.STRUCT, null, -1, 0, 0, false, false, false), + NESTED( JDBCType.STRUCT, null, -1, 0, 0, false, false, false), BINARY( JDBCType.VARBINARY, byte[].class, -1, Integer.MAX_VALUE, 0), // since ODBC and JDBC interpret precision for Date as display size, // the precision is 23 (number of chars in ISO8601 with millis) + Z (the UTC timezone) @@ -44,12 +45,63 @@ public enum DataType { DATE( JDBCType.TIMESTAMP, Timestamp.class, Long.BYTES, 24, 24); // @formatter:on + public static 
final String ODBC_DATATYPE_PREFIX = "SQL_"; + private static final Map<JDBCType, DataType> jdbcToEs; + private static final Map<String, DataType> odbcToEs; static { jdbcToEs = Arrays.stream(DataType.values()) .filter(dataType -> dataType != TEXT && dataType != NESTED && dataType != SCALED_FLOAT) // Remove duplicates .collect(Collectors.toMap(dataType -> dataType.jdbcType, dataType -> dataType)); + + odbcToEs = new HashMap<>(36); + + // Numeric + odbcToEs.put("SQL_BIT", BOOLEAN); + odbcToEs.put("SQL_TINYINT", BYTE); + odbcToEs.put("SQL_SMALLINT", SHORT); + odbcToEs.put("SQL_INTEGER", INTEGER); + odbcToEs.put("SQL_BIGINT", LONG); + odbcToEs.put("SQL_FLOAT", FLOAT); + odbcToEs.put("SQL_REAL", FLOAT); + odbcToEs.put("SQL_DOUBLE", DOUBLE); + odbcToEs.put("SQL_DECIMAL", DOUBLE); + odbcToEs.put("SQL_NUMERIC", DOUBLE); + + // String + odbcToEs.put("SQL_GUID", KEYWORD); + odbcToEs.put("SQL_CHAR", KEYWORD); + odbcToEs.put("SQL_WCHAR", KEYWORD); + odbcToEs.put("SQL_VARCHAR", TEXT); + odbcToEs.put("SQL_WVARCHAR", TEXT); + odbcToEs.put("SQL_LONGVARCHAR", TEXT); + odbcToEs.put("SQL_WLONGVARCHAR", TEXT); + + // Binary + odbcToEs.put("SQL_BINARY", BINARY); + odbcToEs.put("SQL_VARBINARY", BINARY); + odbcToEs.put("SQL_LONGVARBINARY", BINARY); + + // Date + odbcToEs.put("SQL_DATE", DATE); + odbcToEs.put("SQL_TIME", DATE); + odbcToEs.put("SQL_TIMESTAMP", DATE); + + // Intervals - Currently Not Supported + odbcToEs.put("SQL_INTERVAL_HOUR_TO_MINUTE", UNSUPPORTED); + odbcToEs.put("SQL_INTERVAL_HOUR_TO_SECOND", UNSUPPORTED); + odbcToEs.put("SQL_INTERVAL_MINUTE_TO_SECOND", UNSUPPORTED); + odbcToEs.put("SQL_INTERVAL_MONTH", UNSUPPORTED); + odbcToEs.put("SQL_INTERVAL_YEAR", UNSUPPORTED); + odbcToEs.put("SQL_INTERVAL_YEAR_TO_MONTH", UNSUPPORTED); + odbcToEs.put("SQL_INTERVAL_DAY", UNSUPPORTED); + odbcToEs.put("SQL_INTERVAL_HOUR", UNSUPPORTED); + odbcToEs.put("SQL_INTERVAL_MINUTE", UNSUPPORTED); + odbcToEs.put("SQL_INTERVAL_SECOND", UNSUPPORTED); + odbcToEs.put("SQL_INTERVAL_DAY_TO_HOUR", UNSUPPORTED); + odbcToEs.put("SQL_INTERVAL_DAY_TO_MINUTE", UNSUPPORTED); + odbcToEs.put("SQL_INTERVAL_DAY_TO_SECOND", UNSUPPORTED); } /** @@ -162,12 +214,31 @@ public enum DataType { return jdbcToEs.get(jdbcType).javaClass(); } + public static DataType fromODBCType(String odbcType) { + return odbcToEs.get(odbcType); + } /** * Returns the DataType enum corresponding to the specified es type * <p>
* For any dataType DataType.fromEsType(dataType.esType) == dataType */ public static DataType fromEsType(String esType) { - return DataType.valueOf(esType.toUpperCase(Locale.ROOT)); + try { + return DataType.valueOf(esType.toUpperCase(Locale.ROOT)); + } catch (IllegalArgumentException ex) { + return DataType.UNSUPPORTED; + } } -} \ No newline at end of file + + public boolean isCompatibleWith(DataType other) { + if (this == other) { + return true; + } else if (isString() && other.isString()) { + return true; + } else if (isNumeric() && other.isNumeric()) { + return true; + } else { + return false; + } + } +} diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 index 859cc26bf9c..1b3cbc04307 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 +++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 @@ -226,14 +226,20 @@ primaryExpression ; castExpression - : castTemplate - | FUNCTION_ESC castTemplate ESC_END + : castTemplate + | FUNCTION_ESC castTemplate ESC_END + | convertTemplate + | FUNCTION_ESC convertTemplate ESC_END ; castTemplate : CAST '(' expression AS dataType ')' ; - + +convertTemplate + : CONVERT '(' expression ',' dataType ')' + ; + extractExpression : extractTemplate | FUNCTION_ESC extractTemplate ESC_END @@ -347,6 +353,7 @@ CAST: 'CAST'; CATALOG: 'CATALOG'; CATALOGS: 'CATALOGS'; COLUMNS: 'COLUMNS'; +CONVERT: 'CONVERT'; DEBUG: 'DEBUG'; DESC: 'DESC'; DESCRIBE: 'DESCRIBE'; diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens b/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens index 1a90fb72ef5..b422b510aa9 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens +++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens @@ -15,98 +15,99 @@ CAST=14 CATALOG=15 CATALOGS=16 COLUMNS=17 -DEBUG=18 -DESC=19 -DESCRIBE=20 -DISTINCT=21 -ESCAPE=22 -EXECUTABLE=23 -EXISTS=24 -EXPLAIN=25 -EXTRACT=26 -FALSE=27 -FIRST=28 -FORMAT=29 -FROM=30 -FULL=31 -FUNCTIONS=32 -GRAPHVIZ=33 -GROUP=34 -HAVING=35 -IN=36 -INNER=37 -IS=38 -JOIN=39 -LAST=40 -LEFT=41 -LIKE=42 -LIMIT=43 -MAPPED=44 -MATCH=45 -NATURAL=46 -NOT=47 -NULL=48 -NULLS=49 -ON=50 -OPTIMIZED=51 -OR=52 -ORDER=53 -OUTER=54 -PARSED=55 -PHYSICAL=56 -PLAN=57 -RIGHT=58 -RLIKE=59 -QUERY=60 -SCHEMAS=61 -SELECT=62 -SHOW=63 -SYS=64 -TABLE=65 -TABLES=66 -TEXT=67 -TRUE=68 -TYPE=69 -TYPES=70 -USING=71 -VERIFY=72 -WHERE=73 -WITH=74 -ESCAPE_ESC=75 -FUNCTION_ESC=76 -LIMIT_ESC=77 -DATE_ESC=78 -TIME_ESC=79 -TIMESTAMP_ESC=80 -GUID_ESC=81 -ESC_END=82 -EQ=83 -NEQ=84 -LT=85 -LTE=86 -GT=87 -GTE=88 -PLUS=89 -MINUS=90 -ASTERISK=91 -SLASH=92 -PERCENT=93 -CONCAT=94 -DOT=95 -PARAM=96 -STRING=97 -INTEGER_VALUE=98 -DECIMAL_VALUE=99 -IDENTIFIER=100 -DIGIT_IDENTIFIER=101 -TABLE_IDENTIFIER=102 -QUOTED_IDENTIFIER=103 -BACKQUOTED_IDENTIFIER=104 -SIMPLE_COMMENT=105 -BRACKETED_COMMENT=106 -WS=107 -UNRECOGNIZED=108 -DELIMITER=109 +CONVERT=18 +DEBUG=19 +DESC=20 +DESCRIBE=21 +DISTINCT=22 +ESCAPE=23 +EXECUTABLE=24 +EXISTS=25 +EXPLAIN=26 +EXTRACT=27 +FALSE=28 +FIRST=29 +FORMAT=30 +FROM=31 +FULL=32 +FUNCTIONS=33 +GRAPHVIZ=34 +GROUP=35 +HAVING=36 +IN=37 +INNER=38 +IS=39 +JOIN=40 +LAST=41 +LEFT=42 +LIKE=43 +LIMIT=44 +MAPPED=45 +MATCH=46 +NATURAL=47 +NOT=48 +NULL=49 +NULLS=50 +ON=51 +OPTIMIZED=52 +OR=53 +ORDER=54 +OUTER=55 +PARSED=56 +PHYSICAL=57 +PLAN=58 +RIGHT=59 +RLIKE=60 +QUERY=61 +SCHEMAS=62 +SELECT=63 +SHOW=64 +SYS=65 +TABLE=66 +TABLES=67 +TEXT=68 +TRUE=69 +TYPE=70 +TYPES=71 +USING=72 +VERIFY=73 +WHERE=74 +WITH=75 +ESCAPE_ESC=76 +FUNCTION_ESC=77 +LIMIT_ESC=78 +DATE_ESC=79 +TIME_ESC=80 +TIMESTAMP_ESC=81 +GUID_ESC=82 +ESC_END=83 
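For readers following the grammar change above: `convertTemplate` accepts either an Elasticsearch type name or an ODBC alias carrying the `SQL_` prefix, and the prefix decides which lookup is performed (the `ExpressionBuilder` hunk further down implements this). A minimal sketch of that resolution path, using only the `DataType` API introduced above; the `resolve` helper itself is hypothetical, not part of this change:

----
// Hypothetical helper mirroring the CONVERT type lookup:
// ODBC aliases such as SQL_INTEGER resolve through fromODBCType,
// while plain names such as INTEGER fall back to the enum itself,
// so CONVERT(expr, SQL_INTEGER) behaves like CAST(expr AS INTEGER).
static DataType resolve(String name) {
    String upper = name.toUpperCase(java.util.Locale.ROOT);
    if (upper.startsWith(DataType.ODBC_DATATYPE_PREFIX)) { // "SQL_"
        return DataType.fromODBCType(upper); // null when the alias is unknown
    }
    return DataType.valueOf(upper);          // throws on an unknown name
}
----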
+EQ=84 +NEQ=85 +LT=86 +LTE=87 +GT=88 +GTE=89 +PLUS=90 +MINUS=91 +ASTERISK=92 +SLASH=93 +PERCENT=94 +CONCAT=95 +DOT=96 +PARAM=97 +STRING=98 +INTEGER_VALUE=99 +DECIMAL_VALUE=100 +IDENTIFIER=101 +DIGIT_IDENTIFIER=102 +TABLE_IDENTIFIER=103 +QUOTED_IDENTIFIER=104 +BACKQUOTED_IDENTIFIER=105 +SIMPLE_COMMENT=106 +BRACKETED_COMMENT=107 +WS=108 +UNRECOGNIZED=109 +DELIMITER=110 '('=1 ')'=2 ','=3 @@ -124,81 +125,82 @@ DELIMITER=109 'CATALOG'=15 'CATALOGS'=16 'COLUMNS'=17 -'DEBUG'=18 -'DESC'=19 -'DESCRIBE'=20 -'DISTINCT'=21 -'ESCAPE'=22 -'EXECUTABLE'=23 -'EXISTS'=24 -'EXPLAIN'=25 -'EXTRACT'=26 -'FALSE'=27 -'FIRST'=28 -'FORMAT'=29 -'FROM'=30 -'FULL'=31 -'FUNCTIONS'=32 -'GRAPHVIZ'=33 -'GROUP'=34 -'HAVING'=35 -'IN'=36 -'INNER'=37 -'IS'=38 -'JOIN'=39 -'LAST'=40 -'LEFT'=41 -'LIKE'=42 -'LIMIT'=43 -'MAPPED'=44 -'MATCH'=45 -'NATURAL'=46 -'NOT'=47 -'NULL'=48 -'NULLS'=49 -'ON'=50 -'OPTIMIZED'=51 -'OR'=52 -'ORDER'=53 -'OUTER'=54 -'PARSED'=55 -'PHYSICAL'=56 -'PLAN'=57 -'RIGHT'=58 -'RLIKE'=59 -'QUERY'=60 -'SCHEMAS'=61 -'SELECT'=62 -'SHOW'=63 -'SYS'=64 -'TABLE'=65 -'TABLES'=66 -'TEXT'=67 -'TRUE'=68 -'TYPE'=69 -'TYPES'=70 -'USING'=71 -'VERIFY'=72 -'WHERE'=73 -'WITH'=74 -'{ESCAPE'=75 -'{FN'=76 -'{LIMIT'=77 -'{D'=78 -'{T'=79 -'{TS'=80 -'{GUID'=81 -'}'=82 -'='=83 -'<'=85 -'<='=86 -'>'=87 -'>='=88 -'+'=89 -'-'=90 -'*'=91 -'/'=92 -'%'=93 -'||'=94 -'.'=95 -'?'=96 +'CONVERT'=18 +'DEBUG'=19 +'DESC'=20 +'DESCRIBE'=21 +'DISTINCT'=22 +'ESCAPE'=23 +'EXECUTABLE'=24 +'EXISTS'=25 +'EXPLAIN'=26 +'EXTRACT'=27 +'FALSE'=28 +'FIRST'=29 +'FORMAT'=30 +'FROM'=31 +'FULL'=32 +'FUNCTIONS'=33 +'GRAPHVIZ'=34 +'GROUP'=35 +'HAVING'=36 +'IN'=37 +'INNER'=38 +'IS'=39 +'JOIN'=40 +'LAST'=41 +'LEFT'=42 +'LIKE'=43 +'LIMIT'=44 +'MAPPED'=45 +'MATCH'=46 +'NATURAL'=47 +'NOT'=48 +'NULL'=49 +'NULLS'=50 +'ON'=51 +'OPTIMIZED'=52 +'OR'=53 +'ORDER'=54 +'OUTER'=55 +'PARSED'=56 +'PHYSICAL'=57 +'PLAN'=58 +'RIGHT'=59 +'RLIKE'=60 +'QUERY'=61 +'SCHEMAS'=62 +'SELECT'=63 +'SHOW'=64 +'SYS'=65 +'TABLE'=66 +'TABLES'=67 +'TEXT'=68 +'TRUE'=69 +'TYPE'=70 +'TYPES'=71 +'USING'=72 +'VERIFY'=73 +'WHERE'=74 +'WITH'=75 +'{ESCAPE'=76 +'{FN'=77 +'{LIMIT'=78 +'{D'=79 +'{T'=80 +'{TS'=81 +'{GUID'=82 +'}'=83 +'='=84 +'<'=86 +'<='=87 +'>'=88 +'>='=89 +'+'=90 +'-'=91 +'*'=92 +'/'=93 +'%'=94 +'||'=95 +'.'=96 +'?'=97 diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens b/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens index b3ca016bb45..96ccf27d1f9 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens +++ b/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens @@ -15,97 +15,98 @@ CAST=14 CATALOG=15 CATALOGS=16 COLUMNS=17 -DEBUG=18 -DESC=19 -DESCRIBE=20 -DISTINCT=21 -ESCAPE=22 -EXECUTABLE=23 -EXISTS=24 -EXPLAIN=25 -EXTRACT=26 -FALSE=27 -FIRST=28 -FORMAT=29 -FROM=30 -FULL=31 -FUNCTIONS=32 -GRAPHVIZ=33 -GROUP=34 -HAVING=35 -IN=36 -INNER=37 -IS=38 -JOIN=39 -LAST=40 -LEFT=41 -LIKE=42 -LIMIT=43 -MAPPED=44 -MATCH=45 -NATURAL=46 -NOT=47 -NULL=48 -NULLS=49 -ON=50 -OPTIMIZED=51 -OR=52 -ORDER=53 -OUTER=54 -PARSED=55 -PHYSICAL=56 -PLAN=57 -RIGHT=58 -RLIKE=59 -QUERY=60 -SCHEMAS=61 -SELECT=62 -SHOW=63 -SYS=64 -TABLE=65 -TABLES=66 -TEXT=67 -TRUE=68 -TYPE=69 -TYPES=70 -USING=71 -VERIFY=72 -WHERE=73 -WITH=74 -ESCAPE_ESC=75 -FUNCTION_ESC=76 -LIMIT_ESC=77 -DATE_ESC=78 -TIME_ESC=79 -TIMESTAMP_ESC=80 -GUID_ESC=81 -ESC_END=82 -EQ=83 -NEQ=84 -LT=85 -LTE=86 -GT=87 -GTE=88 -PLUS=89 -MINUS=90 -ASTERISK=91 -SLASH=92 -PERCENT=93 -CONCAT=94 -DOT=95 -PARAM=96 -STRING=97 -INTEGER_VALUE=98 -DECIMAL_VALUE=99 -IDENTIFIER=100 -DIGIT_IDENTIFIER=101 -TABLE_IDENTIFIER=102 -QUOTED_IDENTIFIER=103 
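The token renumbering running through these generated `.tokens` files is mechanical: `CONVERT` takes id 18, so every later token shifts up by one, and the lexer, the parser, and both `.tokens` files have to agree on the numbering. A quick sanity check against the regenerated constants (a sketch only; `SqlBaseLexer` is package-private, so this assumes same-package test code with assertions enabled):

----
// The regenerated constants should match the .tokens files:
// CONVERT slotted in at 18, pushing DEBUG from 18 to 19, and so on.
assert SqlBaseLexer.CONVERT == 18;
assert SqlBaseLexer.DEBUG == 19;
// The ANTLR vocabulary reports literal names with surrounding quotes.
assert "'CONVERT'".equals(SqlBaseLexer.VOCABULARY.getLiteralName(SqlBaseLexer.CONVERT));
----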
-BACKQUOTED_IDENTIFIER=104 -SIMPLE_COMMENT=105 -BRACKETED_COMMENT=106 -WS=107 -UNRECOGNIZED=108 +CONVERT=18 +DEBUG=19 +DESC=20 +DESCRIBE=21 +DISTINCT=22 +ESCAPE=23 +EXECUTABLE=24 +EXISTS=25 +EXPLAIN=26 +EXTRACT=27 +FALSE=28 +FIRST=29 +FORMAT=30 +FROM=31 +FULL=32 +FUNCTIONS=33 +GRAPHVIZ=34 +GROUP=35 +HAVING=36 +IN=37 +INNER=38 +IS=39 +JOIN=40 +LAST=41 +LEFT=42 +LIKE=43 +LIMIT=44 +MAPPED=45 +MATCH=46 +NATURAL=47 +NOT=48 +NULL=49 +NULLS=50 +ON=51 +OPTIMIZED=52 +OR=53 +ORDER=54 +OUTER=55 +PARSED=56 +PHYSICAL=57 +PLAN=58 +RIGHT=59 +RLIKE=60 +QUERY=61 +SCHEMAS=62 +SELECT=63 +SHOW=64 +SYS=65 +TABLE=66 +TABLES=67 +TEXT=68 +TRUE=69 +TYPE=70 +TYPES=71 +USING=72 +VERIFY=73 +WHERE=74 +WITH=75 +ESCAPE_ESC=76 +FUNCTION_ESC=77 +LIMIT_ESC=78 +DATE_ESC=79 +TIME_ESC=80 +TIMESTAMP_ESC=81 +GUID_ESC=82 +ESC_END=83 +EQ=84 +NEQ=85 +LT=86 +LTE=87 +GT=88 +GTE=89 +PLUS=90 +MINUS=91 +ASTERISK=92 +SLASH=93 +PERCENT=94 +CONCAT=95 +DOT=96 +PARAM=97 +STRING=98 +INTEGER_VALUE=99 +DECIMAL_VALUE=100 +IDENTIFIER=101 +DIGIT_IDENTIFIER=102 +TABLE_IDENTIFIER=103 +QUOTED_IDENTIFIER=104 +BACKQUOTED_IDENTIFIER=105 +SIMPLE_COMMENT=106 +BRACKETED_COMMENT=107 +WS=108 +UNRECOGNIZED=109 '('=1 ')'=2 ','=3 @@ -123,81 +124,82 @@ UNRECOGNIZED=108 'CATALOG'=15 'CATALOGS'=16 'COLUMNS'=17 -'DEBUG'=18 -'DESC'=19 -'DESCRIBE'=20 -'DISTINCT'=21 -'ESCAPE'=22 -'EXECUTABLE'=23 -'EXISTS'=24 -'EXPLAIN'=25 -'EXTRACT'=26 -'FALSE'=27 -'FIRST'=28 -'FORMAT'=29 -'FROM'=30 -'FULL'=31 -'FUNCTIONS'=32 -'GRAPHVIZ'=33 -'GROUP'=34 -'HAVING'=35 -'IN'=36 -'INNER'=37 -'IS'=38 -'JOIN'=39 -'LAST'=40 -'LEFT'=41 -'LIKE'=42 -'LIMIT'=43 -'MAPPED'=44 -'MATCH'=45 -'NATURAL'=46 -'NOT'=47 -'NULL'=48 -'NULLS'=49 -'ON'=50 -'OPTIMIZED'=51 -'OR'=52 -'ORDER'=53 -'OUTER'=54 -'PARSED'=55 -'PHYSICAL'=56 -'PLAN'=57 -'RIGHT'=58 -'RLIKE'=59 -'QUERY'=60 -'SCHEMAS'=61 -'SELECT'=62 -'SHOW'=63 -'SYS'=64 -'TABLE'=65 -'TABLES'=66 -'TEXT'=67 -'TRUE'=68 -'TYPE'=69 -'TYPES'=70 -'USING'=71 -'VERIFY'=72 -'WHERE'=73 -'WITH'=74 -'{ESCAPE'=75 -'{FN'=76 -'{LIMIT'=77 -'{D'=78 -'{T'=79 -'{TS'=80 -'{GUID'=81 -'}'=82 -'='=83 -'<'=85 -'<='=86 -'>'=87 -'>='=88 -'+'=89 -'-'=90 -'*'=91 -'/'=92 -'%'=93 -'||'=94 -'.'=95 -'?'=96 +'CONVERT'=18 +'DEBUG'=19 +'DESC'=20 +'DESCRIBE'=21 +'DISTINCT'=22 +'ESCAPE'=23 +'EXECUTABLE'=24 +'EXISTS'=25 +'EXPLAIN'=26 +'EXTRACT'=27 +'FALSE'=28 +'FIRST'=29 +'FORMAT'=30 +'FROM'=31 +'FULL'=32 +'FUNCTIONS'=33 +'GRAPHVIZ'=34 +'GROUP'=35 +'HAVING'=36 +'IN'=37 +'INNER'=38 +'IS'=39 +'JOIN'=40 +'LAST'=41 +'LEFT'=42 +'LIKE'=43 +'LIMIT'=44 +'MAPPED'=45 +'MATCH'=46 +'NATURAL'=47 +'NOT'=48 +'NULL'=49 +'NULLS'=50 +'ON'=51 +'OPTIMIZED'=52 +'OR'=53 +'ORDER'=54 +'OUTER'=55 +'PARSED'=56 +'PHYSICAL'=57 +'PLAN'=58 +'RIGHT'=59 +'RLIKE'=60 +'QUERY'=61 +'SCHEMAS'=62 +'SELECT'=63 +'SHOW'=64 +'SYS'=65 +'TABLE'=66 +'TABLES'=67 +'TEXT'=68 +'TRUE'=69 +'TYPE'=70 +'TYPES'=71 +'USING'=72 +'VERIFY'=73 +'WHERE'=74 +'WITH'=75 +'{ESCAPE'=76 +'{FN'=77 +'{LIMIT'=78 +'{D'=79 +'{T'=80 +'{TS'=81 +'{GUID'=82 +'}'=83 +'='=84 +'<'=86 +'<='=87 +'>'=88 +'>='=89 +'+'=90 +'-'=91 +'*'=92 +'/'=93 +'%'=94 +'||'=95 +'.'=96 +'?'=97 diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index 4915a25a55b..e5ab3ce082b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -18,6 +18,7 @@ import 
org.elasticsearch.xpack.sql.expression.function.FunctionAttribute; import org.elasticsearch.xpack.sql.expression.function.Functions; import org.elasticsearch.xpack.sql.expression.function.Score; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; +import org.elasticsearch.xpack.sql.expression.predicate.In; import org.elasticsearch.xpack.sql.plan.logical.Aggregate; import org.elasticsearch.xpack.sql.plan.logical.Distinct; import org.elasticsearch.xpack.sql.plan.logical.Filter; @@ -40,7 +41,9 @@ import java.util.function.Consumer; import static java.lang.String.format; -abstract class Verifier { +final class Verifier { + + private Verifier() {} static class Failure { private final Node source; @@ -188,6 +191,8 @@ abstract class Verifier { Set localFailures = new LinkedHashSet<>(); + validateInExpression(p, localFailures); + if (!groupingFailures.contains(p)) { checkGroupBy(p, localFailures, resolvedFunctions, groupingFailures); } @@ -488,4 +493,19 @@ abstract class Verifier { fail(nested.get(0), "HAVING isn't (yet) compatible with nested fields " + new AttributeSet(nested).names())); } } -} \ No newline at end of file + + private static void validateInExpression(LogicalPlan p, Set localFailures) { + p.forEachExpressions(e -> + e.forEachUp((In in) -> { + DataType dt = in.value().dataType(); + for (Expression value : in.list()) { + if (!in.value().dataType().isCompatibleWith(value.dataType())) { + localFailures.add(fail(value, "expected data type [%s], value provided is of type [%s]", + dt, value.dataType())); + return; + } + } + }, + In.class)); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java index 0382729aa9f..574106f07ca 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java @@ -15,6 +15,8 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.IndicesOptions.Option; import org.elasticsearch.action.support.IndicesOptions.WildcardStates; @@ -24,23 +26,34 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.DateEsField; import org.elasticsearch.xpack.sql.type.EsField; +import org.elasticsearch.xpack.sql.type.KeywordEsField; +import org.elasticsearch.xpack.sql.type.TextEsField; import org.elasticsearch.xpack.sql.type.Types; +import org.elasticsearch.xpack.sql.type.UnsupportedEsField; import org.elasticsearch.xpack.sql.util.CollectionUtils; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.EnumSet; +import 
java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Map.Entry; +import java.util.NavigableSet; import java.util.Objects; import java.util.Set; +import java.util.TreeMap; import java.util.TreeSet; import java.util.regex.Pattern; -import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; public class IndexResolver { @@ -222,64 +235,157 @@ public class IndexResolver { listener.onResponse(result); } - /** * Resolves a pattern to one (potentially compound meaning that spawns multiple indices) mapping. */ - public void resolveWithSameMapping(String indexWildcard, String javaRegex, ActionListener<IndexResolution> listener) { - GetIndexRequest getIndexRequest = createGetIndexRequest(indexWildcard); - client.admin().indices().getIndex(getIndexRequest, ActionListener.wrap(response -> { - ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> mappings = response.getMappings(); + public void resolveAsMergedMapping(String indexWildcard, String javaRegex, ActionListener<IndexResolution> listener) { + FieldCapabilitiesRequest fieldRequest = createFieldCapsRequest(indexWildcard); + client.fieldCaps(fieldRequest, + ActionListener.wrap(response -> listener.onResponse(mergedMapping(indexWildcard, response.get())), listener::onFailure)); + } - List<IndexResolution> resolutions; - if (mappings.size() > 0) { - resolutions = new ArrayList<>(mappings.size()); - Pattern pattern = javaRegex != null ? Pattern.compile(javaRegex) : null; - for (ObjectObjectCursor<String, ImmutableOpenMap<String, MappingMetaData>> indexMappings : mappings) { - String concreteIndex = indexMappings.key; - if (pattern == null || pattern.matcher(concreteIndex).matches()) { - resolutions.add(buildGetIndexResult(concreteIndex, concreteIndex, indexMappings.value)); + static IndexResolution mergedMapping(String indexPattern, Map<String, Map<String, FieldCapabilities>> fieldCaps) { + if (fieldCaps == null || fieldCaps.isEmpty()) { + return IndexResolution.notFound(indexPattern); + } + + StringBuilder errorMessage = new StringBuilder(); + + NavigableSet<Entry<String, Map<String, FieldCapabilities>>> sortedFields = new TreeSet<>( + // for some reason .reversed doesn't work (probably due to inference) + Collections.reverseOrder(Comparator.comparing(Entry::getKey))); + sortedFields.addAll(fieldCaps.entrySet()); + + Map<String, EsField> hierarchicalMapping = new TreeMap<>(); + Map<String, EsField> flattedMapping = new LinkedHashMap<>(); + + // sort keys descending in order to easily detect multi-fields (a.b.c multi-field of a.b) + // without sorting, they can still be detected however without the emptyMap optimization + // (fields without multi-fields have no children) + for (Entry<String, Map<String, FieldCapabilities>> entry : sortedFields) { + String name = entry.getKey(); + // skip internal fields + if (!name.startsWith("_")) { + Map<String, FieldCapabilities> types = entry.getValue(); + // field is mapped differently across indices + if (types.size() > 1) { + // build error message + for (Entry<String, FieldCapabilities> type : types.entrySet()) { + if (errorMessage.length() > 0) { + errorMessage.append(", "); + } + errorMessage.append("["); + errorMessage.append(type.getKey()); + errorMessage.append("] in "); + errorMessage.append(Arrays.toString(type.getValue().indices())); } + + errorMessage.insert(0, + "[" + indexPattern + "] points to indices with incompatible mappings; " + + "field [" + name + "] is mapped in [" + types.size() + "] different ways: "); + } + if (errorMessage.length() > 0) { + return IndexResolution.invalid(errorMessage.toString()); + } + + FieldCapabilities fieldCap = types.values().iterator().next(); + // validate search/agg-able + if (fieldCap.isAggregatable() && fieldCap.nonAggregatableIndices() != null) { + errorMessage.append("[" + indexPattern + "] points to indices with incompatible mappings: "); + errorMessage.append("field [" + name + "] is aggregatable except in "); + errorMessage.append(Arrays.toString(fieldCap.nonAggregatableIndices())); + } + if (fieldCap.isSearchable() && fieldCap.nonSearchableIndices() != null) { + if (errorMessage.length() > 0) { + errorMessage.append(","); + } + errorMessage.append("[" + indexPattern + "] points to indices with incompatible mappings: "); + errorMessage.append("field [" + name + "] is searchable except in "); + errorMessage.append(Arrays.toString(fieldCap.nonSearchableIndices())); + } + if (errorMessage.length() > 0) { + return IndexResolution.invalid(errorMessage.toString()); + } + + // validation passes - create the field + // and name wasn't added before + if (!flattedMapping.containsKey(name)) { + createField(name, fieldCap, fieldCaps, hierarchicalMapping, flattedMapping, false); } - } else { - resolutions = emptyList(); - } - - listener.onResponse(merge(resolutions, indexWildcard)); - }, listener::onFailure)); - } - - static IndexResolution merge(List<IndexResolution> resolutions, String indexWildcard) { - IndexResolution merged = null; - for (IndexResolution resolution : resolutions) { - // everything that follows gets compared - if (!resolution.isValid()) { - return resolution; - } - // initialize resolution on first run - if (merged == null) { - merged = resolution; - } - // need the same mapping across all resolutions - if (!merged.get().mapping().equals(resolution.get().mapping())) { - return IndexResolution.invalid( - "[" + indexWildcard + "] points to indices [" + merged.get().name() + "] " - + "and [" + resolution.get().name() + "] which have different mappings. " - + "When using multiple indices, the mappings must be identical."); } } - if (merged != null) { - // at this point, we are sure there's the same mapping across all (if that's the case) indices - // to keep things simple, use the given pattern as index name - merged = IndexResolution.valid(new EsIndex(indexWildcard, merged.get().mapping())); - } else { - merged = IndexResolution.notFound(indexWildcard); - } - return merged; + + return IndexResolution.valid(new EsIndex(indexPattern, hierarchicalMapping)); } + private static EsField createField(String fieldName, FieldCapabilities caps, Map<String, Map<String, FieldCapabilities>> globalCaps, + Map<String, EsField> hierarchicalMapping, Map<String, EsField> flattedMapping, boolean hasChildren) { + + Map<String, EsField> parentProps = hierarchicalMapping; + + int dot = fieldName.lastIndexOf('.'); + String fullFieldName = fieldName; + + if (dot >= 0) { + String parentName = fieldName.substring(0, dot); + fieldName = fieldName.substring(dot + 1); + EsField parent = flattedMapping.get(parentName); + if (parent == null) { + Map<String, FieldCapabilities> map = globalCaps.get(parentName); + if (map == null) { + throw new SqlIllegalArgumentException("Cannot find field {}; this is likely a bug", parentName); + } + FieldCapabilities parentCap = map.values().iterator().next(); + parent = createField(parentName, parentCap, globalCaps, hierarchicalMapping, flattedMapping, true); + } + parentProps = parent.getProperties(); + } + + EsField field = null; + Map<String, EsField> props = hasChildren ? new TreeMap<>() : emptyMap(); + + DataType esType = DataType.fromEsType(caps.getType()); + switch (esType) { + case TEXT: + field = new TextEsField(fieldName, props, false); + break; + case KEYWORD: + int length = DataType.KEYWORD.defaultPrecision; + // TODO: check whether isSearchable/isAggregatable takes into account the presence of the normalizer + boolean normalized = false; + field = new KeywordEsField(fieldName, props, caps.isAggregatable(), length, normalized); + break; + case DATE: + field = new DateEsField(fieldName, props, caps.isAggregatable()); + break; + case UNSUPPORTED: + field = new UnsupportedEsField(fieldName, caps.getType()); + break; + default: + field = new EsField(fieldName, esType, props, caps.isAggregatable()); + } + + parentProps.put(fieldName, field); + flattedMapping.put(fullFieldName, field); + + return field; + } + + private static FieldCapabilitiesRequest createFieldCapsRequest(String index) { + return new FieldCapabilitiesRequest() + .indices(Strings.commaDelimitedListToStringArray(index)) + .fields("*") + // lenient because we throw our own errors looking at the response e.g. if something was not resolved + // also because this way security doesn't throw authorization exceptions but rather honors ignore_unavailable + .indicesOptions(IndicesOptions.lenientExpandOpen()); + } + + // TODO: concrete indices still use get mapping + // waiting on https://github.com/elastic/elasticsearch/pull/34071 + // + /** - * Resolves a pattern to multiple, separate indices. + * Resolves a pattern to multiple, separate indices. Doesn't perform validation. */ public void resolveAsSeparateMappings(String indexWildcard, String javaRegex, ActionListener<List<EsIndex>> listener) { GetIndexRequest getIndexRequest = createGetIndexRequest(indexWildcard); @@ -306,7 +412,7 @@ public class IndexResolver { listener.onResponse(results); }, listener::onFailure)); } - + private static GetIndexRequest createGetIndexRequest(String index) { return new GetIndexRequest() .local(true) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java index dfaef60abd5..e9a37240be0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Expression.TypeResolution; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.type.DataType; import java.util.ArrayList; import java.util.Collection; @@ -16,15 +17,10 @@ import java.util.function.Predicate; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; -import static java.util.stream.Collectors.toList; -public abstract class Expressions { +public final class Expressions { - public static List<NamedExpression> asNamed(List<? extends Expression> exp) { - return exp.stream() - .map(NamedExpression.class::cast) - .collect(toList()); - } + private Expressions() {} public static NamedExpression wrapAsNamed(Expression exp) { return exp instanceof NamedExpression ?
(NamedExpression) exp : new Alias(exp.location(), exp.nodeName(), exp); @@ -71,6 +67,15 @@ public abstract class Expressions { return true; } + public static boolean foldable(List exps) { + for (Expression exp : exps) { + if (!exp.foldable()) { + return false; + } + } + return true; + } + public static AttributeSet references(List exps) { if (exps.isEmpty()) { return AttributeSet.EMPTY; @@ -126,7 +131,16 @@ public abstract class Expressions { } public static TypeResolution typeMustBeNumeric(Expression e) { - return e.dataType().isNumeric()? TypeResolution.TYPE_RESOLVED : new TypeResolution( - "Argument required to be numeric ('" + Expressions.name(e) + "' of type '" + e.dataType().esType + "')"); + return e.dataType().isNumeric() ? TypeResolution.TYPE_RESOLVED : new TypeResolution(numericErrorMessage(e)); + } + + public static TypeResolution typeMustBeNumericOrDate(Expression e) { + return e.dataType().isNumeric() || e.dataType() == DataType.DATE ? + TypeResolution.TYPE_RESOLVED : + new TypeResolution(numericErrorMessage(e)); + } + + private static String numericErrorMessage(Expression e) { + return "Argument required to be numeric ('" + Expressions.name(e) + "' of type '" + e.dataType().esType + "')"; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Max.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Max.java index 6b710cf06d5..fde06f239cb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Max.java @@ -5,12 +5,14 @@ */ package org.elasticsearch.xpack.sql.expression.function.aggregate; -import java.util.List; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; +import java.util.List; + /** * Find the maximum value in matching documents. */ @@ -39,4 +41,9 @@ public class Max extends NumericAggregate implements EnclosedAgg { public String innerName() { return "max"; } + + @Override + protected TypeResolution resolveType() { + return Expressions.typeMustBeNumericOrDate(field()); + } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Min.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Min.java index 16adf6461e1..42109aaf5d6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Min.java @@ -5,12 +5,14 @@ */ package org.elasticsearch.xpack.sql.expression.function.aggregate; -import java.util.List; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; +import java.util.List; + /** * Find the minimum value in matched documents. 
*/ @@ -42,4 +44,9 @@ public class Min extends NumericAggregate implements EnclosedAgg { public String innerName() { return "min"; } + + @Override + protected TypeResolution resolveType() { + return Expressions.typeMustBeNumericOrDate(field()); + } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/Pipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/Pipe.java index 4d1604ff535..5c96d2c9244 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/Pipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/Pipe.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.sql.expression.gen.pipeline; +import org.elasticsearch.xpack.sql.capabilities.Resolvable; import org.elasticsearch.xpack.sql.execution.search.FieldExtraction; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; @@ -24,7 +25,7 @@ import java.util.List; * Is an {@code Add} operator with left {@code ABS} over an aggregate (MAX), and * right being a {@code CAST} function. */ -public abstract class Pipe extends Node implements FieldExtraction { +public abstract class Pipe extends Node implements FieldExtraction, Resolvable { private final Expression expression; @@ -37,8 +38,6 @@ public abstract class Pipe extends Node implements FieldExtraction { return expression; } - public abstract boolean resolved(); - public abstract Processor asProcessor(); /** diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java index fb04f6d438a..a820833d1a0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java @@ -5,43 +5,55 @@ */ package org.elasticsearch.xpack.sql.expression.predicate; -import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.NamedExpression; +import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.Params; +import org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptWeaver; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.Comparisons; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.InPipe; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.CollectionUtils; +import java.util.ArrayList; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Locale; import java.util.Objects; +import java.util.StringJoiner; +import java.util.stream.Collectors; -public class In extends NamedExpression { +import static java.lang.String.format; +import static 
org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; + +public class In extends NamedExpression implements ScriptWeaver { private final Expression value; private final List list; - private final boolean nullable, foldable; + private Attribute lazyAttribute; public In(Location location, Expression value, List list) { super(location, null, CollectionUtils.combine(list, value), null); this.value = value; - this.list = list; - - this.nullable = children().stream().anyMatch(Expression::nullable); - this.foldable = children().stream().allMatch(Expression::foldable); + this.list = list.stream().distinct().collect(Collectors.toList()); } @Override protected NodeInfo info() { - return NodeInfo.create(this, In::new, value(), list()); + return NodeInfo.create(this, In::new, value, list); } @Override public Expression replaceChildren(List newChildren) { - if (newChildren.size() < 1) { - throw new IllegalArgumentException("expected one or more children but received [" + newChildren.size() + "]"); + if (newChildren.size() < 2) { + throw new IllegalArgumentException("expected at least [2] children but received [" + newChildren.size() + "]"); } return new In(location(), newChildren.get(newChildren.size() - 1), newChildren.subList(0, newChildren.size() - 1)); } @@ -61,22 +73,75 @@ public class In extends NamedExpression { @Override public boolean nullable() { - return nullable; + return Expressions.nullable(children()); } @Override public boolean foldable() { - return foldable; + return Expressions.foldable(children()); + } + + @Override + public Object fold() { + Object foldedLeftValue = value.fold(); + + for (Expression rightValue : list) { + Boolean compResult = Comparisons.eq(foldedLeftValue, rightValue.fold()); + if (compResult != null && compResult) { + return true; + } + } + return false; + } + + @Override + public String name() { + StringJoiner sj = new StringJoiner(", ", " IN(", ")"); + list.forEach(e -> sj.add(Expressions.name(e))); + return Expressions.name(value) + sj.toString(); } @Override public Attribute toAttribute() { - throw new SqlIllegalArgumentException("not implemented yet"); + if (lazyAttribute == null) { + lazyAttribute = new ScalarFunctionAttribute(location(), name(), dataType(), null, + false, id(), false, "IN", asScript(), null, asPipe()); + } + return lazyAttribute; } @Override public ScriptTemplate asScript() { - throw new SqlIllegalArgumentException("not implemented yet"); + StringJoiner sj = new StringJoiner(" || "); + ScriptTemplate leftScript = asScript(value); + List rightParams = new ArrayList<>(); + String scriptPrefix = leftScript + "=="; + LinkedHashSet values = list.stream().map(Expression::fold).collect(Collectors.toCollection(LinkedHashSet::new)); + for (Object valueFromList : values) { + if (valueFromList instanceof Expression) { + ScriptTemplate rightScript = asScript((Expression) valueFromList); + sj.add(scriptPrefix + rightScript.template()); + rightParams.add(rightScript.params()); + } else { + if (valueFromList instanceof String) { + sj.add(scriptPrefix + '"' + valueFromList + '"'); + } else { + sj.add(scriptPrefix + valueFromList.toString()); + } + } + } + + ParamsBuilder paramsBuilder = paramsBuilder().script(leftScript.params()); + for (Params p : rightParams) { + paramsBuilder = paramsBuilder.script(p); + } + + return new ScriptTemplate(format(Locale.ROOT, "%s", sj.toString()), paramsBuilder.build(), dataType()); + } + + @Override + protected Pipe makePipe() { + return new InPipe(location(), this, 
children().stream().map(Expressions::pipe).collect(Collectors.toList())); } @Override @@ -97,4 +162,4 @@ public class In extends NamedExpression { return Objects.equals(value, other.value) && Objects.equals(list, other.list); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/Comparisons.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/Comparisons.java index cdd293cb1af..79d3f2b318b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/Comparisons.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/Comparisons.java @@ -5,12 +5,16 @@ */ package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; +import java.util.Set; + /** * Comparison utilities. */ -abstract class Comparisons { +public final class Comparisons { - static Boolean eq(Object l, Object r) { + private Comparisons() {} + + public static Boolean eq(Object l, Object r) { Integer i = compare(l, r); return i == null ? null : i.intValue() == 0; } @@ -35,6 +39,10 @@ abstract class Comparisons { return i == null ? null : i.intValue() >= 0; } + static Boolean in(Object l, Set r) { + return r.contains(l); + } + /** * Compares two expression arguments (typically Numbers), if possible. * Otherwise returns null (the arguments are not comparable or at least @@ -73,4 +81,4 @@ abstract class Comparisons { return Integer.valueOf(Integer.compare(l.intValue(), r.intValue())); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InPipe.java new file mode 100644 index 00000000000..4ae72b4b49e --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InPipe.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; + +import org.elasticsearch.xpack.sql.capabilities.Resolvables; +import org.elasticsearch.xpack.sql.execution.search.FieldExtraction; +import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +public class InPipe extends Pipe { + + private List pipes; + + public InPipe(Location location, Expression expression, List pipes) { + super(location, expression, pipes); + this.pipes = pipes; + } + + @Override + public final Pipe replaceChildren(List newChildren) { + if (newChildren.size() < 2) { + throw new IllegalArgumentException("expected at least [2] children but received [" + newChildren.size() + "]"); + } + return new InPipe(location(), expression(), newChildren); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, InPipe::new, expression(), pipes); + } + + @Override + public boolean supportedByAggsOnlyQuery() { + return pipes.stream().allMatch(FieldExtraction::supportedByAggsOnlyQuery); + } + + @Override + public final Pipe resolveAttributes(AttributeResolver resolver) { + List newPipes = new ArrayList<>(pipes.size()); + for (Pipe p : pipes) { + newPipes.add(p.resolveAttributes(resolver)); + } + return replaceChildren(newPipes); + } + + @Override + public boolean resolved() { + return Resolvables.resolved(pipes); + } + + @Override + public final void collectFields(SqlSourceBuilder sourceBuilder) { + pipes.forEach(p -> p.collectFields(sourceBuilder)); + } + + @Override + public int hashCode() { + return Objects.hash(pipes); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + InPipe other = (InPipe) obj; + return Objects.equals(pipes, other.pipes); + } + + @Override + public InProcessor asProcessor() { + return new InProcessor(pipes.stream().map(Pipe::asProcessor).collect(Collectors.toList())); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessor.java new file mode 100644 index 00000000000..5ebf8870965 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessor.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public class InProcessor implements Processor { + + public static final String NAME = "in"; + + private final List processsors; + + public InProcessor(List processors) { + this.processsors = processors; + } + + public InProcessor(StreamInput in) throws IOException { + processsors = in.readNamedWriteableList(Processor.class); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public final void writeTo(StreamOutput out) throws IOException { + out.writeNamedWriteableList(processsors); + } + + @Override + public Object process(Object input) { + Object leftValue = processsors.get(processsors.size() - 1).process(input); + + for (int i = 0; i < processsors.size() - 1; i++) { + Boolean compResult = Comparisons.eq(leftValue, processsors.get(i).process(input)); + if (compResult != null && compResult) { + return true; + } + } + return false; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InProcessor that = (InProcessor) o; + return Objects.equals(processsors, that.processsors); + } + + @Override + public int hashCode() { + return Objects.hash(processsors); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java index 18ba4ff41b7..8443358a12c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java @@ -1892,4 +1892,4 @@ public class Optimizer extends RuleExecutor { enum TransformDirection { UP, DOWN }; -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index 5375f50dde5..30f2edc53f1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -53,6 +53,7 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BooleanLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastExpressionContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastTemplateContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ComparisonContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ConvertTemplateContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DateEscapedLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DecimalLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DereferenceContext; @@ -397,8 +398,27 @@ abstract class ExpressionBuilder extends IdentifierBuilder { // @Override public Cast visitCastExpression(CastExpressionContext ctx) { - CastTemplateContext ctc = ctx.castTemplate(); - return new Cast(source(ctc), expression(ctc.expression()), typedParsing(ctc.dataType(), DataType.class)); + CastTemplateContext castTc = ctx.castTemplate(); + if 
(castTc != null) { + return new Cast(source(castTc), expression(castTc.expression()), typedParsing(castTc.dataType(), DataType.class)); + } else { + ConvertTemplateContext convertTc = ctx.convertTemplate(); + String convertDataType = convertTc.dataType().getText().toUpperCase(Locale.ROOT); + DataType dataType; + if (convertDataType.startsWith(DataType.ODBC_DATATYPE_PREFIX)) { + dataType = DataType.fromODBCType(convertDataType); + if (dataType == null) { + throw new ParsingException(source(convertTc.dataType()), "Invalid data type [{}] provided", convertDataType); + } + } else { + try { + dataType = DataType.valueOf(convertDataType); + } catch (IllegalArgumentException e) { + throw new ParsingException(source(convertTc.dataType()), "Invalid data type [{}] provided", convertDataType); + } + } + return new Cast(source(convertTc), expression(convertTc.expression()), dataType); + } } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java index 7dc0d5e985d..380428b8ac9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java @@ -767,6 +767,18 @@ class SqlBaseBaseListener implements SqlBaseListener { * <p>The default implementation does nothing.</p> */ @Override public void exitCastTemplate(SqlBaseParser.CastTemplateContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p> + */ + @Override public void enterConvertTemplate(SqlBaseParser.ConvertTemplateContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p> + */ + @Override public void exitConvertTemplate(SqlBaseParser.ConvertTemplateContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java index 323fd914f55..1305d5788b7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java @@ -452,6 +452,13 @@ class SqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements SqlBa * {@link #visitChildren} on {@code ctx}.</p> */ @Override public T visitCastTemplate(SqlBaseParser.CastTemplateContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + * <p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitConvertTemplate(SqlBaseParser.ConvertTemplateContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java index f82072aa13c..07406b4297a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java @@ -19,20 +19,20 @@ class SqlBaseLexer extends Lexer { public static final int T__0=1, T__1=2, T__2=3, T__3=4, ALL=5, ANALYZE=6, ANALYZED=7, AND=8, ANY=9, AS=10, ASC=11, BETWEEN=12, BY=13, CAST=14, CATALOG=15, CATALOGS=16, COLUMNS=17, - DEBUG=18, DESC=19, DESCRIBE=20, DISTINCT=21, ESCAPE=22, EXECUTABLE=23, - EXISTS=24, EXPLAIN=25, EXTRACT=26, FALSE=27, FIRST=28, FORMAT=29, FROM=30, - FULL=31, FUNCTIONS=32, GRAPHVIZ=33, GROUP=34, HAVING=35, IN=36, INNER=37, - IS=38, JOIN=39, LAST=40, LEFT=41, LIKE=42, LIMIT=43, MAPPED=44, MATCH=45, - NATURAL=46, NOT=47, NULL=48, NULLS=49, ON=50, OPTIMIZED=51, OR=52, ORDER=53, - OUTER=54, PARSED=55, PHYSICAL=56, PLAN=57, RIGHT=58, RLIKE=59, QUERY=60, - SCHEMAS=61, SELECT=62, SHOW=63, SYS=64, TABLE=65, TABLES=66, TEXT=67, - TRUE=68, TYPE=69, TYPES=70, USING=71, VERIFY=72, WHERE=73, WITH=74, ESCAPE_ESC=75, - FUNCTION_ESC=76, LIMIT_ESC=77, DATE_ESC=78, TIME_ESC=79, TIMESTAMP_ESC=80, - GUID_ESC=81, ESC_END=82, EQ=83, NEQ=84, LT=85, LTE=86, GT=87, GTE=88, - PLUS=89, MINUS=90, ASTERISK=91, SLASH=92, PERCENT=93, CONCAT=94, DOT=95, - PARAM=96, STRING=97, INTEGER_VALUE=98, DECIMAL_VALUE=99, IDENTIFIER=100, - DIGIT_IDENTIFIER=101, TABLE_IDENTIFIER=102, QUOTED_IDENTIFIER=103, BACKQUOTED_IDENTIFIER=104, - SIMPLE_COMMENT=105, BRACKETED_COMMENT=106, WS=107, UNRECOGNIZED=108; + CONVERT=18, DEBUG=19, DESC=20, DESCRIBE=21, DISTINCT=22, ESCAPE=23, EXECUTABLE=24, + EXISTS=25, EXPLAIN=26, EXTRACT=27, FALSE=28, FIRST=29, FORMAT=30, FROM=31, + FULL=32, FUNCTIONS=33, GRAPHVIZ=34, GROUP=35, HAVING=36, IN=37, INNER=38, + IS=39, JOIN=40, LAST=41, LEFT=42, LIKE=43, LIMIT=44, MAPPED=45, MATCH=46, + NATURAL=47, NOT=48, NULL=49, NULLS=50, ON=51, OPTIMIZED=52, OR=53, ORDER=54, + OUTER=55, PARSED=56, PHYSICAL=57, PLAN=58, RIGHT=59, RLIKE=60, QUERY=61, + SCHEMAS=62, SELECT=63, SHOW=64, SYS=65, TABLE=66, TABLES=67, TEXT=68, + TRUE=69, TYPE=70, TYPES=71, USING=72, VERIFY=73, WHERE=74, WITH=75, ESCAPE_ESC=76, + FUNCTION_ESC=77, LIMIT_ESC=78, DATE_ESC=79, TIME_ESC=80, TIMESTAMP_ESC=81, + GUID_ESC=82, ESC_END=83, EQ=84, NEQ=85, LT=86, LTE=87, GT=88, GTE=89, + PLUS=90, MINUS=91, ASTERISK=92, SLASH=93, PERCENT=94, CONCAT=95, DOT=96, + PARAM=97, STRING=98, INTEGER_VALUE=99, DECIMAL_VALUE=100, IDENTIFIER=101, + DIGIT_IDENTIFIER=102, TABLE_IDENTIFIER=103, QUOTED_IDENTIFIER=104, BACKQUOTED_IDENTIFIER=105, + SIMPLE_COMMENT=106, BRACKETED_COMMENT=107, WS=108, UNRECOGNIZED=109; public static String[] modeNames = { "DEFAULT_MODE" }; @@ -40,27 +40,28 @@ class SqlBaseLexer extends Lexer { public static final String[] ruleNames = { "T__0", "T__1", "T__2", "T__3", "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", "AS", "ASC", "BETWEEN", "BY", "CAST", "CATALOG", "CATALOGS", "COLUMNS", - "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ESCAPE", "EXECUTABLE", "EXISTS", - "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", "FROM", "FULL", "FUNCTIONS", - "GRAPHVIZ", "GROUP", "HAVING", "IN", "INNER", "IS", "JOIN", "LAST", "LEFT", - "LIKE", "LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", "NULL", 
"NULLS", - "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", - "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SELECT", "SHOW", "SYS", "TABLE", - "TABLES", "TEXT", "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", - "WITH", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", - "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", "NEQ", "LT", "LTE", "GT", - "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", "DOT", - "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", - "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "EXPONENT", - "DIGIT", "LETTER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" + "CONVERT", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ESCAPE", "EXECUTABLE", + "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", "FROM", "FULL", + "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", "IN", "INNER", "IS", "JOIN", + "LAST", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", + "NULL", "NULLS", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", + "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SELECT", "SHOW", + "SYS", "TABLE", "TABLES", "TEXT", "TRUE", "TYPE", "TYPES", "USING", "VERIFY", + "WHERE", "WITH", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", + "TIME_ESC", "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", "NEQ", "LT", + "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", + "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", + "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", + "EXPONENT", "DIGIT", "LETTER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", + "WS", "UNRECOGNIZED" }; private static final String[] _LITERAL_NAMES = { null, "'('", "')'", "','", "':'", "'ALL'", "'ANALYZE'", "'ANALYZED'", "'AND'", "'ANY'", "'AS'", "'ASC'", "'BETWEEN'", "'BY'", "'CAST'", "'CATALOG'", - "'CATALOGS'", "'COLUMNS'", "'DEBUG'", "'DESC'", "'DESCRIBE'", "'DISTINCT'", - "'ESCAPE'", "'EXECUTABLE'", "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", - "'FIRST'", "'FORMAT'", "'FROM'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", + "'CATALOGS'", "'COLUMNS'", "'CONVERT'", "'DEBUG'", "'DESC'", "'DESCRIBE'", + "'DISTINCT'", "'ESCAPE'", "'EXECUTABLE'", "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", + "'FALSE'", "'FIRST'", "'FORMAT'", "'FROM'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", "'GROUP'", "'HAVING'", "'IN'", "'INNER'", "'IS'", "'JOIN'", "'LAST'", "'LEFT'", "'LIKE'", "'LIMIT'", "'MAPPED'", "'MATCH'", "'NATURAL'", "'NOT'", "'NULL'", "'NULLS'", "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", "'OUTER'", @@ -74,19 +75,19 @@ class SqlBaseLexer extends Lexer { private static final String[] _SYMBOLIC_NAMES = { null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", "AS", "ASC", "BETWEEN", "BY", "CAST", "CATALOG", "CATALOGS", "COLUMNS", - "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ESCAPE", "EXECUTABLE", "EXISTS", - "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", "FROM", "FULL", "FUNCTIONS", - "GRAPHVIZ", "GROUP", "HAVING", "IN", "INNER", "IS", "JOIN", "LAST", "LEFT", - "LIKE", "LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", "NULL", "NULLS", - "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", - "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SELECT", "SHOW", "SYS", "TABLE", - "TABLES", "TEXT", "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", - "WITH", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", - "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", "NEQ", "LT", "LTE", 
"GT", - "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", "DOT", - "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", - "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", - "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" + "CONVERT", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ESCAPE", "EXECUTABLE", + "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", "FROM", "FULL", + "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", "IN", "INNER", "IS", "JOIN", + "LAST", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", + "NULL", "NULLS", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", + "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SELECT", "SHOW", + "SYS", "TABLE", "TABLES", "TEXT", "TRUE", "TYPE", "TYPES", "USING", "VERIFY", + "WHERE", "WITH", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", + "TIME_ESC", "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", "NEQ", "LT", + "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", + "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", + "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", + "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -143,7 +144,7 @@ class SqlBaseLexer extends Lexer { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2n\u0386\b\1\4\2\t"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2o\u0390\b\1\4\2\t"+ "\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ @@ -155,298 +156,302 @@ class SqlBaseLexer extends Lexer { "\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT"+ "\4U\tU\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4^\t^\4_\t_\4"+ "`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4g\tg\4h\th\4i\ti\4j\tj\4k\t"+ - "k\4l\tl\4m\tm\4n\tn\4o\to\4p\tp\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3"+ - "\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3\b\3\b"+ - "\3\b\3\b\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\f\3\f\3\f\3"+ - "\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3\17\3\17"+ - "\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21"+ - "\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23\3\23"+ - "\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25"+ + "k\4l\tl\4m\tm\4n\tn\4o\to\4p\tp\4q\tq\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5"+ + "\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3"+ + "\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\f\3\f"+ + "\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3\17"+ + "\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21\3\21"+ + "\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23"+ + "\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\25"+ "\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27"+ - "\3\27\3\27\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30"+ - "\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32"+ - "\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\34\3\34"+ - 
"\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\35\3\35\3\36\3\36\3\36\3\36"+ - "\3\36\3\36\3\36\3\37\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3!\3!\3!\3!\3"+ - "!\3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3"+ - "#\3$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3&\3&\3&\3&\3&\3&\3\'\3\'\3\'\3(\3(\3"+ - "(\3(\3(\3)\3)\3)\3)\3)\3*\3*\3*\3*\3*\3+\3+\3+\3+\3+\3,\3,\3,\3,\3,\3"+ - ",\3-\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3.\3.\3/\3/\3/\3/\3/\3/\3/\3/\3\60"+ - "\3\60\3\60\3\60\3\61\3\61\3\61\3\61\3\61\3\62\3\62\3\62\3\62\3\62\3\62"+ - "\3\63\3\63\3\63\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\65"+ - "\3\65\3\65\3\66\3\66\3\66\3\66\3\66\3\66\3\67\3\67\3\67\3\67\3\67\3\67"+ - "\38\38\38\38\38\38\38\39\39\39\39\39\39\39\39\39\3:\3:\3:\3:\3:\3;\3;"+ - "\3;\3;\3;\3;\3<\3<\3<\3<\3<\3<\3=\3=\3=\3=\3=\3=\3>\3>\3>\3>\3>\3>\3>"+ - "\3>\3?\3?\3?\3?\3?\3?\3?\3@\3@\3@\3@\3@\3A\3A\3A\3A\3B\3B\3B\3B\3B\3B"+ - "\3C\3C\3C\3C\3C\3C\3C\3D\3D\3D\3D\3D\3E\3E\3E\3E\3E\3F\3F\3F\3F\3F\3G"+ - "\3G\3G\3G\3G\3G\3H\3H\3H\3H\3H\3H\3I\3I\3I\3I\3I\3I\3I\3J\3J\3J\3J\3J"+ - "\3J\3K\3K\3K\3K\3K\3L\3L\3L\3L\3L\3L\3L\3L\3M\3M\3M\3M\3N\3N\3N\3N\3N"+ - "\3N\3N\3O\3O\3O\3P\3P\3P\3Q\3Q\3Q\3Q\3R\3R\3R\3R\3R\3R\3S\3S\3T\3T\3U"+ - "\3U\3U\3U\3U\3U\3U\5U\u02c7\nU\3V\3V\3W\3W\3W\3X\3X\3Y\3Y\3Y\3Z\3Z\3["+ - "\3[\3\\\3\\\3]\3]\3^\3^\3_\3_\3_\3`\3`\3a\3a\3b\3b\3b\3b\7b\u02e8\nb\f"+ - "b\16b\u02eb\13b\3b\3b\3c\6c\u02f0\nc\rc\16c\u02f1\3d\6d\u02f5\nd\rd\16"+ - "d\u02f6\3d\3d\7d\u02fb\nd\fd\16d\u02fe\13d\3d\3d\6d\u0302\nd\rd\16d\u0303"+ - "\3d\6d\u0307\nd\rd\16d\u0308\3d\3d\7d\u030d\nd\fd\16d\u0310\13d\5d\u0312"+ - "\nd\3d\3d\3d\3d\6d\u0318\nd\rd\16d\u0319\3d\3d\5d\u031e\nd\3e\3e\5e\u0322"+ - "\ne\3e\3e\3e\7e\u0327\ne\fe\16e\u032a\13e\3f\3f\3f\3f\6f\u0330\nf\rf\16"+ - "f\u0331\3g\3g\3g\6g\u0337\ng\rg\16g\u0338\3h\3h\3h\3h\7h\u033f\nh\fh\16"+ - "h\u0342\13h\3h\3h\3i\3i\3i\3i\7i\u034a\ni\fi\16i\u034d\13i\3i\3i\3j\3"+ - "j\5j\u0353\nj\3j\6j\u0356\nj\rj\16j\u0357\3k\3k\3l\3l\3m\3m\3m\3m\7m\u0362"+ - "\nm\fm\16m\u0365\13m\3m\5m\u0368\nm\3m\5m\u036b\nm\3m\3m\3n\3n\3n\3n\3"+ - "n\7n\u0374\nn\fn\16n\u0377\13n\3n\3n\3n\3n\3n\3o\6o\u037f\no\ro\16o\u0380"+ - "\3o\3o\3p\3p\3\u0375\2q\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f"+ - "\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63"+ - "\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62"+ - "c\63e\64g\65i\66k\67m8o9q:s;u{?}@\177A\u0081B\u0083C\u0085D\u0087"+ - "E\u0089F\u008bG\u008dH\u008fI\u0091J\u0093K\u0095L\u0097M\u0099N\u009b"+ - "O\u009dP\u009fQ\u00a1R\u00a3S\u00a5T\u00a7U\u00a9V\u00abW\u00adX\u00af"+ - "Y\u00b1Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb_\u00bd`\u00bfa\u00c1b\u00c3"+ - "c\u00c5d\u00c7e\u00c9f\u00cbg\u00cdh\u00cfi\u00d1j\u00d3\2\u00d5\2\u00d7"+ - "\2\u00d9k\u00dbl\u00ddm\u00dfn\3\2\f\3\2))\4\2BBaa\5\2<\3\2\2\2\u0194\u0195\7H\2\2\u0195\u0196"+ - "\7W\2\2\u0196\u0197\7N\2\2\u0197\u0198\7N\2\2\u0198@\3\2\2\2\u0199\u019a"+ - "\7H\2\2\u019a\u019b\7W\2\2\u019b\u019c\7P\2\2\u019c\u019d\7E\2\2\u019d"+ - "\u019e\7V\2\2\u019e\u019f\7K\2\2\u019f\u01a0\7Q\2\2\u01a0\u01a1\7P\2\2"+ - "\u01a1\u01a2\7U\2\2\u01a2B\3\2\2\2\u01a3\u01a4\7I\2\2\u01a4\u01a5\7T\2"+ - "\2\u01a5\u01a6\7C\2\2\u01a6\u01a7\7R\2\2\u01a7\u01a8\7J\2\2\u01a8\u01a9"+ - "\7X\2\2\u01a9\u01aa\7K\2\2\u01aa\u01ab\7\\\2\2\u01abD\3\2\2\2\u01ac\u01ad"+ - "\7I\2\2\u01ad\u01ae\7T\2\2\u01ae\u01af\7Q\2\2\u01af\u01b0\7W\2\2\u01b0"+ - "\u01b1\7R\2\2\u01b1F\3\2\2\2\u01b2\u01b3\7J\2\2\u01b3\u01b4\7C\2\2\u01b4"+ - "\u01b5\7X\2\2\u01b5\u01b6\7K\2\2\u01b6\u01b7\7P\2\2\u01b7\u01b8\7I\2\2"+ - 
"\u01b8H\3\2\2\2\u01b9\u01ba\7K\2\2\u01ba\u01bb\7P\2\2\u01bbJ\3\2\2\2\u01bc"+ - "\u01bd\7K\2\2\u01bd\u01be\7P\2\2\u01be\u01bf\7P\2\2\u01bf\u01c0\7G\2\2"+ - "\u01c0\u01c1\7T\2\2\u01c1L\3\2\2\2\u01c2\u01c3\7K\2\2\u01c3\u01c4\7U\2"+ - "\2\u01c4N\3\2\2\2\u01c5\u01c6\7L\2\2\u01c6\u01c7\7Q\2\2\u01c7\u01c8\7"+ - "K\2\2\u01c8\u01c9\7P\2\2\u01c9P\3\2\2\2\u01ca\u01cb\7N\2\2\u01cb\u01cc"+ - "\7C\2\2\u01cc\u01cd\7U\2\2\u01cd\u01ce\7V\2\2\u01ceR\3\2\2\2\u01cf\u01d0"+ - "\7N\2\2\u01d0\u01d1\7G\2\2\u01d1\u01d2\7H\2\2\u01d2\u01d3\7V\2\2\u01d3"+ - "T\3\2\2\2\u01d4\u01d5\7N\2\2\u01d5\u01d6\7K\2\2\u01d6\u01d7\7M\2\2\u01d7"+ - "\u01d8\7G\2\2\u01d8V\3\2\2\2\u01d9\u01da\7N\2\2\u01da\u01db\7K\2\2\u01db"+ - "\u01dc\7O\2\2\u01dc\u01dd\7K\2\2\u01dd\u01de\7V\2\2\u01deX\3\2\2\2\u01df"+ - "\u01e0\7O\2\2\u01e0\u01e1\7C\2\2\u01e1\u01e2\7R\2\2\u01e2\u01e3\7R\2\2"+ - "\u01e3\u01e4\7G\2\2\u01e4\u01e5\7F\2\2\u01e5Z\3\2\2\2\u01e6\u01e7\7O\2"+ - "\2\u01e7\u01e8\7C\2\2\u01e8\u01e9\7V\2\2\u01e9\u01ea\7E\2\2\u01ea\u01eb"+ - "\7J\2\2\u01eb\\\3\2\2\2\u01ec\u01ed\7P\2\2\u01ed\u01ee\7C\2\2\u01ee\u01ef"+ - "\7V\2\2\u01ef\u01f0\7W\2\2\u01f0\u01f1\7T\2\2\u01f1\u01f2\7C\2\2\u01f2"+ - "\u01f3\7N\2\2\u01f3^\3\2\2\2\u01f4\u01f5\7P\2\2\u01f5\u01f6\7Q\2\2\u01f6"+ - "\u01f7\7V\2\2\u01f7`\3\2\2\2\u01f8\u01f9\7P\2\2\u01f9\u01fa\7W\2\2\u01fa"+ - "\u01fb\7N\2\2\u01fb\u01fc\7N\2\2\u01fcb\3\2\2\2\u01fd\u01fe\7P\2\2\u01fe"+ - "\u01ff\7W\2\2\u01ff\u0200\7N\2\2\u0200\u0201\7N\2\2\u0201\u0202\7U\2\2"+ - "\u0202d\3\2\2\2\u0203\u0204\7Q\2\2\u0204\u0205\7P\2\2\u0205f\3\2\2\2\u0206"+ - "\u0207\7Q\2\2\u0207\u0208\7R\2\2\u0208\u0209\7V\2\2\u0209\u020a\7K\2\2"+ - "\u020a\u020b\7O\2\2\u020b\u020c\7K\2\2\u020c\u020d\7\\\2\2\u020d\u020e"+ - "\7G\2\2\u020e\u020f\7F\2\2\u020fh\3\2\2\2\u0210\u0211\7Q\2\2\u0211\u0212"+ - "\7T\2\2\u0212j\3\2\2\2\u0213\u0214\7Q\2\2\u0214\u0215\7T\2\2\u0215\u0216"+ - "\7F\2\2\u0216\u0217\7G\2\2\u0217\u0218\7T\2\2\u0218l\3\2\2\2\u0219\u021a"+ - "\7Q\2\2\u021a\u021b\7W\2\2\u021b\u021c\7V\2\2\u021c\u021d\7G\2\2\u021d"+ - "\u021e\7T\2\2\u021en\3\2\2\2\u021f\u0220\7R\2\2\u0220\u0221\7C\2\2\u0221"+ - "\u0222\7T\2\2\u0222\u0223\7U\2\2\u0223\u0224\7G\2\2\u0224\u0225\7F\2\2"+ - "\u0225p\3\2\2\2\u0226\u0227\7R\2\2\u0227\u0228\7J\2\2\u0228\u0229\7[\2"+ - "\2\u0229\u022a\7U\2\2\u022a\u022b\7K\2\2\u022b\u022c\7E\2\2\u022c\u022d"+ - "\7C\2\2\u022d\u022e\7N\2\2\u022er\3\2\2\2\u022f\u0230\7R\2\2\u0230\u0231"+ - "\7N\2\2\u0231\u0232\7C\2\2\u0232\u0233\7P\2\2\u0233t\3\2\2\2\u0234\u0235"+ - "\7T\2\2\u0235\u0236\7K\2\2\u0236\u0237\7I\2\2\u0237\u0238\7J\2\2\u0238"+ - "\u0239\7V\2\2\u0239v\3\2\2\2\u023a\u023b\7T\2\2\u023b\u023c\7N\2\2\u023c"+ - "\u023d\7K\2\2\u023d\u023e\7M\2\2\u023e\u023f\7G\2\2\u023fx\3\2\2\2\u0240"+ - "\u0241\7S\2\2\u0241\u0242\7W\2\2\u0242\u0243\7G\2\2\u0243\u0244\7T\2\2"+ - "\u0244\u0245\7[\2\2\u0245z\3\2\2\2\u0246\u0247\7U\2\2\u0247\u0248\7E\2"+ - "\2\u0248\u0249\7J\2\2\u0249\u024a\7G\2\2\u024a\u024b\7O\2\2\u024b\u024c"+ - "\7C\2\2\u024c\u024d\7U\2\2\u024d|\3\2\2\2\u024e\u024f\7U\2\2\u024f\u0250"+ - "\7G\2\2\u0250\u0251\7N\2\2\u0251\u0252\7G\2\2\u0252\u0253\7E\2\2\u0253"+ - "\u0254\7V\2\2\u0254~\3\2\2\2\u0255\u0256\7U\2\2\u0256\u0257\7J\2\2\u0257"+ - "\u0258\7Q\2\2\u0258\u0259\7Y\2\2\u0259\u0080\3\2\2\2\u025a\u025b\7U\2"+ - "\2\u025b\u025c\7[\2\2\u025c\u025d\7U\2\2\u025d\u0082\3\2\2\2\u025e\u025f"+ - "\7V\2\2\u025f\u0260\7C\2\2\u0260\u0261\7D\2\2\u0261\u0262\7N\2\2\u0262"+ - "\u0263\7G\2\2\u0263\u0084\3\2\2\2\u0264\u0265\7V\2\2\u0265\u0266\7C\2"+ - "\2\u0266\u0267\7D\2\2\u0267\u0268\7N\2\2\u0268\u0269\7G\2\2\u0269\u026a"+ - 
"\7U\2\2\u026a\u0086\3\2\2\2\u026b\u026c\7V\2\2\u026c\u026d\7G\2\2\u026d"+ - "\u026e\7Z\2\2\u026e\u026f\7V\2\2\u026f\u0088\3\2\2\2\u0270\u0271\7V\2"+ - "\2\u0271\u0272\7T\2\2\u0272\u0273\7W\2\2\u0273\u0274\7G\2\2\u0274\u008a"+ - "\3\2\2\2\u0275\u0276\7V\2\2\u0276\u0277\7[\2\2\u0277\u0278\7R\2\2\u0278"+ - "\u0279\7G\2\2\u0279\u008c\3\2\2\2\u027a\u027b\7V\2\2\u027b\u027c\7[\2"+ - "\2\u027c\u027d\7R\2\2\u027d\u027e\7G\2\2\u027e\u027f\7U\2\2\u027f\u008e"+ - "\3\2\2\2\u0280\u0281\7W\2\2\u0281\u0282\7U\2\2\u0282\u0283\7K\2\2\u0283"+ - "\u0284\7P\2\2\u0284\u0285\7I\2\2\u0285\u0090\3\2\2\2\u0286\u0287\7X\2"+ - "\2\u0287\u0288\7G\2\2\u0288\u0289\7T\2\2\u0289\u028a\7K\2\2\u028a\u028b"+ - "\7H\2\2\u028b\u028c\7[\2\2\u028c\u0092\3\2\2\2\u028d\u028e\7Y\2\2\u028e"+ - "\u028f\7J\2\2\u028f\u0290\7G\2\2\u0290\u0291\7T\2\2\u0291\u0292\7G\2\2"+ - "\u0292\u0094\3\2\2\2\u0293\u0294\7Y\2\2\u0294\u0295\7K\2\2\u0295\u0296"+ - "\7V\2\2\u0296\u0297\7J\2\2\u0297\u0096\3\2\2\2\u0298\u0299\7}\2\2\u0299"+ - "\u029a\7G\2\2\u029a\u029b\7U\2\2\u029b\u029c\7E\2\2\u029c\u029d\7C\2\2"+ - "\u029d\u029e\7R\2\2\u029e\u029f\7G\2\2\u029f\u0098\3\2\2\2\u02a0\u02a1"+ - "\7}\2\2\u02a1\u02a2\7H\2\2\u02a2\u02a3\7P\2\2\u02a3\u009a\3\2\2\2\u02a4"+ - "\u02a5\7}\2\2\u02a5\u02a6\7N\2\2\u02a6\u02a7\7K\2\2\u02a7\u02a8\7O\2\2"+ - "\u02a8\u02a9\7K\2\2\u02a9\u02aa\7V\2\2\u02aa\u009c\3\2\2\2\u02ab\u02ac"+ - "\7}\2\2\u02ac\u02ad\7F\2\2\u02ad\u009e\3\2\2\2\u02ae\u02af\7}\2\2\u02af"+ - "\u02b0\7V\2\2\u02b0\u00a0\3\2\2\2\u02b1\u02b2\7}\2\2\u02b2\u02b3\7V\2"+ - "\2\u02b3\u02b4\7U\2\2\u02b4\u00a2\3\2\2\2\u02b5\u02b6\7}\2\2\u02b6\u02b7"+ - "\7I\2\2\u02b7\u02b8\7W\2\2\u02b8\u02b9\7K\2\2\u02b9\u02ba\7F\2\2\u02ba"+ - "\u00a4\3\2\2\2\u02bb\u02bc\7\177\2\2\u02bc\u00a6\3\2\2\2\u02bd\u02be\7"+ - "?\2\2\u02be\u00a8\3\2\2\2\u02bf\u02c0\7>\2\2\u02c0\u02c7\7@\2\2\u02c1"+ - "\u02c2\7#\2\2\u02c2\u02c7\7?\2\2\u02c3\u02c4\7>\2\2\u02c4\u02c5\7?\2\2"+ - "\u02c5\u02c7\7@\2\2\u02c6\u02bf\3\2\2\2\u02c6\u02c1\3\2\2\2\u02c6\u02c3"+ - "\3\2\2\2\u02c7\u00aa\3\2\2\2\u02c8\u02c9\7>\2\2\u02c9\u00ac\3\2\2\2\u02ca"+ - "\u02cb\7>\2\2\u02cb\u02cc\7?\2\2\u02cc\u00ae\3\2\2\2\u02cd\u02ce\7@\2"+ - "\2\u02ce\u00b0\3\2\2\2\u02cf\u02d0\7@\2\2\u02d0\u02d1\7?\2\2\u02d1\u00b2"+ - "\3\2\2\2\u02d2\u02d3\7-\2\2\u02d3\u00b4\3\2\2\2\u02d4\u02d5\7/\2\2\u02d5"+ - "\u00b6\3\2\2\2\u02d6\u02d7\7,\2\2\u02d7\u00b8\3\2\2\2\u02d8\u02d9\7\61"+ - "\2\2\u02d9\u00ba\3\2\2\2\u02da\u02db\7\'\2\2\u02db\u00bc\3\2\2\2\u02dc"+ - "\u02dd\7~\2\2\u02dd\u02de\7~\2\2\u02de\u00be\3\2\2\2\u02df\u02e0\7\60"+ - "\2\2\u02e0\u00c0\3\2\2\2\u02e1\u02e2\7A\2\2\u02e2\u00c2\3\2\2\2\u02e3"+ - "\u02e9\7)\2\2\u02e4\u02e8\n\2\2\2\u02e5\u02e6\7)\2\2\u02e6\u02e8\7)\2"+ - "\2\u02e7\u02e4\3\2\2\2\u02e7\u02e5\3\2\2\2\u02e8\u02eb\3\2\2\2\u02e9\u02e7"+ - "\3\2\2\2\u02e9\u02ea\3\2\2\2\u02ea\u02ec\3\2\2\2\u02eb\u02e9\3\2\2\2\u02ec"+ - "\u02ed\7)\2\2\u02ed\u00c4\3\2\2\2\u02ee\u02f0\5\u00d5k\2\u02ef\u02ee\3"+ - "\2\2\2\u02f0\u02f1\3\2\2\2\u02f1\u02ef\3\2\2\2\u02f1\u02f2\3\2\2\2\u02f2"+ - "\u00c6\3\2\2\2\u02f3\u02f5\5\u00d5k\2\u02f4\u02f3\3\2\2\2\u02f5\u02f6"+ - "\3\2\2\2\u02f6\u02f4\3\2\2\2\u02f6\u02f7\3\2\2\2\u02f7\u02f8\3\2\2\2\u02f8"+ - "\u02fc\5\u00bf`\2\u02f9\u02fb\5\u00d5k\2\u02fa\u02f9\3\2\2\2\u02fb\u02fe"+ - "\3\2\2\2\u02fc\u02fa\3\2\2\2\u02fc\u02fd\3\2\2\2\u02fd\u031e\3\2\2\2\u02fe"+ - "\u02fc\3\2\2\2\u02ff\u0301\5\u00bf`\2\u0300\u0302\5\u00d5k\2\u0301\u0300"+ - "\3\2\2\2\u0302\u0303\3\2\2\2\u0303\u0301\3\2\2\2\u0303\u0304\3\2\2\2\u0304"+ - "\u031e\3\2\2\2\u0305\u0307\5\u00d5k\2\u0306\u0305\3\2\2\2\u0307\u0308"+ - 
"\3\2\2\2\u0308\u0306\3\2\2\2\u0308\u0309\3\2\2\2\u0309\u0311\3\2\2\2\u030a"+ - "\u030e\5\u00bf`\2\u030b\u030d\5\u00d5k\2\u030c\u030b\3\2\2\2\u030d\u0310"+ - "\3\2\2\2\u030e\u030c\3\2\2\2\u030e\u030f\3\2\2\2\u030f\u0312\3\2\2\2\u0310"+ - "\u030e\3\2\2\2\u0311\u030a\3\2\2\2\u0311\u0312\3\2\2\2\u0312\u0313\3\2"+ - "\2\2\u0313\u0314\5\u00d3j\2\u0314\u031e\3\2\2\2\u0315\u0317\5\u00bf`\2"+ - "\u0316\u0318\5\u00d5k\2\u0317\u0316\3\2\2\2\u0318\u0319\3\2\2\2\u0319"+ - "\u0317\3\2\2\2\u0319\u031a\3\2\2\2\u031a\u031b\3\2\2\2\u031b\u031c\5\u00d3"+ - "j\2\u031c\u031e\3\2\2\2\u031d\u02f4\3\2\2\2\u031d\u02ff\3\2\2\2\u031d"+ - "\u0306\3\2\2\2\u031d\u0315\3\2\2\2\u031e\u00c8\3\2\2\2\u031f\u0322\5\u00d7"+ - "l\2\u0320\u0322\7a\2\2\u0321\u031f\3\2\2\2\u0321\u0320\3\2\2\2\u0322\u0328"+ - "\3\2\2\2\u0323\u0327\5\u00d7l\2\u0324\u0327\5\u00d5k\2\u0325\u0327\t\3"+ - "\2\2\u0326\u0323\3\2\2\2\u0326\u0324\3\2\2\2\u0326\u0325\3\2\2\2\u0327"+ - "\u032a\3\2\2\2\u0328\u0326\3\2\2\2\u0328\u0329\3\2\2\2\u0329\u00ca\3\2"+ - "\2\2\u032a\u0328\3\2\2\2\u032b\u032f\5\u00d5k\2\u032c\u0330\5\u00d7l\2"+ - "\u032d\u0330\5\u00d5k\2\u032e\u0330\t\4\2\2\u032f\u032c\3\2\2\2\u032f"+ - "\u032d\3\2\2\2\u032f\u032e\3\2\2\2\u0330\u0331\3\2\2\2\u0331\u032f\3\2"+ - "\2\2\u0331\u0332\3\2\2\2\u0332\u00cc\3\2\2\2\u0333\u0337\5\u00d7l\2\u0334"+ - "\u0337\5\u00d5k\2\u0335\u0337\7a\2\2\u0336\u0333\3\2\2\2\u0336\u0334\3"+ - "\2\2\2\u0336\u0335\3\2\2\2\u0337\u0338\3\2\2\2\u0338\u0336\3\2\2\2\u0338"+ - "\u0339\3\2\2\2\u0339\u00ce\3\2\2\2\u033a\u0340\7$\2\2\u033b\u033f\n\5"+ - "\2\2\u033c\u033d\7$\2\2\u033d\u033f\7$\2\2\u033e\u033b\3\2\2\2\u033e\u033c"+ - "\3\2\2\2\u033f\u0342\3\2\2\2\u0340\u033e\3\2\2\2\u0340\u0341\3\2\2\2\u0341"+ - "\u0343\3\2\2\2\u0342\u0340\3\2\2\2\u0343\u0344\7$\2\2\u0344\u00d0\3\2"+ - "\2\2\u0345\u034b\7b\2\2\u0346\u034a\n\6\2\2\u0347\u0348\7b\2\2\u0348\u034a"+ - "\7b\2\2\u0349\u0346\3\2\2\2\u0349\u0347\3\2\2\2\u034a\u034d\3\2\2\2\u034b"+ - "\u0349\3\2\2\2\u034b\u034c\3\2\2\2\u034c\u034e\3\2\2\2\u034d\u034b\3\2"+ - "\2\2\u034e\u034f\7b\2\2\u034f\u00d2\3\2\2\2\u0350\u0352\7G\2\2\u0351\u0353"+ - "\t\7\2\2\u0352\u0351\3\2\2\2\u0352\u0353\3\2\2\2\u0353\u0355\3\2\2\2\u0354"+ - "\u0356\5\u00d5k\2\u0355\u0354\3\2\2\2\u0356\u0357\3\2\2\2\u0357\u0355"+ - "\3\2\2\2\u0357\u0358\3\2\2\2\u0358\u00d4\3\2\2\2\u0359\u035a\t\b\2\2\u035a"+ - "\u00d6\3\2\2\2\u035b\u035c\t\t\2\2\u035c\u00d8\3\2\2\2\u035d\u035e\7/"+ - "\2\2\u035e\u035f\7/\2\2\u035f\u0363\3\2\2\2\u0360\u0362\n\n\2\2\u0361"+ - "\u0360\3\2\2\2\u0362\u0365\3\2\2\2\u0363\u0361\3\2\2\2\u0363\u0364\3\2"+ - "\2\2\u0364\u0367\3\2\2\2\u0365\u0363\3\2\2\2\u0366\u0368\7\17\2\2\u0367"+ - "\u0366\3\2\2\2\u0367\u0368\3\2\2\2\u0368\u036a\3\2\2\2\u0369\u036b\7\f"+ - "\2\2\u036a\u0369\3\2\2\2\u036a\u036b\3\2\2\2\u036b\u036c\3\2\2\2\u036c"+ - "\u036d\bm\2\2\u036d\u00da\3\2\2\2\u036e\u036f\7\61\2\2\u036f\u0370\7,"+ - "\2\2\u0370\u0375\3\2\2\2\u0371\u0374\5\u00dbn\2\u0372\u0374\13\2\2\2\u0373"+ - "\u0371\3\2\2\2\u0373\u0372\3\2\2\2\u0374\u0377\3\2\2\2\u0375\u0376\3\2"+ - "\2\2\u0375\u0373\3\2\2\2\u0376\u0378\3\2\2\2\u0377\u0375\3\2\2\2\u0378"+ - "\u0379\7,\2\2\u0379\u037a\7\61\2\2\u037a\u037b\3\2\2\2\u037b\u037c\bn"+ - "\2\2\u037c\u00dc\3\2\2\2\u037d\u037f\t\13\2\2\u037e\u037d\3\2\2\2\u037f"+ - "\u0380\3\2\2\2\u0380\u037e\3\2\2\2\u0380\u0381\3\2\2\2\u0381\u0382\3\2"+ - "\2\2\u0382\u0383\bo\2\2\u0383\u00de\3\2\2\2\u0384\u0385\13\2\2\2\u0385"+ - "\u00e0\3\2\2\2\"\2\u02c6\u02e7\u02e9\u02f1\u02f6\u02fc\u0303\u0308\u030e"+ - "\u0311\u0319\u031d\u0321\u0326\u0328\u032f\u0331\u0336\u0338\u033e\u0340"+ - 
"\u0349\u034b\u0352\u0357\u0363\u0367\u036a\u0373\u0375\u0380\3\2\3\2"; + "\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\30"+ + "\3\30\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\32\3\32"+ + "\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\34"+ + "\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\35\3\35\3\36"+ + "\3\36\3\36\3\36\3\36\3\36\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3 \3 \3 "+ + "\3 \3 \3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3"+ + "#\3#\3#\3#\3#\3#\3#\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\3%\3%\3&\3&\3&\3"+ + "\'\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3)\3)\3)\3)\3)\3*\3*\3*\3*\3*\3+\3+\3"+ + "+\3+\3+\3,\3,\3,\3,\3,\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3.\3.\3.\3/\3/\3"+ + "/\3/\3/\3/\3\60\3\60\3\60\3\60\3\60\3\60\3\60\3\60\3\61\3\61\3\61\3\61"+ + "\3\62\3\62\3\62\3\62\3\62\3\63\3\63\3\63\3\63\3\63\3\63\3\64\3\64\3\64"+ + "\3\65\3\65\3\65\3\65\3\65\3\65\3\65\3\65\3\65\3\65\3\66\3\66\3\66\3\67"+ + "\3\67\3\67\3\67\3\67\3\67\38\38\38\38\38\38\39\39\39\39\39\39\39\3:\3"+ + ":\3:\3:\3:\3:\3:\3:\3:\3;\3;\3;\3;\3;\3<\3<\3<\3<\3<\3<\3=\3=\3=\3=\3"+ + "=\3=\3>\3>\3>\3>\3>\3>\3?\3?\3?\3?\3?\3?\3?\3?\3@\3@\3@\3@\3@\3@\3@\3"+ + "A\3A\3A\3A\3A\3B\3B\3B\3B\3C\3C\3C\3C\3C\3C\3D\3D\3D\3D\3D\3D\3D\3E\3"+ + "E\3E\3E\3E\3F\3F\3F\3F\3F\3G\3G\3G\3G\3G\3H\3H\3H\3H\3H\3H\3I\3I\3I\3"+ + "I\3I\3I\3J\3J\3J\3J\3J\3J\3J\3K\3K\3K\3K\3K\3K\3L\3L\3L\3L\3L\3M\3M\3"+ + "M\3M\3M\3M\3M\3M\3N\3N\3N\3N\3O\3O\3O\3O\3O\3O\3O\3P\3P\3P\3Q\3Q\3Q\3"+ + "R\3R\3R\3R\3S\3S\3S\3S\3S\3S\3T\3T\3U\3U\3V\3V\3V\3V\3V\3V\3V\5V\u02d1"+ + "\nV\3W\3W\3X\3X\3X\3Y\3Y\3Z\3Z\3Z\3[\3[\3\\\3\\\3]\3]\3^\3^\3_\3_\3`\3"+ + "`\3`\3a\3a\3b\3b\3c\3c\3c\3c\7c\u02f2\nc\fc\16c\u02f5\13c\3c\3c\3d\6d"+ + "\u02fa\nd\rd\16d\u02fb\3e\6e\u02ff\ne\re\16e\u0300\3e\3e\7e\u0305\ne\f"+ + "e\16e\u0308\13e\3e\3e\6e\u030c\ne\re\16e\u030d\3e\6e\u0311\ne\re\16e\u0312"+ + "\3e\3e\7e\u0317\ne\fe\16e\u031a\13e\5e\u031c\ne\3e\3e\3e\3e\6e\u0322\n"+ + "e\re\16e\u0323\3e\3e\5e\u0328\ne\3f\3f\5f\u032c\nf\3f\3f\3f\7f\u0331\n"+ + "f\ff\16f\u0334\13f\3g\3g\3g\3g\6g\u033a\ng\rg\16g\u033b\3h\3h\3h\6h\u0341"+ + "\nh\rh\16h\u0342\3i\3i\3i\3i\7i\u0349\ni\fi\16i\u034c\13i\3i\3i\3j\3j"+ + "\3j\3j\7j\u0354\nj\fj\16j\u0357\13j\3j\3j\3k\3k\5k\u035d\nk\3k\6k\u0360"+ + "\nk\rk\16k\u0361\3l\3l\3m\3m\3n\3n\3n\3n\7n\u036c\nn\fn\16n\u036f\13n"+ + "\3n\5n\u0372\nn\3n\5n\u0375\nn\3n\3n\3o\3o\3o\3o\3o\7o\u037e\no\fo\16"+ + "o\u0381\13o\3o\3o\3o\3o\3o\3p\6p\u0389\np\rp\16p\u038a\3p\3p\3q\3q\3\u037f"+ + "\2r\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35"+ + "\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36"+ + ";\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67"+ + "m8o9q:s;u{?}@\177A\u0081B\u0083C\u0085D\u0087E\u0089F\u008bG\u008d"+ + "H\u008fI\u0091J\u0093K\u0095L\u0097M\u0099N\u009bO\u009dP\u009fQ\u00a1"+ + "R\u00a3S\u00a5T\u00a7U\u00a9V\u00abW\u00adX\u00afY\u00b1Z\u00b3[\u00b5"+ + "\\\u00b7]\u00b9^\u00bb_\u00bd`\u00bfa\u00c1b\u00c3c\u00c5d\u00c7e\u00c9"+ + "f\u00cbg\u00cdh\u00cfi\u00d1j\u00d3k\u00d5\2\u00d7\2\u00d9\2\u00dbl\u00dd"+ + "m\u00dfn\u00e1o\3\2\f\3\2))\4\2BBaa\5\2<\3\2\2\2\u0199\u019a\7H\2\2\u019a\u019b\7T\2"+ + "\2\u019b\u019c\7Q\2\2\u019c\u019d\7O\2\2\u019d@\3\2\2\2\u019e\u019f\7"+ + "H\2\2\u019f\u01a0\7W\2\2\u01a0\u01a1\7N\2\2\u01a1\u01a2\7N\2\2\u01a2B"+ + "\3\2\2\2\u01a3\u01a4\7H\2\2\u01a4\u01a5\7W\2\2\u01a5\u01a6\7P\2\2\u01a6"+ + "\u01a7\7E\2\2\u01a7\u01a8\7V\2\2\u01a8\u01a9\7K\2\2\u01a9\u01aa\7Q\2\2"+ + 
"\u01aa\u01ab\7P\2\2\u01ab\u01ac\7U\2\2\u01acD\3\2\2\2\u01ad\u01ae\7I\2"+ + "\2\u01ae\u01af\7T\2\2\u01af\u01b0\7C\2\2\u01b0\u01b1\7R\2\2\u01b1\u01b2"+ + "\7J\2\2\u01b2\u01b3\7X\2\2\u01b3\u01b4\7K\2\2\u01b4\u01b5\7\\\2\2\u01b5"+ + "F\3\2\2\2\u01b6\u01b7\7I\2\2\u01b7\u01b8\7T\2\2\u01b8\u01b9\7Q\2\2\u01b9"+ + "\u01ba\7W\2\2\u01ba\u01bb\7R\2\2\u01bbH\3\2\2\2\u01bc\u01bd\7J\2\2\u01bd"+ + "\u01be\7C\2\2\u01be\u01bf\7X\2\2\u01bf\u01c0\7K\2\2\u01c0\u01c1\7P\2\2"+ + "\u01c1\u01c2\7I\2\2\u01c2J\3\2\2\2\u01c3\u01c4\7K\2\2\u01c4\u01c5\7P\2"+ + "\2\u01c5L\3\2\2\2\u01c6\u01c7\7K\2\2\u01c7\u01c8\7P\2\2\u01c8\u01c9\7"+ + "P\2\2\u01c9\u01ca\7G\2\2\u01ca\u01cb\7T\2\2\u01cbN\3\2\2\2\u01cc\u01cd"+ + "\7K\2\2\u01cd\u01ce\7U\2\2\u01ceP\3\2\2\2\u01cf\u01d0\7L\2\2\u01d0\u01d1"+ + "\7Q\2\2\u01d1\u01d2\7K\2\2\u01d2\u01d3\7P\2\2\u01d3R\3\2\2\2\u01d4\u01d5"+ + "\7N\2\2\u01d5\u01d6\7C\2\2\u01d6\u01d7\7U\2\2\u01d7\u01d8\7V\2\2\u01d8"+ + "T\3\2\2\2\u01d9\u01da\7N\2\2\u01da\u01db\7G\2\2\u01db\u01dc\7H\2\2\u01dc"+ + "\u01dd\7V\2\2\u01ddV\3\2\2\2\u01de\u01df\7N\2\2\u01df\u01e0\7K\2\2\u01e0"+ + "\u01e1\7M\2\2\u01e1\u01e2\7G\2\2\u01e2X\3\2\2\2\u01e3\u01e4\7N\2\2\u01e4"+ + "\u01e5\7K\2\2\u01e5\u01e6\7O\2\2\u01e6\u01e7\7K\2\2\u01e7\u01e8\7V\2\2"+ + "\u01e8Z\3\2\2\2\u01e9\u01ea\7O\2\2\u01ea\u01eb\7C\2\2\u01eb\u01ec\7R\2"+ + "\2\u01ec\u01ed\7R\2\2\u01ed\u01ee\7G\2\2\u01ee\u01ef\7F\2\2\u01ef\\\3"+ + "\2\2\2\u01f0\u01f1\7O\2\2\u01f1\u01f2\7C\2\2\u01f2\u01f3\7V\2\2\u01f3"+ + "\u01f4\7E\2\2\u01f4\u01f5\7J\2\2\u01f5^\3\2\2\2\u01f6\u01f7\7P\2\2\u01f7"+ + "\u01f8\7C\2\2\u01f8\u01f9\7V\2\2\u01f9\u01fa\7W\2\2\u01fa\u01fb\7T\2\2"+ + "\u01fb\u01fc\7C\2\2\u01fc\u01fd\7N\2\2\u01fd`\3\2\2\2\u01fe\u01ff\7P\2"+ + "\2\u01ff\u0200\7Q\2\2\u0200\u0201\7V\2\2\u0201b\3\2\2\2\u0202\u0203\7"+ + "P\2\2\u0203\u0204\7W\2\2\u0204\u0205\7N\2\2\u0205\u0206\7N\2\2\u0206d"+ + "\3\2\2\2\u0207\u0208\7P\2\2\u0208\u0209\7W\2\2\u0209\u020a\7N\2\2\u020a"+ + "\u020b\7N\2\2\u020b\u020c\7U\2\2\u020cf\3\2\2\2\u020d\u020e\7Q\2\2\u020e"+ + "\u020f\7P\2\2\u020fh\3\2\2\2\u0210\u0211\7Q\2\2\u0211\u0212\7R\2\2\u0212"+ + "\u0213\7V\2\2\u0213\u0214\7K\2\2\u0214\u0215\7O\2\2\u0215\u0216\7K\2\2"+ + "\u0216\u0217\7\\\2\2\u0217\u0218\7G\2\2\u0218\u0219\7F\2\2\u0219j\3\2"+ + "\2\2\u021a\u021b\7Q\2\2\u021b\u021c\7T\2\2\u021cl\3\2\2\2\u021d\u021e"+ + "\7Q\2\2\u021e\u021f\7T\2\2\u021f\u0220\7F\2\2\u0220\u0221\7G\2\2\u0221"+ + "\u0222\7T\2\2\u0222n\3\2\2\2\u0223\u0224\7Q\2\2\u0224\u0225\7W\2\2\u0225"+ + "\u0226\7V\2\2\u0226\u0227\7G\2\2\u0227\u0228\7T\2\2\u0228p\3\2\2\2\u0229"+ + "\u022a\7R\2\2\u022a\u022b\7C\2\2\u022b\u022c\7T\2\2\u022c\u022d\7U\2\2"+ + "\u022d\u022e\7G\2\2\u022e\u022f\7F\2\2\u022fr\3\2\2\2\u0230\u0231\7R\2"+ + "\2\u0231\u0232\7J\2\2\u0232\u0233\7[\2\2\u0233\u0234\7U\2\2\u0234\u0235"+ + "\7K\2\2\u0235\u0236\7E\2\2\u0236\u0237\7C\2\2\u0237\u0238\7N\2\2\u0238"+ + "t\3\2\2\2\u0239\u023a\7R\2\2\u023a\u023b\7N\2\2\u023b\u023c\7C\2\2\u023c"+ + "\u023d\7P\2\2\u023dv\3\2\2\2\u023e\u023f\7T\2\2\u023f\u0240\7K\2\2\u0240"+ + "\u0241\7I\2\2\u0241\u0242\7J\2\2\u0242\u0243\7V\2\2\u0243x\3\2\2\2\u0244"+ + "\u0245\7T\2\2\u0245\u0246\7N\2\2\u0246\u0247\7K\2\2\u0247\u0248\7M\2\2"+ + "\u0248\u0249\7G\2\2\u0249z\3\2\2\2\u024a\u024b\7S\2\2\u024b\u024c\7W\2"+ + "\2\u024c\u024d\7G\2\2\u024d\u024e\7T\2\2\u024e\u024f\7[\2\2\u024f|\3\2"+ + "\2\2\u0250\u0251\7U\2\2\u0251\u0252\7E\2\2\u0252\u0253\7J\2\2\u0253\u0254"+ + "\7G\2\2\u0254\u0255\7O\2\2\u0255\u0256\7C\2\2\u0256\u0257\7U\2\2\u0257"+ + "~\3\2\2\2\u0258\u0259\7U\2\2\u0259\u025a\7G\2\2\u025a\u025b\7N\2\2\u025b"+ + 
"\u025c\7G\2\2\u025c\u025d\7E\2\2\u025d\u025e\7V\2\2\u025e\u0080\3\2\2"+ + "\2\u025f\u0260\7U\2\2\u0260\u0261\7J\2\2\u0261\u0262\7Q\2\2\u0262\u0263"+ + "\7Y\2\2\u0263\u0082\3\2\2\2\u0264\u0265\7U\2\2\u0265\u0266\7[\2\2\u0266"+ + "\u0267\7U\2\2\u0267\u0084\3\2\2\2\u0268\u0269\7V\2\2\u0269\u026a\7C\2"+ + "\2\u026a\u026b\7D\2\2\u026b\u026c\7N\2\2\u026c\u026d\7G\2\2\u026d\u0086"+ + "\3\2\2\2\u026e\u026f\7V\2\2\u026f\u0270\7C\2\2\u0270\u0271\7D\2\2\u0271"+ + "\u0272\7N\2\2\u0272\u0273\7G\2\2\u0273\u0274\7U\2\2\u0274\u0088\3\2\2"+ + "\2\u0275\u0276\7V\2\2\u0276\u0277\7G\2\2\u0277\u0278\7Z\2\2\u0278\u0279"+ + "\7V\2\2\u0279\u008a\3\2\2\2\u027a\u027b\7V\2\2\u027b\u027c\7T\2\2\u027c"+ + "\u027d\7W\2\2\u027d\u027e\7G\2\2\u027e\u008c\3\2\2\2\u027f\u0280\7V\2"+ + "\2\u0280\u0281\7[\2\2\u0281\u0282\7R\2\2\u0282\u0283\7G\2\2\u0283\u008e"+ + "\3\2\2\2\u0284\u0285\7V\2\2\u0285\u0286\7[\2\2\u0286\u0287\7R\2\2\u0287"+ + "\u0288\7G\2\2\u0288\u0289\7U\2\2\u0289\u0090\3\2\2\2\u028a\u028b\7W\2"+ + "\2\u028b\u028c\7U\2\2\u028c\u028d\7K\2\2\u028d\u028e\7P\2\2\u028e\u028f"+ + "\7I\2\2\u028f\u0092\3\2\2\2\u0290\u0291\7X\2\2\u0291\u0292\7G\2\2\u0292"+ + "\u0293\7T\2\2\u0293\u0294\7K\2\2\u0294\u0295\7H\2\2\u0295\u0296\7[\2\2"+ + "\u0296\u0094\3\2\2\2\u0297\u0298\7Y\2\2\u0298\u0299\7J\2\2\u0299\u029a"+ + "\7G\2\2\u029a\u029b\7T\2\2\u029b\u029c\7G\2\2\u029c\u0096\3\2\2\2\u029d"+ + "\u029e\7Y\2\2\u029e\u029f\7K\2\2\u029f\u02a0\7V\2\2\u02a0\u02a1\7J\2\2"+ + "\u02a1\u0098\3\2\2\2\u02a2\u02a3\7}\2\2\u02a3\u02a4\7G\2\2\u02a4\u02a5"+ + "\7U\2\2\u02a5\u02a6\7E\2\2\u02a6\u02a7\7C\2\2\u02a7\u02a8\7R\2\2\u02a8"+ + "\u02a9\7G\2\2\u02a9\u009a\3\2\2\2\u02aa\u02ab\7}\2\2\u02ab\u02ac\7H\2"+ + "\2\u02ac\u02ad\7P\2\2\u02ad\u009c\3\2\2\2\u02ae\u02af\7}\2\2\u02af\u02b0"+ + "\7N\2\2\u02b0\u02b1\7K\2\2\u02b1\u02b2\7O\2\2\u02b2\u02b3\7K\2\2\u02b3"+ + "\u02b4\7V\2\2\u02b4\u009e\3\2\2\2\u02b5\u02b6\7}\2\2\u02b6\u02b7\7F\2"+ + "\2\u02b7\u00a0\3\2\2\2\u02b8\u02b9\7}\2\2\u02b9\u02ba\7V\2\2\u02ba\u00a2"+ + "\3\2\2\2\u02bb\u02bc\7}\2\2\u02bc\u02bd\7V\2\2\u02bd\u02be\7U\2\2\u02be"+ + "\u00a4\3\2\2\2\u02bf\u02c0\7}\2\2\u02c0\u02c1\7I\2\2\u02c1\u02c2\7W\2"+ + "\2\u02c2\u02c3\7K\2\2\u02c3\u02c4\7F\2\2\u02c4\u00a6\3\2\2\2\u02c5\u02c6"+ + "\7\177\2\2\u02c6\u00a8\3\2\2\2\u02c7\u02c8\7?\2\2\u02c8\u00aa\3\2\2\2"+ + "\u02c9\u02ca\7>\2\2\u02ca\u02d1\7@\2\2\u02cb\u02cc\7#\2\2\u02cc\u02d1"+ + "\7?\2\2\u02cd\u02ce\7>\2\2\u02ce\u02cf\7?\2\2\u02cf\u02d1\7@\2\2\u02d0"+ + "\u02c9\3\2\2\2\u02d0\u02cb\3\2\2\2\u02d0\u02cd\3\2\2\2\u02d1\u00ac\3\2"+ + "\2\2\u02d2\u02d3\7>\2\2\u02d3\u00ae\3\2\2\2\u02d4\u02d5\7>\2\2\u02d5\u02d6"+ + "\7?\2\2\u02d6\u00b0\3\2\2\2\u02d7\u02d8\7@\2\2\u02d8\u00b2\3\2\2\2\u02d9"+ + "\u02da\7@\2\2\u02da\u02db\7?\2\2\u02db\u00b4\3\2\2\2\u02dc\u02dd\7-\2"+ + "\2\u02dd\u00b6\3\2\2\2\u02de\u02df\7/\2\2\u02df\u00b8\3\2\2\2\u02e0\u02e1"+ + "\7,\2\2\u02e1\u00ba\3\2\2\2\u02e2\u02e3\7\61\2\2\u02e3\u00bc\3\2\2\2\u02e4"+ + "\u02e5\7\'\2\2\u02e5\u00be\3\2\2\2\u02e6\u02e7\7~\2\2\u02e7\u02e8\7~\2"+ + "\2\u02e8\u00c0\3\2\2\2\u02e9\u02ea\7\60\2\2\u02ea\u00c2\3\2\2\2\u02eb"+ + "\u02ec\7A\2\2\u02ec\u00c4\3\2\2\2\u02ed\u02f3\7)\2\2\u02ee\u02f2\n\2\2"+ + "\2\u02ef\u02f0\7)\2\2\u02f0\u02f2\7)\2\2\u02f1\u02ee\3\2\2\2\u02f1\u02ef"+ + "\3\2\2\2\u02f2\u02f5\3\2\2\2\u02f3\u02f1\3\2\2\2\u02f3\u02f4\3\2\2\2\u02f4"+ + "\u02f6\3\2\2\2\u02f5\u02f3\3\2\2\2\u02f6\u02f7\7)\2\2\u02f7\u00c6\3\2"+ + "\2\2\u02f8\u02fa\5\u00d7l\2\u02f9\u02f8\3\2\2\2\u02fa\u02fb\3\2\2\2\u02fb"+ + "\u02f9\3\2\2\2\u02fb\u02fc\3\2\2\2\u02fc\u00c8\3\2\2\2\u02fd\u02ff\5\u00d7"+ + 
"l\2\u02fe\u02fd\3\2\2\2\u02ff\u0300\3\2\2\2\u0300\u02fe\3\2\2\2\u0300"+ + "\u0301\3\2\2\2\u0301\u0302\3\2\2\2\u0302\u0306\5\u00c1a\2\u0303\u0305"+ + "\5\u00d7l\2\u0304\u0303\3\2\2\2\u0305\u0308\3\2\2\2\u0306\u0304\3\2\2"+ + "\2\u0306\u0307\3\2\2\2\u0307\u0328\3\2\2\2\u0308\u0306\3\2\2\2\u0309\u030b"+ + "\5\u00c1a\2\u030a\u030c\5\u00d7l\2\u030b\u030a\3\2\2\2\u030c\u030d\3\2"+ + "\2\2\u030d\u030b\3\2\2\2\u030d\u030e\3\2\2\2\u030e\u0328\3\2\2\2\u030f"+ + "\u0311\5\u00d7l\2\u0310\u030f\3\2\2\2\u0311\u0312\3\2\2\2\u0312\u0310"+ + "\3\2\2\2\u0312\u0313\3\2\2\2\u0313\u031b\3\2\2\2\u0314\u0318\5\u00c1a"+ + "\2\u0315\u0317\5\u00d7l\2\u0316\u0315\3\2\2\2\u0317\u031a\3\2\2\2\u0318"+ + "\u0316\3\2\2\2\u0318\u0319\3\2\2\2\u0319\u031c\3\2\2\2\u031a\u0318\3\2"+ + "\2\2\u031b\u0314\3\2\2\2\u031b\u031c\3\2\2\2\u031c\u031d\3\2\2\2\u031d"+ + "\u031e\5\u00d5k\2\u031e\u0328\3\2\2\2\u031f\u0321\5\u00c1a\2\u0320\u0322"+ + "\5\u00d7l\2\u0321\u0320\3\2\2\2\u0322\u0323\3\2\2\2\u0323\u0321\3\2\2"+ + "\2\u0323\u0324\3\2\2\2\u0324\u0325\3\2\2\2\u0325\u0326\5\u00d5k\2\u0326"+ + "\u0328\3\2\2\2\u0327\u02fe\3\2\2\2\u0327\u0309\3\2\2\2\u0327\u0310\3\2"+ + "\2\2\u0327\u031f\3\2\2\2\u0328\u00ca\3\2\2\2\u0329\u032c\5\u00d9m\2\u032a"+ + "\u032c\7a\2\2\u032b\u0329\3\2\2\2\u032b\u032a\3\2\2\2\u032c\u0332\3\2"+ + "\2\2\u032d\u0331\5\u00d9m\2\u032e\u0331\5\u00d7l\2\u032f\u0331\t\3\2\2"+ + "\u0330\u032d\3\2\2\2\u0330\u032e\3\2\2\2\u0330\u032f\3\2\2\2\u0331\u0334"+ + "\3\2\2\2\u0332\u0330\3\2\2\2\u0332\u0333\3\2\2\2\u0333\u00cc\3\2\2\2\u0334"+ + "\u0332\3\2\2\2\u0335\u0339\5\u00d7l\2\u0336\u033a\5\u00d9m\2\u0337\u033a"+ + "\5\u00d7l\2\u0338\u033a\t\4\2\2\u0339\u0336\3\2\2\2\u0339\u0337\3\2\2"+ + "\2\u0339\u0338\3\2\2\2\u033a\u033b\3\2\2\2\u033b\u0339\3\2\2\2\u033b\u033c"+ + "\3\2\2\2\u033c\u00ce\3\2\2\2\u033d\u0341\5\u00d9m\2\u033e\u0341\5\u00d7"+ + "l\2\u033f\u0341\7a\2\2\u0340\u033d\3\2\2\2\u0340\u033e\3\2\2\2\u0340\u033f"+ + "\3\2\2\2\u0341\u0342\3\2\2\2\u0342\u0340\3\2\2\2\u0342\u0343\3\2\2\2\u0343"+ + "\u00d0\3\2\2\2\u0344\u034a\7$\2\2\u0345\u0349\n\5\2\2\u0346\u0347\7$\2"+ + "\2\u0347\u0349\7$\2\2\u0348\u0345\3\2\2\2\u0348\u0346\3\2\2\2\u0349\u034c"+ + "\3\2\2\2\u034a\u0348\3\2\2\2\u034a\u034b\3\2\2\2\u034b\u034d\3\2\2\2\u034c"+ + "\u034a\3\2\2\2\u034d\u034e\7$\2\2\u034e\u00d2\3\2\2\2\u034f\u0355\7b\2"+ + "\2\u0350\u0354\n\6\2\2\u0351\u0352\7b\2\2\u0352\u0354\7b\2\2\u0353\u0350"+ + "\3\2\2\2\u0353\u0351\3\2\2\2\u0354\u0357\3\2\2\2\u0355\u0353\3\2\2\2\u0355"+ + "\u0356\3\2\2\2\u0356\u0358\3\2\2\2\u0357\u0355\3\2\2\2\u0358\u0359\7b"+ + "\2\2\u0359\u00d4\3\2\2\2\u035a\u035c\7G\2\2\u035b\u035d\t\7\2\2\u035c"+ + "\u035b\3\2\2\2\u035c\u035d\3\2\2\2\u035d\u035f\3\2\2\2\u035e\u0360\5\u00d7"+ + "l\2\u035f\u035e\3\2\2\2\u0360\u0361\3\2\2\2\u0361\u035f\3\2\2\2\u0361"+ + "\u0362\3\2\2\2\u0362\u00d6\3\2\2\2\u0363\u0364\t\b\2\2\u0364\u00d8\3\2"+ + "\2\2\u0365\u0366\t\t\2\2\u0366\u00da\3\2\2\2\u0367\u0368\7/\2\2\u0368"+ + "\u0369\7/\2\2\u0369\u036d\3\2\2\2\u036a\u036c\n\n\2\2\u036b\u036a\3\2"+ + "\2\2\u036c\u036f\3\2\2\2\u036d\u036b\3\2\2\2\u036d\u036e\3\2\2\2\u036e"+ + "\u0371\3\2\2\2\u036f\u036d\3\2\2\2\u0370\u0372\7\17\2\2\u0371\u0370\3"+ + "\2\2\2\u0371\u0372\3\2\2\2\u0372\u0374\3\2\2\2\u0373\u0375\7\f\2\2\u0374"+ + "\u0373\3\2\2\2\u0374\u0375\3\2\2\2\u0375\u0376\3\2\2\2\u0376\u0377\bn"+ + "\2\2\u0377\u00dc\3\2\2\2\u0378\u0379\7\61\2\2\u0379\u037a\7,\2\2\u037a"+ + "\u037f\3\2\2\2\u037b\u037e\5\u00ddo\2\u037c\u037e\13\2\2\2\u037d\u037b"+ + "\3\2\2\2\u037d\u037c\3\2\2\2\u037e\u0381\3\2\2\2\u037f\u0380\3\2\2\2\u037f"+ + 
"\u037d\3\2\2\2\u0380\u0382\3\2\2\2\u0381\u037f\3\2\2\2\u0382\u0383\7,"+ + "\2\2\u0383\u0384\7\61\2\2\u0384\u0385\3\2\2\2\u0385\u0386\bo\2\2\u0386"+ + "\u00de\3\2\2\2\u0387\u0389\t\13\2\2\u0388\u0387\3\2\2\2\u0389\u038a\3"+ + "\2\2\2\u038a\u0388\3\2\2\2\u038a\u038b\3\2\2\2\u038b\u038c\3\2\2\2\u038c"+ + "\u038d\bp\2\2\u038d\u00e0\3\2\2\2\u038e\u038f\13\2\2\2\u038f\u00e2\3\2"+ + "\2\2\"\2\u02d0\u02f1\u02f3\u02fb\u0300\u0306\u030d\u0312\u0318\u031b\u0323"+ + "\u0327\u032b\u0330\u0332\u0339\u033b\u0340\u0342\u0348\u034a\u0353\u0355"+ + "\u035c\u0361\u036d\u0371\u0374\u037d\u037f\u038a\3\2\3\2"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java index 3bd7737aff1..4e449809cfe 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java @@ -713,6 +713,16 @@ interface SqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitCastTemplate(SqlBaseParser.CastTemplateContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#convertTemplate}. + * @param ctx the parse tree + */ + void enterConvertTemplate(SqlBaseParser.ConvertTemplateContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#convertTemplate}. + * @param ctx the parse tree + */ + void exitConvertTemplate(SqlBaseParser.ConvertTemplateContext ctx); /** * Enter a parse tree produced by {@link SqlBaseParser#extractExpression}. * @param ctx the parse tree diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java index 321cd058c08..4e25bd18dc9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java @@ -19,20 +19,20 @@ class SqlBaseParser extends Parser { public static final int T__0=1, T__1=2, T__2=3, T__3=4, ALL=5, ANALYZE=6, ANALYZED=7, AND=8, ANY=9, AS=10, ASC=11, BETWEEN=12, BY=13, CAST=14, CATALOG=15, CATALOGS=16, COLUMNS=17, - DEBUG=18, DESC=19, DESCRIBE=20, DISTINCT=21, ESCAPE=22, EXECUTABLE=23, - EXISTS=24, EXPLAIN=25, EXTRACT=26, FALSE=27, FIRST=28, FORMAT=29, FROM=30, - FULL=31, FUNCTIONS=32, GRAPHVIZ=33, GROUP=34, HAVING=35, IN=36, INNER=37, - IS=38, JOIN=39, LAST=40, LEFT=41, LIKE=42, LIMIT=43, MAPPED=44, MATCH=45, - NATURAL=46, NOT=47, NULL=48, NULLS=49, ON=50, OPTIMIZED=51, OR=52, ORDER=53, - OUTER=54, PARSED=55, PHYSICAL=56, PLAN=57, RIGHT=58, RLIKE=59, QUERY=60, - SCHEMAS=61, SELECT=62, SHOW=63, SYS=64, TABLE=65, TABLES=66, TEXT=67, - TRUE=68, TYPE=69, TYPES=70, USING=71, VERIFY=72, WHERE=73, WITH=74, ESCAPE_ESC=75, - FUNCTION_ESC=76, LIMIT_ESC=77, DATE_ESC=78, TIME_ESC=79, TIMESTAMP_ESC=80, - GUID_ESC=81, ESC_END=82, EQ=83, NEQ=84, LT=85, LTE=86, GT=87, GTE=88, - PLUS=89, MINUS=90, ASTERISK=91, SLASH=92, PERCENT=93, CONCAT=94, DOT=95, - PARAM=96, STRING=97, INTEGER_VALUE=98, DECIMAL_VALUE=99, IDENTIFIER=100, - DIGIT_IDENTIFIER=101, TABLE_IDENTIFIER=102, QUOTED_IDENTIFIER=103, BACKQUOTED_IDENTIFIER=104, - SIMPLE_COMMENT=105, BRACKETED_COMMENT=106, WS=107, UNRECOGNIZED=108, DELIMITER=109; + CONVERT=18, DEBUG=19, DESC=20, DESCRIBE=21, 
DISTINCT=22, ESCAPE=23, EXECUTABLE=24, + EXISTS=25, EXPLAIN=26, EXTRACT=27, FALSE=28, FIRST=29, FORMAT=30, FROM=31, + FULL=32, FUNCTIONS=33, GRAPHVIZ=34, GROUP=35, HAVING=36, IN=37, INNER=38, + IS=39, JOIN=40, LAST=41, LEFT=42, LIKE=43, LIMIT=44, MAPPED=45, MATCH=46, + NATURAL=47, NOT=48, NULL=49, NULLS=50, ON=51, OPTIMIZED=52, OR=53, ORDER=54, + OUTER=55, PARSED=56, PHYSICAL=57, PLAN=58, RIGHT=59, RLIKE=60, QUERY=61, + SCHEMAS=62, SELECT=63, SHOW=64, SYS=65, TABLE=66, TABLES=67, TEXT=68, + TRUE=69, TYPE=70, TYPES=71, USING=72, VERIFY=73, WHERE=74, WITH=75, ESCAPE_ESC=76, + FUNCTION_ESC=77, LIMIT_ESC=78, DATE_ESC=79, TIME_ESC=80, TIMESTAMP_ESC=81, + GUID_ESC=82, ESC_END=83, EQ=84, NEQ=85, LT=86, LTE=87, GT=88, GTE=89, + PLUS=90, MINUS=91, ASTERISK=92, SLASH=93, PERCENT=94, CONCAT=95, DOT=96, + PARAM=97, STRING=98, INTEGER_VALUE=99, DECIMAL_VALUE=100, IDENTIFIER=101, + DIGIT_IDENTIFIER=102, TABLE_IDENTIFIER=103, QUOTED_IDENTIFIER=104, BACKQUOTED_IDENTIFIER=105, + SIMPLE_COMMENT=106, BRACKETED_COMMENT=107, WS=108, UNRECOGNIZED=109, DELIMITER=110; public static final int RULE_singleStatement = 0, RULE_singleExpression = 1, RULE_statement = 2, RULE_query = 3, RULE_queryNoWith = 4, RULE_limitClause = 5, RULE_queryTerm = 6, @@ -43,12 +43,13 @@ class SqlBaseParser extends Parser { RULE_expression = 21, RULE_booleanExpression = 22, RULE_matchQueryOptions = 23, RULE_predicated = 24, RULE_predicate = 25, RULE_likePattern = 26, RULE_pattern = 27, RULE_patternEscape = 28, RULE_valueExpression = 29, RULE_primaryExpression = 30, - RULE_castExpression = 31, RULE_castTemplate = 32, RULE_extractExpression = 33, - RULE_extractTemplate = 34, RULE_functionExpression = 35, RULE_functionTemplate = 36, - RULE_functionName = 37, RULE_constant = 38, RULE_comparisonOperator = 39, - RULE_booleanValue = 40, RULE_dataType = 41, RULE_qualifiedName = 42, RULE_identifier = 43, - RULE_tableIdentifier = 44, RULE_quoteIdentifier = 45, RULE_unquoteIdentifier = 46, - RULE_number = 47, RULE_string = 48, RULE_nonReserved = 49; + RULE_castExpression = 31, RULE_castTemplate = 32, RULE_convertTemplate = 33, + RULE_extractExpression = 34, RULE_extractTemplate = 35, RULE_functionExpression = 36, + RULE_functionTemplate = 37, RULE_functionName = 38, RULE_constant = 39, + RULE_comparisonOperator = 40, RULE_booleanValue = 41, RULE_dataType = 42, + RULE_qualifiedName = 43, RULE_identifier = 44, RULE_tableIdentifier = 45, + RULE_quoteIdentifier = 46, RULE_unquoteIdentifier = 47, RULE_number = 48, + RULE_string = 49, RULE_nonReserved = 50; public static final String[] ruleNames = { "singleStatement", "singleExpression", "statement", "query", "queryNoWith", "limitClause", "queryTerm", "orderBy", "querySpecification", "fromClause", @@ -57,18 +58,18 @@ class SqlBaseParser extends Parser { "relationPrimary", "expression", "booleanExpression", "matchQueryOptions", "predicated", "predicate", "likePattern", "pattern", "patternEscape", "valueExpression", "primaryExpression", "castExpression", "castTemplate", - "extractExpression", "extractTemplate", "functionExpression", "functionTemplate", - "functionName", "constant", "comparisonOperator", "booleanValue", "dataType", - "qualifiedName", "identifier", "tableIdentifier", "quoteIdentifier", "unquoteIdentifier", - "number", "string", "nonReserved" + "convertTemplate", "extractExpression", "extractTemplate", "functionExpression", + "functionTemplate", "functionName", "constant", "comparisonOperator", + "booleanValue", "dataType", "qualifiedName", "identifier", "tableIdentifier", + "quoteIdentifier", 
"unquoteIdentifier", "number", "string", "nonReserved" }; private static final String[] _LITERAL_NAMES = { null, "'('", "')'", "','", "':'", "'ALL'", "'ANALYZE'", "'ANALYZED'", "'AND'", "'ANY'", "'AS'", "'ASC'", "'BETWEEN'", "'BY'", "'CAST'", "'CATALOG'", - "'CATALOGS'", "'COLUMNS'", "'DEBUG'", "'DESC'", "'DESCRIBE'", "'DISTINCT'", - "'ESCAPE'", "'EXECUTABLE'", "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", - "'FIRST'", "'FORMAT'", "'FROM'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", + "'CATALOGS'", "'COLUMNS'", "'CONVERT'", "'DEBUG'", "'DESC'", "'DESCRIBE'", + "'DISTINCT'", "'ESCAPE'", "'EXECUTABLE'", "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", + "'FALSE'", "'FIRST'", "'FORMAT'", "'FROM'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", "'GROUP'", "'HAVING'", "'IN'", "'INNER'", "'IS'", "'JOIN'", "'LAST'", "'LEFT'", "'LIKE'", "'LIMIT'", "'MAPPED'", "'MATCH'", "'NATURAL'", "'NOT'", "'NULL'", "'NULLS'", "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", "'OUTER'", @@ -82,19 +83,19 @@ class SqlBaseParser extends Parser { private static final String[] _SYMBOLIC_NAMES = { null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", "AS", "ASC", "BETWEEN", "BY", "CAST", "CATALOG", "CATALOGS", "COLUMNS", - "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ESCAPE", "EXECUTABLE", "EXISTS", - "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", "FROM", "FULL", "FUNCTIONS", - "GRAPHVIZ", "GROUP", "HAVING", "IN", "INNER", "IS", "JOIN", "LAST", "LEFT", - "LIKE", "LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", "NULL", "NULLS", - "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", - "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SELECT", "SHOW", "SYS", "TABLE", - "TABLES", "TEXT", "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", - "WITH", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", - "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", "NEQ", "LT", "LTE", "GT", - "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", "DOT", - "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", - "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", - "BRACKETED_COMMENT", "WS", "UNRECOGNIZED", "DELIMITER" + "CONVERT", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ESCAPE", "EXECUTABLE", + "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", "FROM", "FULL", + "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", "IN", "INNER", "IS", "JOIN", + "LAST", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", + "NULL", "NULLS", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", + "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SELECT", "SHOW", + "SYS", "TABLE", "TABLES", "TEXT", "TRUE", "TYPE", "TYPES", "USING", "VERIFY", + "WHERE", "WITH", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", + "TIME_ESC", "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", "NEQ", "LT", + "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", + "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", + "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", + "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED", "DELIMITER" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -175,9 +176,9 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(100); + setState(102); statement(); - setState(101); + setState(103); match(EOF); } } @@ -222,9 +223,9 @@ class SqlBaseParser extends Parser { try { 
enterOuterAlt(_localctx, 1); { - setState(103); + setState(105); expression(); - setState(104); + setState(106); match(EOF); } } @@ -618,14 +619,14 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 4, RULE_statement); int _la; try { - setState(206); + setState(208); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,17,_ctx) ) { case 1: _localctx = new StatementDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(106); + setState(108); query(); } break; @@ -633,27 +634,27 @@ class SqlBaseParser extends Parser { _localctx = new ExplainContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(107); + setState(109); match(EXPLAIN); - setState(121); + setState(123); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: { - setState(108); + setState(110); match(T__0); - setState(117); + setState(119); _errHandler.sync(this); _la = _input.LA(1); - while (((((_la - 29)) & ~0x3f) == 0 && ((1L << (_la - 29)) & ((1L << (FORMAT - 29)) | (1L << (PLAN - 29)) | (1L << (VERIFY - 29)))) != 0)) { + while (((((_la - 30)) & ~0x3f) == 0 && ((1L << (_la - 30)) & ((1L << (FORMAT - 30)) | (1L << (PLAN - 30)) | (1L << (VERIFY - 30)))) != 0)) { { - setState(115); + setState(117); switch (_input.LA(1)) { case PLAN: { - setState(109); + setState(111); match(PLAN); - setState(110); + setState(112); ((ExplainContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ALL) | (1L << ANALYZED) | (1L << EXECUTABLE) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED))) != 0)) ) { @@ -665,9 +666,9 @@ class SqlBaseParser extends Parser { break; case FORMAT: { - setState(111); + setState(113); match(FORMAT); - setState(112); + setState(114); ((ExplainContext)_localctx).format = _input.LT(1); _la = _input.LA(1); if ( !(_la==GRAPHVIZ || _la==TEXT) ) { @@ -679,9 +680,9 @@ class SqlBaseParser extends Parser { break; case VERIFY: { - setState(113); + setState(115); match(VERIFY); - setState(114); + setState(116); ((ExplainContext)_localctx).verify = booleanValue(); } break; @@ -689,16 +690,16 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } } - setState(119); + setState(121); _errHandler.sync(this); _la = _input.LA(1); } - setState(120); + setState(122); match(T__1); } break; } - setState(123); + setState(125); statement(); } break; @@ -706,27 +707,27 @@ class SqlBaseParser extends Parser { _localctx = new DebugContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(124); + setState(126); match(DEBUG); - setState(136); + setState(138); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) { case 1: { - setState(125); + setState(127); match(T__0); - setState(132); + setState(134); _errHandler.sync(this); _la = _input.LA(1); while (_la==FORMAT || _la==PLAN) { { - setState(130); + setState(132); switch (_input.LA(1)) { case PLAN: { - setState(126); + setState(128); match(PLAN); - setState(127); + setState(129); ((DebugContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !(_la==ANALYZED || _la==OPTIMIZED) ) { @@ -738,9 +739,9 @@ class SqlBaseParser extends Parser { break; case FORMAT: { - setState(128); + setState(130); match(FORMAT); - setState(129); + setState(131); ((DebugContext)_localctx).format = _input.LT(1); _la = _input.LA(1); if ( !(_la==GRAPHVIZ || _la==TEXT) ) { @@ -754,16 +755,16 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } } - setState(134); + setState(136); 
_errHandler.sync(this); _la = _input.LA(1); } - setState(135); + setState(137); match(T__1); } break; } - setState(138); + setState(140); statement(); } break; @@ -771,15 +772,15 @@ class SqlBaseParser extends Parser { _localctx = new ShowTablesContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(139); + setState(141); match(SHOW); - setState(140); + setState(142); match(TABLES); - setState(143); + setState(145); switch (_input.LA(1)) { case LIKE: { - setState(141); + setState(143); ((ShowTablesContext)_localctx).tableLike = likePattern(); } break; @@ -814,7 +815,7 @@ class SqlBaseParser extends Parser { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(142); + setState(144); ((ShowTablesContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -829,22 +830,22 @@ class SqlBaseParser extends Parser { _localctx = new ShowColumnsContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(145); - match(SHOW); - setState(146); - match(COLUMNS); setState(147); + match(SHOW); + setState(148); + match(COLUMNS); + setState(149); _la = _input.LA(1); if ( !(_la==FROM || _la==IN) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(150); + setState(152); switch (_input.LA(1)) { case LIKE: { - setState(148); + setState(150); ((ShowColumnsContext)_localctx).tableLike = likePattern(); } break; @@ -879,7 +880,7 @@ class SqlBaseParser extends Parser { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(149); + setState(151); ((ShowColumnsContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -892,18 +893,18 @@ class SqlBaseParser extends Parser { _localctx = new ShowColumnsContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(152); + setState(154); _la = _input.LA(1); if ( !(_la==DESC || _la==DESCRIBE) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(155); + setState(157); switch (_input.LA(1)) { case LIKE: { - setState(153); + setState(155); ((ShowColumnsContext)_localctx).tableLike = likePattern(); } break; @@ -938,7 +939,7 @@ class SqlBaseParser extends Parser { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(154); + setState(156); ((ShowColumnsContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -951,15 +952,15 @@ class SqlBaseParser extends Parser { _localctx = new ShowFunctionsContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(157); + setState(159); match(SHOW); - setState(158); - match(FUNCTIONS); setState(160); + match(FUNCTIONS); + setState(162); _la = _input.LA(1); if (_la==LIKE) { { - setState(159); + setState(161); likePattern(); } } @@ -970,9 +971,9 @@ class SqlBaseParser extends Parser { _localctx = new ShowSchemasContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(162); + setState(164); match(SHOW); - setState(163); + setState(165); match(SCHEMAS); } break; @@ -980,9 +981,9 @@ class SqlBaseParser extends Parser { _localctx = new SysCatalogsContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(164); + setState(166); match(SYS); - setState(165); + setState(167); match(CATALOGS); } break; @@ -990,58 +991,58 @@ class SqlBaseParser extends Parser { _localctx = new SysTablesContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(166); + setState(168); match(SYS); - setState(167); + setState(169); match(TABLES); - setState(170); + setState(172); _la = _input.LA(1); if (_la==CATALOG) { { - setState(168); + setState(170); match(CATALOG); - setState(169); + setState(171); ((SysTablesContext)_localctx).clusterLike = likePattern(); } } - 
setState(174); + setState(176); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: { - setState(172); + setState(174); ((SysTablesContext)_localctx).tableLike = likePattern(); } break; case 2: { - setState(173); + setState(175); ((SysTablesContext)_localctx).tableIdent = tableIdentifier(); } break; } - setState(185); + setState(187); _la = _input.LA(1); if (_la==TYPE) { { - setState(176); + setState(178); match(TYPE); - setState(177); + setState(179); string(); - setState(182); + setState(184); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(178); + setState(180); match(T__2); - setState(179); + setState(181); string(); } } - setState(184); + setState(186); _errHandler.sync(this); _la = _input.LA(1); } @@ -1054,28 +1055,28 @@ class SqlBaseParser extends Parser { _localctx = new SysColumnsContext(_localctx); enterOuterAlt(_localctx, 11); { - setState(187); + setState(189); match(SYS); - setState(188); + setState(190); match(COLUMNS); - setState(191); + setState(193); _la = _input.LA(1); if (_la==CATALOG) { { - setState(189); + setState(191); match(CATALOG); - setState(190); + setState(192); ((SysColumnsContext)_localctx).cluster = string(); } } - setState(196); + setState(198); switch (_input.LA(1)) { case TABLE: { - setState(193); + setState(195); match(TABLE); - setState(194); + setState(196); ((SysColumnsContext)_localctx).tableLike = likePattern(); } break; @@ -1110,7 +1111,7 @@ class SqlBaseParser extends Parser { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(195); + setState(197); ((SysColumnsContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -1120,11 +1121,11 @@ class SqlBaseParser extends Parser { default: throw new NoViableAltException(this); } - setState(199); + setState(201); _la = _input.LA(1); if (_la==LIKE) { { - setState(198); + setState(200); ((SysColumnsContext)_localctx).columnPattern = likePattern(); } } @@ -1135,9 +1136,9 @@ class SqlBaseParser extends Parser { _localctx = new SysTypesContext(_localctx); enterOuterAlt(_localctx, 12); { - setState(201); + setState(203); match(SYS); - setState(202); + setState(204); match(TYPES); } break; @@ -1145,11 +1146,11 @@ class SqlBaseParser extends Parser { _localctx = new SysTableTypesContext(_localctx); enterOuterAlt(_localctx, 13); { - setState(203); - match(SYS); - setState(204); - match(TABLE); setState(205); + match(SYS); + setState(206); + match(TABLE); + setState(207); match(TYPES); } break; @@ -1203,34 +1204,34 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(217); + setState(219); _la = _input.LA(1); if (_la==WITH) { { - setState(208); + setState(210); match(WITH); - setState(209); + setState(211); namedQuery(); - setState(214); + setState(216); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(210); + setState(212); match(T__2); - setState(211); + setState(213); namedQuery(); } } - setState(216); + setState(218); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(219); + setState(221); queryNoWith(); } } @@ -1286,42 +1287,42 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(221); + setState(223); queryTerm(); - setState(232); + setState(234); _la = _input.LA(1); if (_la==ORDER) { { - setState(222); - match(ORDER); - setState(223); - match(BY); setState(224); + match(ORDER); + setState(225); + match(BY); + setState(226); orderBy(); - setState(229); + setState(231); _errHandler.sync(this); _la = 
_input.LA(1); while (_la==T__2) { { { - setState(225); + setState(227); match(T__2); - setState(226); + setState(228); orderBy(); } } - setState(231); + setState(233); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(235); + setState(237); _la = _input.LA(1); if (_la==LIMIT || _la==LIMIT_ESC) { { - setState(234); + setState(236); limitClause(); } } @@ -1370,14 +1371,14 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 10, RULE_limitClause); int _la; try { - setState(242); + setState(244); switch (_input.LA(1)) { case LIMIT: enterOuterAlt(_localctx, 1); { - setState(237); + setState(239); match(LIMIT); - setState(238); + setState(240); ((LimitClauseContext)_localctx).limit = _input.LT(1); _la = _input.LA(1); if ( !(_la==ALL || _la==INTEGER_VALUE) ) { @@ -1390,9 +1391,9 @@ class SqlBaseParser extends Parser { case LIMIT_ESC: enterOuterAlt(_localctx, 2); { - setState(239); + setState(241); match(LIMIT_ESC); - setState(240); + setState(242); ((LimitClauseContext)_localctx).limit = _input.LT(1); _la = _input.LA(1); if ( !(_la==ALL || _la==INTEGER_VALUE) ) { @@ -1400,7 +1401,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(241); + setState(243); match(ESC_END); } break; @@ -1473,13 +1474,13 @@ class SqlBaseParser extends Parser { QueryTermContext _localctx = new QueryTermContext(_ctx, getState()); enterRule(_localctx, 12, RULE_queryTerm); try { - setState(249); + setState(251); switch (_input.LA(1)) { case SELECT: _localctx = new QueryPrimaryDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(244); + setState(246); querySpecification(); } break; @@ -1487,11 +1488,11 @@ class SqlBaseParser extends Parser { _localctx = new SubqueryContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(245); - match(T__0); - setState(246); - queryNoWith(); setState(247); + match(T__0); + setState(248); + queryNoWith(); + setState(249); match(T__1); } break; @@ -1547,13 +1548,13 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(251); - expression(); setState(253); + expression(); + setState(255); _la = _input.LA(1); if (_la==ASC || _la==DESC) { { - setState(252); + setState(254); ((OrderByContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -1564,13 +1565,13 @@ class SqlBaseParser extends Parser { } } - setState(257); + setState(259); _la = _input.LA(1); if (_la==NULLS) { { - setState(255); + setState(257); match(NULLS); - setState(256); + setState(258); ((OrderByContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -1649,75 +1650,75 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(259); - match(SELECT); setState(261); + match(SELECT); + setState(263); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(260); + setState(262); setQuantifier(); } } - setState(263); + setState(265); selectItem(); - setState(268); + setState(270); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(264); + setState(266); match(T__2); - setState(265); + setState(267); selectItem(); } } - setState(270); + setState(272); _errHandler.sync(this); _la = _input.LA(1); } - setState(272); + setState(274); _la = _input.LA(1); if (_la==FROM) { { - setState(271); + setState(273); fromClause(); } } - setState(276); + setState(278); _la = _input.LA(1); if (_la==WHERE) { { - setState(274); + setState(276); match(WHERE); - setState(275); + setState(277); 
((QuerySpecificationContext)_localctx).where = booleanExpression(0); } } - setState(281); + setState(283); _la = _input.LA(1); if (_la==GROUP) { { - setState(278); - match(GROUP); - setState(279); - match(BY); setState(280); + match(GROUP); + setState(281); + match(BY); + setState(282); groupBy(); } } - setState(285); + setState(287); _la = _input.LA(1); if (_la==HAVING) { { - setState(283); + setState(285); match(HAVING); - setState(284); + setState(286); ((QuerySpecificationContext)_localctx).having = booleanExpression(0); } } @@ -1769,23 +1770,23 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(287); + setState(289); match(FROM); - setState(288); + setState(290); relation(); - setState(293); + setState(295); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(289); + setState(291); match(T__2); - setState(290); + setState(292); relation(); } } - setState(295); + setState(297); _errHandler.sync(this); _la = _input.LA(1); } @@ -1838,30 +1839,30 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(297); + setState(299); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(296); + setState(298); setQuantifier(); } } - setState(299); + setState(301); groupingElement(); - setState(304); + setState(306); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(300); + setState(302); match(T__2); - setState(301); + setState(303); groupingElement(); } } - setState(306); + setState(308); _errHandler.sync(this); _la = _input.LA(1); } @@ -1916,7 +1917,7 @@ class SqlBaseParser extends Parser { _localctx = new SingleGroupingSetContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(307); + setState(309); groupingExpressions(); } } @@ -1962,47 +1963,47 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 24, RULE_groupingExpressions); int _la; try { - setState(322); + setState(324); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(309); + setState(311); match(T__0); - setState(318); + setState(320); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << LEFT) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RIGHT) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (FUNCTION_ESC - 64)) | (1L << (DATE_ESC - 64)) | (1L << (TIME_ESC - 64)) | (1L << (TIMESTAMP_ESC - 64)) | (1L << (GUID_ESC - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) 
| (1L << CONVERT) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << LEFT) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RIGHT) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SHOW - 64)) | (1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (FUNCTION_ESC - 64)) | (1L << (DATE_ESC - 64)) | (1L << (TIME_ESC - 64)) | (1L << (TIMESTAMP_ESC - 64)) | (1L << (GUID_ESC - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(310); + setState(312); expression(); - setState(315); + setState(317); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(311); + setState(313); match(T__2); - setState(312); + setState(314); expression(); } } - setState(317); + setState(319); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(320); + setState(322); match(T__1); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(321); + setState(323); expression(); } break; @@ -2053,15 +2054,15 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(324); - ((NamedQueryContext)_localctx).name = identifier(); - setState(325); - match(AS); setState(326); - match(T__0); + ((NamedQueryContext)_localctx).name = identifier(); setState(327); - queryNoWith(); + match(AS); setState(328); + match(T__0); + setState(329); + queryNoWith(); + setState(330); match(T__1); } } @@ -2105,7 +2106,7 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(330); + setState(332); _la = _input.LA(1); if ( !(_la==ALL || _la==DISTINCT) ) { _errHandler.recoverInline(this); @@ -2168,22 +2169,22 @@ class SqlBaseParser extends Parser { _localctx = new SelectExpressionContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(332); + setState(334); expression(); - setState(337); + setState(339); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << 
PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SHOW - 64)) | (1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(334); + setState(336); _la = _input.LA(1); if (_la==AS) { { - setState(333); + setState(335); match(AS); } } - setState(336); + setState(338); identifier(); } } @@ -2237,19 +2238,19 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(339); + setState(341); relationPrimary(); - setState(343); + setState(345); _errHandler.sync(this); _la = _input.LA(1); while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << FULL) | (1L << INNER) | (1L << JOIN) | (1L << LEFT) | (1L << NATURAL) | (1L << RIGHT))) != 0)) { { { - setState(340); + setState(342); joinRelation(); } } - setState(345); + setState(347); _errHandler.sync(this); _la = _input.LA(1); } @@ -2303,7 +2304,7 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 34, RULE_joinRelation); int _la; try { - setState(357); + setState(359); switch (_input.LA(1)) { case FULL: case INNER: @@ -2313,18 +2314,18 @@ class SqlBaseParser extends Parser { enterOuterAlt(_localctx, 1); { { - setState(346); + setState(348); joinType(); } - setState(347); + setState(349); match(JOIN); - setState(348); - ((JoinRelationContext)_localctx).right = relationPrimary(); setState(350); + ((JoinRelationContext)_localctx).right = relationPrimary(); + setState(352); _la = _input.LA(1); if (_la==ON || _la==USING) { { - setState(349); + setState(351); joinCriteria(); } } @@ -2334,13 +2335,13 @@ class SqlBaseParser extends Parser { case NATURAL: enterOuterAlt(_localctx, 2); { - setState(352); - match(NATURAL); - setState(353); - joinType(); setState(354); - match(JOIN); + match(NATURAL); setState(355); + joinType(); + setState(356); + match(JOIN); + setState(357); ((JoinRelationContext)_localctx).right = relationPrimary(); } break; @@ -2389,17 +2390,17 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 36, RULE_joinType); int _la; try { - setState(374); + setState(376); switch (_input.LA(1)) { case INNER: case JOIN: enterOuterAlt(_localctx, 1); { - setState(360); + setState(362); _la = _input.LA(1); if (_la==INNER) { { - setState(359); + setState(361); match(INNER); } } @@ -2409,13 +2410,13 @@ class SqlBaseParser extends Parser { case LEFT: enterOuterAlt(_localctx, 2); { - setState(362); - match(LEFT); setState(364); + match(LEFT); + setState(366); _la = _input.LA(1); if (_la==OUTER) { { - setState(363); + setState(365); match(OUTER); } } @@ -2425,13 +2426,13 @@ class SqlBaseParser extends Parser { case RIGHT: enterOuterAlt(_localctx, 3); { - setState(366); - match(RIGHT); setState(368); + match(RIGHT); + setState(370); _la = _input.LA(1); if (_la==OUTER) { { - setState(367); + setState(369); match(OUTER); } } @@ -2441,13 +2442,13 @@ class SqlBaseParser extends Parser { case FULL: enterOuterAlt(_localctx, 4); { - setState(370); - match(FULL); setState(372); + match(FULL); + setState(374); _la = _input.LA(1); if (_la==OUTER) { { - setState(371); + setState(373); match(OUTER); } } @@ -2505,43 +2506,43 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 38, RULE_joinCriteria); int _la; try { - setState(390); + setState(392); switch (_input.LA(1)) { case ON: enterOuterAlt(_localctx, 1); { - 
setState(376); + setState(378); match(ON); - setState(377); + setState(379); booleanExpression(0); } break; case USING: enterOuterAlt(_localctx, 2); { - setState(378); - match(USING); - setState(379); - match(T__0); setState(380); + match(USING); + setState(381); + match(T__0); + setState(382); identifier(); - setState(385); + setState(387); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(381); + setState(383); match(T__2); - setState(382); + setState(384); identifier(); } } - setState(387); + setState(389); _errHandler.sync(this); _la = _input.LA(1); } - setState(388); + setState(390); match(T__1); } break; @@ -2646,29 +2647,29 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 40, RULE_relationPrimary); int _la; try { - setState(417); + setState(419); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { case 1: _localctx = new TableNameContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(392); + setState(394); tableIdentifier(); - setState(397); + setState(399); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SHOW - 64)) | (1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(394); + setState(396); _la = _input.LA(1); if (_la==AS) { { - setState(393); + setState(395); match(AS); } } - setState(396); + setState(398); qualifiedName(); } } @@ -2679,26 +2680,26 @@ class SqlBaseParser extends Parser { _localctx = new AliasedQueryContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(399); - match(T__0); - setState(400); - queryNoWith(); setState(401); + match(T__0); + setState(402); + queryNoWith(); + setState(403); match(T__1); - setState(406); + setState(408); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << 
(_la - 64)) & ((1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SHOW - 64)) | (1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(403); + setState(405); _la = _input.LA(1); if (_la==AS) { { - setState(402); + setState(404); match(AS); } } - setState(405); + setState(407); qualifiedName(); } } @@ -2709,26 +2710,26 @@ class SqlBaseParser extends Parser { _localctx = new AliasedRelationContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(408); - match(T__0); - setState(409); - relation(); setState(410); + match(T__0); + setState(411); + relation(); + setState(412); match(T__1); - setState(415); + setState(417); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SHOW - 64)) | (1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(412); + setState(414); _la = _input.LA(1); if (_la==AS) { { - setState(411); + setState(413); match(AS); } } - setState(414); + setState(416); qualifiedName(); } } @@ -2777,7 +2778,7 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(419); + setState(421); booleanExpression(0); } } @@ -2985,7 +2986,7 @@ class SqlBaseParser extends Parser { int _alt; enterOuterAlt(_localctx, 1); { - setState(452); + setState(454); 
_errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { case 1: @@ -2994,9 +2995,9 @@ class SqlBaseParser extends Parser { _ctx = _localctx; _prevctx = _localctx; - setState(422); + setState(424); match(NOT); - setState(423); + setState(425); booleanExpression(8); } break; @@ -3005,13 +3006,13 @@ class SqlBaseParser extends Parser { _localctx = new ExistsContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(424); - match(EXISTS); - setState(425); - match(T__0); setState(426); - query(); + match(EXISTS); setState(427); + match(T__0); + setState(428); + query(); + setState(429); match(T__1); } break; @@ -3020,15 +3021,15 @@ class SqlBaseParser extends Parser { _localctx = new StringQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(429); - match(QUERY); - setState(430); - match(T__0); setState(431); - ((StringQueryContext)_localctx).queryString = string(); + match(QUERY); setState(432); - matchQueryOptions(); + match(T__0); setState(433); + ((StringQueryContext)_localctx).queryString = string(); + setState(434); + matchQueryOptions(); + setState(435); match(T__1); } break; @@ -3037,19 +3038,19 @@ class SqlBaseParser extends Parser { _localctx = new MatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(435); - match(MATCH); - setState(436); - match(T__0); setState(437); - ((MatchQueryContext)_localctx).singleField = qualifiedName(); + match(MATCH); setState(438); - match(T__2); + match(T__0); setState(439); - ((MatchQueryContext)_localctx).queryString = string(); + ((MatchQueryContext)_localctx).singleField = qualifiedName(); setState(440); - matchQueryOptions(); + match(T__2); setState(441); + ((MatchQueryContext)_localctx).queryString = string(); + setState(442); + matchQueryOptions(); + setState(443); match(T__1); } break; @@ -3058,19 +3059,19 @@ class SqlBaseParser extends Parser { _localctx = new MultiMatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(443); - match(MATCH); - setState(444); - match(T__0); setState(445); - ((MultiMatchQueryContext)_localctx).multiFields = string(); + match(MATCH); setState(446); - match(T__2); + match(T__0); setState(447); - ((MultiMatchQueryContext)_localctx).queryString = string(); + ((MultiMatchQueryContext)_localctx).multiFields = string(); setState(448); - matchQueryOptions(); + match(T__2); setState(449); + ((MultiMatchQueryContext)_localctx).queryString = string(); + setState(450); + matchQueryOptions(); + setState(451); match(T__1); } break; @@ -3079,13 +3080,13 @@ class SqlBaseParser extends Parser { _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(451); + setState(453); predicated(); } break; } _ctx.stop = _input.LT(-1); - setState(462); + setState(464); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,60,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -3093,7 +3094,7 @@ class SqlBaseParser extends Parser { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(460); + setState(462); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,59,_ctx) ) { case 1: @@ -3101,11 +3102,11 @@ class SqlBaseParser extends Parser { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(454); - if 
(!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(455); - ((LogicalBinaryContext)_localctx).operator = match(AND); setState(456); + if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); + setState(457); + ((LogicalBinaryContext)_localctx).operator = match(AND); + setState(458); ((LogicalBinaryContext)_localctx).right = booleanExpression(3); } break; @@ -3114,18 +3115,18 @@ class SqlBaseParser extends Parser { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(457); - if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(458); - ((LogicalBinaryContext)_localctx).operator = match(OR); setState(459); + if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); + setState(460); + ((LogicalBinaryContext)_localctx).operator = match(OR); + setState(461); ((LogicalBinaryContext)_localctx).right = booleanExpression(2); } break; } } } - setState(464); + setState(466); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,60,_ctx); } @@ -3175,19 +3176,19 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(469); + setState(471); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(465); + setState(467); match(T__2); - setState(466); + setState(468); string(); } } - setState(471); + setState(473); _errHandler.sync(this); _la = _input.LA(1); } @@ -3236,14 +3237,14 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(472); - valueExpression(0); setState(474); + valueExpression(0); + setState(476); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,62,_ctx) ) { case 1: { - setState(473); + setState(475); predicate(); } break; @@ -3319,142 +3320,142 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 50, RULE_predicate); int _la; try { - setState(522); + setState(524); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,70,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(477); + setState(479); _la = _input.LA(1); if (_la==NOT) { { - setState(476); + setState(478); match(NOT); } } - setState(479); - ((PredicateContext)_localctx).kind = match(BETWEEN); - setState(480); - ((PredicateContext)_localctx).lower = valueExpression(0); setState(481); - match(AND); + ((PredicateContext)_localctx).kind = match(BETWEEN); setState(482); + ((PredicateContext)_localctx).lower = valueExpression(0); + setState(483); + match(AND); + setState(484); ((PredicateContext)_localctx).upper = valueExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(485); + setState(487); _la = _input.LA(1); if (_la==NOT) { { - setState(484); + setState(486); match(NOT); } } - setState(487); - ((PredicateContext)_localctx).kind = match(IN); - setState(488); - match(T__0); setState(489); + ((PredicateContext)_localctx).kind = match(IN); + setState(490); + match(T__0); + setState(491); expression(); - setState(494); + setState(496); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(490); + setState(492); match(T__2); - setState(491); + setState(493); expression(); } } - setState(496); + setState(498); _errHandler.sync(this); _la = _input.LA(1); } - setState(497); + setState(499); match(T__1); } 
break; case 3: enterOuterAlt(_localctx, 3); { - setState(500); + setState(502); _la = _input.LA(1); if (_la==NOT) { { - setState(499); + setState(501); match(NOT); } } - setState(502); - ((PredicateContext)_localctx).kind = match(IN); - setState(503); - match(T__0); setState(504); - query(); + ((PredicateContext)_localctx).kind = match(IN); setState(505); + match(T__0); + setState(506); + query(); + setState(507); match(T__1); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(508); + setState(510); _la = _input.LA(1); if (_la==NOT) { { - setState(507); + setState(509); match(NOT); } } - setState(510); + setState(512); ((PredicateContext)_localctx).kind = match(LIKE); - setState(511); + setState(513); pattern(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(513); + setState(515); _la = _input.LA(1); if (_la==NOT) { { - setState(512); + setState(514); match(NOT); } } - setState(515); + setState(517); ((PredicateContext)_localctx).kind = match(RLIKE); - setState(516); + setState(518); ((PredicateContext)_localctx).regex = string(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(517); - match(IS); setState(519); + match(IS); + setState(521); _la = _input.LA(1); if (_la==NOT) { { - setState(518); + setState(520); match(NOT); } } - setState(521); + setState(523); ((PredicateContext)_localctx).kind = match(NULL); } break; @@ -3501,9 +3502,9 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(524); + setState(526); match(LIKE); - setState(525); + setState(527); pattern(); } } @@ -3551,14 +3552,14 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(527); - ((PatternContext)_localctx).value = string(); setState(529); + ((PatternContext)_localctx).value = string(); + setState(531); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,71,_ctx) ) { case 1: { - setState(528); + setState(530); patternEscape(); } break; @@ -3606,25 +3607,25 @@ class SqlBaseParser extends Parser { PatternEscapeContext _localctx = new PatternEscapeContext(_ctx, getState()); enterRule(_localctx, 56, RULE_patternEscape); try { - setState(537); + setState(539); switch (_input.LA(1)) { case ESCAPE: enterOuterAlt(_localctx, 1); { - setState(531); + setState(533); match(ESCAPE); - setState(532); + setState(534); ((PatternEscapeContext)_localctx).escape = string(); } break; case ESCAPE_ESC: enterOuterAlt(_localctx, 2); { - setState(533); - match(ESCAPE_ESC); - setState(534); - ((PatternEscapeContext)_localctx).escape = string(); setState(535); + match(ESCAPE_ESC); + setState(536); + ((PatternEscapeContext)_localctx).escape = string(); + setState(537); match(ESC_END); } break; @@ -3769,7 +3770,7 @@ class SqlBaseParser extends Parser { int _alt; enterOuterAlt(_localctx, 1); { - setState(543); + setState(545); switch (_input.LA(1)) { case T__0: case ANALYZE: @@ -3777,6 +3778,7 @@ class SqlBaseParser extends Parser { case CAST: case CATALOGS: case COLUMNS: + case CONVERT: case DEBUG: case EXECUTABLE: case EXPLAIN: @@ -3823,7 +3825,7 @@ class SqlBaseParser extends Parser { _ctx = _localctx; _prevctx = _localctx; - setState(540); + setState(542); primaryExpression(); } break; @@ -3833,7 +3835,7 @@ class SqlBaseParser extends Parser { _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(541); + setState(543); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -3841,7 +3843,7 @@ class SqlBaseParser 
extends Parser { } else { consume(); } - setState(542); + setState(544); valueExpression(4); } break; @@ -3849,7 +3851,7 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } _ctx.stop = _input.LT(-1); - setState(557); + setState(559); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,75,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -3857,7 +3859,7 @@ class SqlBaseParser extends Parser { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(555); + setState(557); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,74,_ctx) ) { case 1: @@ -3865,17 +3867,17 @@ class SqlBaseParser extends Parser { _localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(545); + setState(547); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(546); + setState(548); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !(((((_la - 91)) & ~0x3f) == 0 && ((1L << (_la - 91)) & ((1L << (ASTERISK - 91)) | (1L << (SLASH - 91)) | (1L << (PERCENT - 91)))) != 0)) ) { + if ( !(((((_la - 92)) & ~0x3f) == 0 && ((1L << (_la - 92)) & ((1L << (ASTERISK - 92)) | (1L << (SLASH - 92)) | (1L << (PERCENT - 92)))) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { consume(); } - setState(547); + setState(549); ((ArithmeticBinaryContext)_localctx).right = valueExpression(4); } break; @@ -3884,9 +3886,9 @@ class SqlBaseParser extends Parser { _localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(548); + setState(550); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(549); + setState(551); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -3894,7 +3896,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(550); + setState(552); ((ArithmeticBinaryContext)_localctx).right = valueExpression(3); } break; @@ -3903,18 +3905,18 @@ class SqlBaseParser extends Parser { _localctx = new ComparisonContext(new ValueExpressionContext(_parentctx, _parentState)); ((ComparisonContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(551); - if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(552); - comparisonOperator(); setState(553); + if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); + setState(554); + comparisonOperator(); + setState(555); ((ComparisonContext)_localctx).right = valueExpression(2); } break; } } } - setState(559); + setState(561); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,75,_ctx); } @@ -4102,14 +4104,14 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 60, RULE_primaryExpression); int _la; try { - setState(579); + setState(581); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,77,_ctx) ) { case 1: _localctx = new CastContext(_localctx); enterOuterAlt(_localctx, 
1); { - setState(560); + setState(562); castExpression(); } break; @@ -4117,7 +4119,7 @@ class SqlBaseParser extends Parser { _localctx = new ExtractContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(561); + setState(563); extractExpression(); } break; @@ -4125,7 +4127,7 @@ class SqlBaseParser extends Parser { _localctx = new ConstantDefaultContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(562); + setState(564); constant(); } break; @@ -4133,18 +4135,18 @@ class SqlBaseParser extends Parser { _localctx = new StarContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(566); + setState(568); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SHOW - 64)) | (1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(563); + setState(565); qualifiedName(); - setState(564); + setState(566); match(DOT); } } - setState(568); + setState(570); match(ASTERISK); } break; @@ -4152,7 +4154,7 @@ class SqlBaseParser extends Parser { _localctx = new FunctionContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(569); + setState(571); functionExpression(); } break; @@ -4160,11 +4162,11 @@ class SqlBaseParser extends Parser { _localctx = new SubqueryExpressionContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(570); - match(T__0); - setState(571); - query(); setState(572); + match(T__0); + setState(573); + query(); + setState(574); match(T__1); } break; @@ -4172,7 +4174,7 @@ class SqlBaseParser extends Parser { _localctx = new DereferenceContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(574); + setState(576); qualifiedName(); } break; @@ -4180,11 +4182,11 @@ class SqlBaseParser extends Parser { _localctx = new ParenthesizedExpressionContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(575); - match(T__0); - setState(576); - expression(); setState(577); + match(T__0); + setState(578); + expression(); + setState(579); match(T__1); } break; @@ -4207,6 +4209,9 @@ class SqlBaseParser extends Parser { } public TerminalNode FUNCTION_ESC() { return getToken(SqlBaseParser.FUNCTION_ESC, 0); } public TerminalNode ESC_END() { return getToken(SqlBaseParser.ESC_END, 0); } + public ConvertTemplateContext 
convertTemplate() { + return getRuleContext(ConvertTemplateContext.class,0); + } public CastExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @@ -4230,28 +4235,45 @@ class SqlBaseParser extends Parser { CastExpressionContext _localctx = new CastExpressionContext(_ctx, getState()); enterRule(_localctx, 62, RULE_castExpression); try { - setState(586); - switch (_input.LA(1)) { - case CAST: + setState(593); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,78,_ctx) ) { + case 1: enterOuterAlt(_localctx, 1); { - setState(581); + setState(583); castTemplate(); } break; - case FUNCTION_ESC: + case 2: enterOuterAlt(_localctx, 2); { - setState(582); - match(FUNCTION_ESC); - setState(583); - castTemplate(); setState(584); + match(FUNCTION_ESC); + setState(585); + castTemplate(); + setState(586); + match(ESC_END); + } + break; + case 3: + enterOuterAlt(_localctx, 3); + { + setState(588); + convertTemplate(); + } + break; + case 4: + enterOuterAlt(_localctx, 4); + { + setState(589); + match(FUNCTION_ESC); + setState(590); + convertTemplate(); + setState(591); match(ESC_END); } break; - default: - throw new NoViableAltException(this); } } catch (RecognitionException re) { @@ -4299,17 +4321,75 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(588); + setState(595); match(CAST); - setState(589); + setState(596); match(T__0); - setState(590); + setState(597); expression(); - setState(591); + setState(598); match(AS); - setState(592); + setState(599); dataType(); - setState(593); + setState(600); + match(T__1); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class ConvertTemplateContext extends ParserRuleContext { + public TerminalNode CONVERT() { return getToken(SqlBaseParser.CONVERT, 0); } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public DataTypeContext dataType() { + return getRuleContext(DataTypeContext.class,0); + } + public ConvertTemplateContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_convertTemplate; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterConvertTemplate(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitConvertTemplate(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitConvertTemplate(this); + else return visitor.visitChildren(this); + } + } + + public final ConvertTemplateContext convertTemplate() throws RecognitionException { + ConvertTemplateContext _localctx = new ConvertTemplateContext(_ctx, getState()); + enterRule(_localctx, 66, RULE_convertTemplate); + try { + enterOuterAlt(_localctx, 1); + { + setState(602); + match(CONVERT); + setState(603); + match(T__0); + setState(604); + expression(); + setState(605); + match(T__2); + setState(606); + dataType(); + setState(607); match(T__1); } } @@ -4351,25 +4431,25 @@ class SqlBaseParser extends Parser { public final ExtractExpressionContext extractExpression() throws RecognitionException { ExtractExpressionContext 
_localctx = new ExtractExpressionContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_extractExpression); + enterRule(_localctx, 68, RULE_extractExpression); try { - setState(600); + setState(614); switch (_input.LA(1)) { case EXTRACT: enterOuterAlt(_localctx, 1); { - setState(595); + setState(609); extractTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(596); + setState(610); match(FUNCTION_ESC); - setState(597); + setState(611); extractTemplate(); - setState(598); + setState(612); match(ESC_END); } break; @@ -4419,21 +4499,21 @@ class SqlBaseParser extends Parser { public final ExtractTemplateContext extractTemplate() throws RecognitionException { ExtractTemplateContext _localctx = new ExtractTemplateContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_extractTemplate); + enterRule(_localctx, 70, RULE_extractTemplate); try { enterOuterAlt(_localctx, 1); { - setState(602); + setState(616); match(EXTRACT); - setState(603); + setState(617); match(T__0); - setState(604); + setState(618); ((ExtractTemplateContext)_localctx).field = identifier(); - setState(605); + setState(619); match(FROM); - setState(606); + setState(620); valueExpression(0); - setState(607); + setState(621); match(T__1); } } @@ -4474,9 +4554,9 @@ class SqlBaseParser extends Parser { public final FunctionExpressionContext functionExpression() throws RecognitionException { FunctionExpressionContext _localctx = new FunctionExpressionContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_functionExpression); + enterRule(_localctx, 72, RULE_functionExpression); try { - setState(614); + setState(628); switch (_input.LA(1)) { case ANALYZE: case ANALYZED: @@ -4511,18 +4591,18 @@ class SqlBaseParser extends Parser { case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(609); + setState(623); functionTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(610); + setState(624); match(FUNCTION_ESC); - setState(611); + setState(625); functionTemplate(); - setState(612); + setState(626); match(ESC_END); } break; @@ -4575,50 +4655,50 @@ class SqlBaseParser extends Parser { public final FunctionTemplateContext functionTemplate() throws RecognitionException { FunctionTemplateContext _localctx = new FunctionTemplateContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_functionTemplate); + enterRule(_localctx, 74, RULE_functionTemplate); int _la; try { enterOuterAlt(_localctx, 1); { - setState(616); + setState(630); functionName(); - setState(617); + setState(631); match(T__0); - setState(629); + setState(643); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << LEFT) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RIGHT) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (FUNCTION_ESC - 64)) | (1L << (DATE_ESC - 64)) | (1L << (TIME_ESC - 64)) | (1L << (TIMESTAMP_ESC - 64)) | (1L << (GUID_ESC - 64)) 
| (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << LEFT) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RIGHT) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SHOW - 64)) | (1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (FUNCTION_ESC - 64)) | (1L << (DATE_ESC - 64)) | (1L << (TIME_ESC - 64)) | (1L << (TIMESTAMP_ESC - 64)) | (1L << (GUID_ESC - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(619); + setState(633); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(618); + setState(632); setQuantifier(); } } - setState(621); + setState(635); expression(); - setState(626); + setState(640); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(622); + setState(636); match(T__2); - setState(623); + setState(637); expression(); } } - setState(628); + setState(642); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(631); + setState(645); match(T__1); } } @@ -4660,21 +4740,21 @@ class SqlBaseParser extends Parser { public final FunctionNameContext functionName() throws RecognitionException { FunctionNameContext _localctx = new FunctionNameContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_functionName); + enterRule(_localctx, 76, RULE_functionName); try { - setState(636); + setState(650); switch (_input.LA(1)) { case LEFT: enterOuterAlt(_localctx, 1); { - setState(633); + setState(647); match(LEFT); } break; case RIGHT: enterOuterAlt(_localctx, 2); { - setState(634); + setState(648); match(RIGHT); } break; @@ -4709,7 +4789,7 @@ class SqlBaseParser extends Parser { case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 3); { - setState(635); + setState(649); identifier(); } break; @@ -4918,16 +4998,16 @@ class SqlBaseParser extends Parser { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_constant); + enterRule(_localctx, 78, RULE_constant); try { int _alt; - setState(663); + setState(677); switch (_input.LA(1)) { case NULL: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(638); + setState(652); match(NULL); } break; @@ -4936,7 +5016,7 @@ class SqlBaseParser extends Parser { _localctx = new NumericLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(639); + 
setState(653); number(); } break; @@ -4945,7 +5025,7 @@ class SqlBaseParser extends Parser { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(640); + setState(654); booleanValue(); } break; @@ -4953,7 +5033,7 @@ class SqlBaseParser extends Parser { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(642); + setState(656); _errHandler.sync(this); _alt = 1; do { @@ -4961,7 +5041,7 @@ class SqlBaseParser extends Parser { case 1: { { - setState(641); + setState(655); match(STRING); } } @@ -4969,7 +5049,7 @@ class SqlBaseParser extends Parser { default: throw new NoViableAltException(this); } - setState(644); + setState(658); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,85,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); @@ -4979,7 +5059,7 @@ class SqlBaseParser extends Parser { _localctx = new ParamLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(646); + setState(660); match(PARAM); } break; @@ -4987,11 +5067,11 @@ class SqlBaseParser extends Parser { _localctx = new DateEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(647); + setState(661); match(DATE_ESC); - setState(648); + setState(662); string(); - setState(649); + setState(663); match(ESC_END); } break; @@ -4999,11 +5079,11 @@ class SqlBaseParser extends Parser { _localctx = new TimeEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(651); + setState(665); match(TIME_ESC); - setState(652); + setState(666); string(); - setState(653); + setState(667); match(ESC_END); } break; @@ -5011,11 +5091,11 @@ class SqlBaseParser extends Parser { _localctx = new TimestampEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(655); + setState(669); match(TIMESTAMP_ESC); - setState(656); + setState(670); string(); - setState(657); + setState(671); match(ESC_END); } break; @@ -5023,11 +5103,11 @@ class SqlBaseParser extends Parser { _localctx = new GuidEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(659); + setState(673); match(GUID_ESC); - setState(660); + setState(674); string(); - setState(661); + setState(675); match(ESC_END); } break; @@ -5074,14 +5154,14 @@ class SqlBaseParser extends Parser { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_comparisonOperator); + enterRule(_localctx, 80, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(665); + setState(679); _la = _input.LA(1); - if ( !(((((_la - 83)) & ~0x3f) == 0 && ((1L << (_la - 83)) & ((1L << (EQ - 83)) | (1L << (NEQ - 83)) | (1L << (LT - 83)) | (1L << (LTE - 83)) | (1L << (GT - 83)) | (1L << (GTE - 83)))) != 0)) ) { + if ( !(((((_la - 84)) & ~0x3f) == 0 && ((1L << (_la - 84)) & ((1L << (EQ - 84)) | (1L << (NEQ - 84)) | (1L << (LT - 84)) | (1L << (LTE - 84)) | (1L << (GT - 84)) | (1L << (GTE - 84)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -5123,12 +5203,12 @@ class SqlBaseParser extends Parser { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_booleanValue); + enterRule(_localctx, 82, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(667); + 
setState(681); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -5181,12 +5261,12 @@ class SqlBaseParser extends Parser { public final DataTypeContext dataType() throws RecognitionException { DataTypeContext _localctx = new DataTypeContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_dataType); + enterRule(_localctx, 84, RULE_dataType); try { _localctx = new PrimitiveDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(669); + setState(683); identifier(); } } @@ -5233,30 +5313,30 @@ class SqlBaseParser extends Parser { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_qualifiedName); + enterRule(_localctx, 86, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(676); + setState(690); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,87,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(671); + setState(685); identifier(); - setState(672); + setState(686); match(DOT); } } } - setState(678); + setState(692); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,87,_ctx); } - setState(679); + setState(693); identifier(); } } @@ -5299,15 +5379,15 @@ class SqlBaseParser extends Parser { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_identifier); + enterRule(_localctx, 88, RULE_identifier); try { - setState(683); + setState(697); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(681); + setState(695); quoteIdentifier(); } break; @@ -5340,7 +5420,7 @@ class SqlBaseParser extends Parser { case DIGIT_IDENTIFIER: enterOuterAlt(_localctx, 2); { - setState(682); + setState(696); unquoteIdentifier(); } break; @@ -5390,46 +5470,46 @@ class SqlBaseParser extends Parser { public final TableIdentifierContext tableIdentifier() throws RecognitionException { TableIdentifierContext _localctx = new TableIdentifierContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_tableIdentifier); + enterRule(_localctx, 90, RULE_tableIdentifier); int _la; try { - setState(697); + setState(711); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,91,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(688); + setState(702); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | 
(1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SHOW - 64)) | (1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(685); + setState(699); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(686); + setState(700); match(T__3); } } - setState(690); + setState(704); match(TABLE_IDENTIFIER); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(694); + setState(708); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,90,_ctx) ) { case 1: { - setState(691); + setState(705); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(692); + setState(706); match(T__3); } break; } - setState(696); + setState(710); ((TableIdentifierContext)_localctx).name = identifier(); } break; @@ -5494,15 +5574,15 @@ class SqlBaseParser extends Parser { public final QuoteIdentifierContext quoteIdentifier() throws RecognitionException { QuoteIdentifierContext _localctx = new QuoteIdentifierContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_quoteIdentifier); + enterRule(_localctx, 92, RULE_quoteIdentifier); try { - setState(701); + setState(715); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: _localctx = new QuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(699); + setState(713); match(QUOTED_IDENTIFIER); } break; @@ -5510,7 +5590,7 @@ class SqlBaseParser extends Parser { _localctx = new BackQuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(700); + setState(714); match(BACKQUOTED_IDENTIFIER); } break; @@ -5580,15 +5660,15 @@ class SqlBaseParser extends Parser { public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionException { UnquoteIdentifierContext _localctx = new UnquoteIdentifierContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_unquoteIdentifier); + enterRule(_localctx, 94, RULE_unquoteIdentifier); try { - setState(706); + setState(720); switch (_input.LA(1)) { case IDENTIFIER: _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(703); + setState(717); match(IDENTIFIER); } break; @@ -5620,7 +5700,7 @@ class SqlBaseParser extends Parser { _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(704); + setState(718); nonReserved(); } break; @@ -5628,7 +5708,7 @@ class SqlBaseParser extends Parser { _localctx = new DigitIdentifierContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(705); + setState(719); match(DIGIT_IDENTIFIER); } break; @@ -5695,15 +5775,15 @@ class SqlBaseParser extends Parser { public final NumberContext number() throws RecognitionException { NumberContext _localctx = new NumberContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_number); + enterRule(_localctx, 96, RULE_number); try { - setState(710); + setState(724); switch (_input.LA(1)) { case DECIMAL_VALUE: _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(708); + setState(722); match(DECIMAL_VALUE); } break; @@ -5711,7 +5791,7 @@ class SqlBaseParser extends Parser { _localctx = new IntegerLiteralContext(_localctx); 
enterOuterAlt(_localctx, 2); { - setState(709); + setState(723); match(INTEGER_VALUE); } break; @@ -5754,12 +5834,12 @@ class SqlBaseParser extends Parser { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_string); + enterRule(_localctx, 98, RULE_string); int _la; try { enterOuterAlt(_localctx, 1); { - setState(712); + setState(726); _la = _input.LA(1); if ( !(_la==PARAM || _la==STRING) ) { _errHandler.recoverInline(this); @@ -5826,14 +5906,14 @@ class SqlBaseParser extends Parser { public final NonReservedContext nonReserved() throws RecognitionException { NonReservedContext _localctx = new NonReservedContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_nonReserved); + enterRule(_localctx, 100, RULE_nonReserved); int _la; try { enterOuterAlt(_localctx, 1); { - setState(714); + setState(728); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)))) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (SHOW - 64)) | (1L << (SYS - 64)) | (1L << (TABLES - 64)) | (1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -5882,288 +5962,293 @@ class SqlBaseParser extends Parser { } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3o\u02cf\4\2\t\2\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3p\u02dd\4\2\t\2\4"+ "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ "\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+ "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4"+ - ",\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\3\2\3\2"+ - "\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4v\n\4\f\4\16\4"+ - "y\13\4\3\4\5\4|\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4\u0085\n\4\f\4\16\4"+ - "\u0088\13\4\3\4\5\4\u008b\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u0092\n\4\3\4\3"+ - "\4\3\4\3\4\3\4\5\4\u0099\n\4\3\4\3\4\3\4\5\4\u009e\n\4\3\4\3\4\3\4\5\4"+ - "\u00a3\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\5\4\u00ad\n\4\3\4\3\4\5\4\u00b1"+ - "\n\4\3\4\3\4\3\4\3\4\7\4\u00b7\n\4\f\4\16\4\u00ba\13\4\5\4\u00bc\n\4\3"+ - "\4\3\4\3\4\3\4\5\4\u00c2\n\4\3\4\3\4\3\4\5\4\u00c7\n\4\3\4\5\4\u00ca\n"+ - "\4\3\4\3\4\3\4\3\4\3\4\5\4\u00d1\n\4\3\5\3\5\3\5\3\5\7\5\u00d7\n\5\f\5"+ - 
"\16\5\u00da\13\5\5\5\u00dc\n\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3\6\7\6\u00e6"+ - "\n\6\f\6\16\6\u00e9\13\6\5\6\u00eb\n\6\3\6\5\6\u00ee\n\6\3\7\3\7\3\7\3"+ - "\7\3\7\5\7\u00f5\n\7\3\b\3\b\3\b\3\b\3\b\5\b\u00fc\n\b\3\t\3\t\5\t\u0100"+ - "\n\t\3\t\3\t\5\t\u0104\n\t\3\n\3\n\5\n\u0108\n\n\3\n\3\n\3\n\7\n\u010d"+ - "\n\n\f\n\16\n\u0110\13\n\3\n\5\n\u0113\n\n\3\n\3\n\5\n\u0117\n\n\3\n\3"+ - "\n\3\n\5\n\u011c\n\n\3\n\3\n\5\n\u0120\n\n\3\13\3\13\3\13\3\13\7\13\u0126"+ - "\n\13\f\13\16\13\u0129\13\13\3\f\5\f\u012c\n\f\3\f\3\f\3\f\7\f\u0131\n"+ - "\f\f\f\16\f\u0134\13\f\3\r\3\r\3\16\3\16\3\16\3\16\7\16\u013c\n\16\f\16"+ - "\16\16\u013f\13\16\5\16\u0141\n\16\3\16\3\16\5\16\u0145\n\16\3\17\3\17"+ - "\3\17\3\17\3\17\3\17\3\20\3\20\3\21\3\21\5\21\u0151\n\21\3\21\5\21\u0154"+ - "\n\21\3\22\3\22\7\22\u0158\n\22\f\22\16\22\u015b\13\22\3\23\3\23\3\23"+ - "\3\23\5\23\u0161\n\23\3\23\3\23\3\23\3\23\3\23\5\23\u0168\n\23\3\24\5"+ - "\24\u016b\n\24\3\24\3\24\5\24\u016f\n\24\3\24\3\24\5\24\u0173\n\24\3\24"+ - "\3\24\5\24\u0177\n\24\5\24\u0179\n\24\3\25\3\25\3\25\3\25\3\25\3\25\3"+ - "\25\7\25\u0182\n\25\f\25\16\25\u0185\13\25\3\25\3\25\5\25\u0189\n\25\3"+ - "\26\3\26\5\26\u018d\n\26\3\26\5\26\u0190\n\26\3\26\3\26\3\26\3\26\5\26"+ - "\u0196\n\26\3\26\5\26\u0199\n\26\3\26\3\26\3\26\3\26\5\26\u019f\n\26\3"+ - "\26\5\26\u01a2\n\26\5\26\u01a4\n\26\3\27\3\27\3\30\3\30\3\30\3\30\3\30"+ + ",\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t"+ + "\64\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4x\n"+ + "\4\f\4\16\4{\13\4\3\4\5\4~\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4\u0087\n"+ + "\4\f\4\16\4\u008a\13\4\3\4\5\4\u008d\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u0094"+ + "\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u009b\n\4\3\4\3\4\3\4\5\4\u00a0\n\4\3\4\3"+ + "\4\3\4\5\4\u00a5\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\5\4\u00af\n\4\3\4"+ + "\3\4\5\4\u00b3\n\4\3\4\3\4\3\4\3\4\7\4\u00b9\n\4\f\4\16\4\u00bc\13\4\5"+ + "\4\u00be\n\4\3\4\3\4\3\4\3\4\5\4\u00c4\n\4\3\4\3\4\3\4\5\4\u00c9\n\4\3"+ + "\4\5\4\u00cc\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u00d3\n\4\3\5\3\5\3\5\3\5\7\5"+ + "\u00d9\n\5\f\5\16\5\u00dc\13\5\5\5\u00de\n\5\3\5\3\5\3\6\3\6\3\6\3\6\3"+ + "\6\3\6\7\6\u00e8\n\6\f\6\16\6\u00eb\13\6\5\6\u00ed\n\6\3\6\5\6\u00f0\n"+ + "\6\3\7\3\7\3\7\3\7\3\7\5\7\u00f7\n\7\3\b\3\b\3\b\3\b\3\b\5\b\u00fe\n\b"+ + "\3\t\3\t\5\t\u0102\n\t\3\t\3\t\5\t\u0106\n\t\3\n\3\n\5\n\u010a\n\n\3\n"+ + "\3\n\3\n\7\n\u010f\n\n\f\n\16\n\u0112\13\n\3\n\5\n\u0115\n\n\3\n\3\n\5"+ + "\n\u0119\n\n\3\n\3\n\3\n\5\n\u011e\n\n\3\n\3\n\5\n\u0122\n\n\3\13\3\13"+ + "\3\13\3\13\7\13\u0128\n\13\f\13\16\13\u012b\13\13\3\f\5\f\u012e\n\f\3"+ + "\f\3\f\3\f\7\f\u0133\n\f\f\f\16\f\u0136\13\f\3\r\3\r\3\16\3\16\3\16\3"+ + "\16\7\16\u013e\n\16\f\16\16\16\u0141\13\16\5\16\u0143\n\16\3\16\3\16\5"+ + "\16\u0147\n\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\21\3\21\5\21"+ + "\u0153\n\21\3\21\5\21\u0156\n\21\3\22\3\22\7\22\u015a\n\22\f\22\16\22"+ + "\u015d\13\22\3\23\3\23\3\23\3\23\5\23\u0163\n\23\3\23\3\23\3\23\3\23\3"+ + "\23\5\23\u016a\n\23\3\24\5\24\u016d\n\24\3\24\3\24\5\24\u0171\n\24\3\24"+ + "\3\24\5\24\u0175\n\24\3\24\3\24\5\24\u0179\n\24\5\24\u017b\n\24\3\25\3"+ + "\25\3\25\3\25\3\25\3\25\3\25\7\25\u0184\n\25\f\25\16\25\u0187\13\25\3"+ + "\25\3\25\5\25\u018b\n\25\3\26\3\26\5\26\u018f\n\26\3\26\5\26\u0192\n\26"+ + "\3\26\3\26\3\26\3\26\5\26\u0198\n\26\3\26\5\26\u019b\n\26\3\26\3\26\3"+ + "\26\3\26\5\26\u01a1\n\26\3\26\5\26\u01a4\n\26\5\26\u01a6\n\26\3\27\3\27"+ "\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30"+ - 
"\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\5\30\u01c7"+ - "\n\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01cf\n\30\f\30\16\30\u01d2\13"+ - "\30\3\31\3\31\7\31\u01d6\n\31\f\31\16\31\u01d9\13\31\3\32\3\32\5\32\u01dd"+ - "\n\32\3\33\5\33\u01e0\n\33\3\33\3\33\3\33\3\33\3\33\3\33\5\33\u01e8\n"+ - "\33\3\33\3\33\3\33\3\33\3\33\7\33\u01ef\n\33\f\33\16\33\u01f2\13\33\3"+ - "\33\3\33\3\33\5\33\u01f7\n\33\3\33\3\33\3\33\3\33\3\33\3\33\5\33\u01ff"+ - "\n\33\3\33\3\33\3\33\5\33\u0204\n\33\3\33\3\33\3\33\3\33\5\33\u020a\n"+ - "\33\3\33\5\33\u020d\n\33\3\34\3\34\3\34\3\35\3\35\5\35\u0214\n\35\3\36"+ - "\3\36\3\36\3\36\3\36\3\36\5\36\u021c\n\36\3\37\3\37\3\37\3\37\5\37\u0222"+ - "\n\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\7\37\u022e\n\37"+ - "\f\37\16\37\u0231\13\37\3 \3 \3 \3 \3 \3 \5 \u0239\n \3 \3 \3 \3 \3 \3"+ - " \3 \3 \3 \3 \3 \5 \u0246\n \3!\3!\3!\3!\3!\5!\u024d\n!\3\"\3\"\3\"\3"+ - "\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\5#\u025b\n#\3$\3$\3$\3$\3$\3$\3$\3%\3%\3"+ - "%\3%\3%\5%\u0269\n%\3&\3&\3&\5&\u026e\n&\3&\3&\3&\7&\u0273\n&\f&\16&\u0276"+ - "\13&\5&\u0278\n&\3&\3&\3\'\3\'\3\'\5\'\u027f\n\'\3(\3(\3(\3(\6(\u0285"+ - "\n(\r(\16(\u0286\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\5"+ - "(\u029a\n(\3)\3)\3*\3*\3+\3+\3,\3,\3,\7,\u02a5\n,\f,\16,\u02a8\13,\3,"+ - "\3,\3-\3-\5-\u02ae\n-\3.\3.\3.\5.\u02b3\n.\3.\3.\3.\3.\5.\u02b9\n.\3."+ - "\5.\u02bc\n.\3/\3/\5/\u02c0\n/\3\60\3\60\3\60\5\60\u02c5\n\60\3\61\3\61"+ - "\5\61\u02c9\n\61\3\62\3\62\3\63\3\63\3\63\2\4.<\64\2\4\6\b\n\f\16\20\22"+ - "\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\^`bd\2\21\b"+ - "\2\7\7\t\t\31\31..\65\6599\4\2##EE\4\2\t\t\65\65\4\2 &&\3\2\25\26\4\2"+ - "\7\7dd\4\2\r\r\25\25\4\2\36\36**\4\2\7\7\27\27\3\2[\\\3\2]_\3\2UZ\4\2"+ - "\35\35FF\3\2bc\20\2\b\t\22\24\31\31\33\33\37\37\"#..\65\659;=?ABDEGHJ"+ - "J\u0325\2f\3\2\2\2\4i\3\2\2\2\6\u00d0\3\2\2\2\b\u00db\3\2\2\2\n\u00df"+ - "\3\2\2\2\f\u00f4\3\2\2\2\16\u00fb\3\2\2\2\20\u00fd\3\2\2\2\22\u0105\3"+ - "\2\2\2\24\u0121\3\2\2\2\26\u012b\3\2\2\2\30\u0135\3\2\2\2\32\u0144\3\2"+ - "\2\2\34\u0146\3\2\2\2\36\u014c\3\2\2\2 \u014e\3\2\2\2\"\u0155\3\2\2\2"+ - "$\u0167\3\2\2\2&\u0178\3\2\2\2(\u0188\3\2\2\2*\u01a3\3\2\2\2,\u01a5\3"+ - "\2\2\2.\u01c6\3\2\2\2\60\u01d7\3\2\2\2\62\u01da\3\2\2\2\64\u020c\3\2\2"+ - "\2\66\u020e\3\2\2\28\u0211\3\2\2\2:\u021b\3\2\2\2<\u0221\3\2\2\2>\u0245"+ - "\3\2\2\2@\u024c\3\2\2\2B\u024e\3\2\2\2D\u025a\3\2\2\2F\u025c\3\2\2\2H"+ - "\u0268\3\2\2\2J\u026a\3\2\2\2L\u027e\3\2\2\2N\u0299\3\2\2\2P\u029b\3\2"+ - "\2\2R\u029d\3\2\2\2T\u029f\3\2\2\2V\u02a6\3\2\2\2X\u02ad\3\2\2\2Z\u02bb"+ - "\3\2\2\2\\\u02bf\3\2\2\2^\u02c4\3\2\2\2`\u02c8\3\2\2\2b\u02ca\3\2\2\2"+ - "d\u02cc\3\2\2\2fg\5\6\4\2gh\7\2\2\3h\3\3\2\2\2ij\5,\27\2jk\7\2\2\3k\5"+ - "\3\2\2\2l\u00d1\5\b\5\2m{\7\33\2\2nw\7\3\2\2op\7;\2\2pv\t\2\2\2qr\7\37"+ - "\2\2rv\t\3\2\2st\7J\2\2tv\5R*\2uo\3\2\2\2uq\3\2\2\2us\3\2\2\2vy\3\2\2"+ - "\2wu\3\2\2\2wx\3\2\2\2xz\3\2\2\2yw\3\2\2\2z|\7\4\2\2{n\3\2\2\2{|\3\2\2"+ - "\2|}\3\2\2\2}\u00d1\5\6\4\2~\u008a\7\24\2\2\177\u0086\7\3\2\2\u0080\u0081"+ - "\7;\2\2\u0081\u0085\t\4\2\2\u0082\u0083\7\37\2\2\u0083\u0085\t\3\2\2\u0084"+ - "\u0080\3\2\2\2\u0084\u0082\3\2\2\2\u0085\u0088\3\2\2\2\u0086\u0084\3\2"+ - "\2\2\u0086\u0087\3\2\2\2\u0087\u0089\3\2\2\2\u0088\u0086\3\2\2\2\u0089"+ - "\u008b\7\4\2\2\u008a\177\3\2\2\2\u008a\u008b\3\2\2\2\u008b\u008c\3\2\2"+ - "\2\u008c\u00d1\5\6\4\2\u008d\u008e\7A\2\2\u008e\u0091\7D\2\2\u008f\u0092"+ - "\5\66\34\2\u0090\u0092\5Z.\2\u0091\u008f\3\2\2\2\u0091\u0090\3\2\2\2\u0091"+ - 
"\u0092\3\2\2\2\u0092\u00d1\3\2\2\2\u0093\u0094\7A\2\2\u0094\u0095\7\23"+ - "\2\2\u0095\u0098\t\5\2\2\u0096\u0099\5\66\34\2\u0097\u0099\5Z.\2\u0098"+ - "\u0096\3\2\2\2\u0098\u0097\3\2\2\2\u0099\u00d1\3\2\2\2\u009a\u009d\t\6"+ - "\2\2\u009b\u009e\5\66\34\2\u009c\u009e\5Z.\2\u009d\u009b\3\2\2\2\u009d"+ - "\u009c\3\2\2\2\u009e\u00d1\3\2\2\2\u009f\u00a0\7A\2\2\u00a0\u00a2\7\""+ - "\2\2\u00a1\u00a3\5\66\34\2\u00a2\u00a1\3\2\2\2\u00a2\u00a3\3\2\2\2\u00a3"+ - "\u00d1\3\2\2\2\u00a4\u00a5\7A\2\2\u00a5\u00d1\7?\2\2\u00a6\u00a7\7B\2"+ - "\2\u00a7\u00d1\7\22\2\2\u00a8\u00a9\7B\2\2\u00a9\u00ac\7D\2\2\u00aa\u00ab"+ - "\7\21\2\2\u00ab\u00ad\5\66\34\2\u00ac\u00aa\3\2\2\2\u00ac\u00ad\3\2\2"+ - "\2\u00ad\u00b0\3\2\2\2\u00ae\u00b1\5\66\34\2\u00af\u00b1\5Z.\2\u00b0\u00ae"+ - "\3\2\2\2\u00b0\u00af\3\2\2\2\u00b0\u00b1\3\2\2\2\u00b1\u00bb\3\2\2\2\u00b2"+ - "\u00b3\7G\2\2\u00b3\u00b8\5b\62\2\u00b4\u00b5\7\5\2\2\u00b5\u00b7\5b\62"+ - "\2\u00b6\u00b4\3\2\2\2\u00b7\u00ba\3\2\2\2\u00b8\u00b6\3\2\2\2\u00b8\u00b9"+ - "\3\2\2\2\u00b9\u00bc\3\2\2\2\u00ba\u00b8\3\2\2\2\u00bb\u00b2\3\2\2\2\u00bb"+ - "\u00bc\3\2\2\2\u00bc\u00d1\3\2\2\2\u00bd\u00be\7B\2\2\u00be\u00c1\7\23"+ - "\2\2\u00bf\u00c0\7\21\2\2\u00c0\u00c2\5b\62\2\u00c1\u00bf\3\2\2\2\u00c1"+ - "\u00c2\3\2\2\2\u00c2\u00c6\3\2\2\2\u00c3\u00c4\7C\2\2\u00c4\u00c7\5\66"+ - "\34\2\u00c5\u00c7\5Z.\2\u00c6\u00c3\3\2\2\2\u00c6\u00c5\3\2\2\2\u00c6"+ - "\u00c7\3\2\2\2\u00c7\u00c9\3\2\2\2\u00c8\u00ca\5\66\34\2\u00c9\u00c8\3"+ - "\2\2\2\u00c9\u00ca\3\2\2\2\u00ca\u00d1\3\2\2\2\u00cb\u00cc\7B\2\2\u00cc"+ - "\u00d1\7H\2\2\u00cd\u00ce\7B\2\2\u00ce\u00cf\7C\2\2\u00cf\u00d1\7H\2\2"+ - "\u00d0l\3\2\2\2\u00d0m\3\2\2\2\u00d0~\3\2\2\2\u00d0\u008d\3\2\2\2\u00d0"+ - "\u0093\3\2\2\2\u00d0\u009a\3\2\2\2\u00d0\u009f\3\2\2\2\u00d0\u00a4\3\2"+ - "\2\2\u00d0\u00a6\3\2\2\2\u00d0\u00a8\3\2\2\2\u00d0\u00bd\3\2\2\2\u00d0"+ - "\u00cb\3\2\2\2\u00d0\u00cd\3\2\2\2\u00d1\7\3\2\2\2\u00d2\u00d3\7L\2\2"+ - "\u00d3\u00d8\5\34\17\2\u00d4\u00d5\7\5\2\2\u00d5\u00d7\5\34\17\2\u00d6"+ - "\u00d4\3\2\2\2\u00d7\u00da\3\2\2\2\u00d8\u00d6\3\2\2\2\u00d8\u00d9\3\2"+ - "\2\2\u00d9\u00dc\3\2\2\2\u00da\u00d8\3\2\2\2\u00db\u00d2\3\2\2\2\u00db"+ - "\u00dc\3\2\2\2\u00dc\u00dd\3\2\2\2\u00dd\u00de\5\n\6\2\u00de\t\3\2\2\2"+ - "\u00df\u00ea\5\16\b\2\u00e0\u00e1\7\67\2\2\u00e1\u00e2\7\17\2\2\u00e2"+ - "\u00e7\5\20\t\2\u00e3\u00e4\7\5\2\2\u00e4\u00e6\5\20\t\2\u00e5\u00e3\3"+ - "\2\2\2\u00e6\u00e9\3\2\2\2\u00e7\u00e5\3\2\2\2\u00e7\u00e8\3\2\2\2\u00e8"+ - "\u00eb\3\2\2\2\u00e9\u00e7\3\2\2\2\u00ea\u00e0\3\2\2\2\u00ea\u00eb\3\2"+ - "\2\2\u00eb\u00ed\3\2\2\2\u00ec\u00ee\5\f\7\2\u00ed\u00ec\3\2\2\2\u00ed"+ - "\u00ee\3\2\2\2\u00ee\13\3\2\2\2\u00ef\u00f0\7-\2\2\u00f0\u00f5\t\7\2\2"+ - "\u00f1\u00f2\7O\2\2\u00f2\u00f3\t\7\2\2\u00f3\u00f5\7T\2\2\u00f4\u00ef"+ - "\3\2\2\2\u00f4\u00f1\3\2\2\2\u00f5\r\3\2\2\2\u00f6\u00fc\5\22\n\2\u00f7"+ - "\u00f8\7\3\2\2\u00f8\u00f9\5\n\6\2\u00f9\u00fa\7\4\2\2\u00fa\u00fc\3\2"+ - "\2\2\u00fb\u00f6\3\2\2\2\u00fb\u00f7\3\2\2\2\u00fc\17\3\2\2\2\u00fd\u00ff"+ - "\5,\27\2\u00fe\u0100\t\b\2\2\u00ff\u00fe\3\2\2\2\u00ff\u0100\3\2\2\2\u0100"+ - "\u0103\3\2\2\2\u0101\u0102\7\63\2\2\u0102\u0104\t\t\2\2\u0103\u0101\3"+ - "\2\2\2\u0103\u0104\3\2\2\2\u0104\21\3\2\2\2\u0105\u0107\7@\2\2\u0106\u0108"+ - "\5\36\20\2\u0107\u0106\3\2\2\2\u0107\u0108\3\2\2\2\u0108\u0109\3\2\2\2"+ - "\u0109\u010e\5 \21\2\u010a\u010b\7\5\2\2\u010b\u010d\5 \21\2\u010c\u010a"+ - "\3\2\2\2\u010d\u0110\3\2\2\2\u010e\u010c\3\2\2\2\u010e\u010f\3\2\2\2\u010f"+ - "\u0112\3\2\2\2\u0110\u010e\3\2\2\2\u0111\u0113\5\24\13\2\u0112\u0111\3"+ - 
"\2\2\2\u0112\u0113\3\2\2\2\u0113\u0116\3\2\2\2\u0114\u0115\7K\2\2\u0115"+ - "\u0117\5.\30\2\u0116\u0114\3\2\2\2\u0116\u0117\3\2\2\2\u0117\u011b\3\2"+ - "\2\2\u0118\u0119\7$\2\2\u0119\u011a\7\17\2\2\u011a\u011c\5\26\f\2\u011b"+ - "\u0118\3\2\2\2\u011b\u011c\3\2\2\2\u011c\u011f\3\2\2\2\u011d\u011e\7%"+ - "\2\2\u011e\u0120\5.\30\2\u011f\u011d\3\2\2\2\u011f\u0120\3\2\2\2\u0120"+ - "\23\3\2\2\2\u0121\u0122\7 \2\2\u0122\u0127\5\"\22\2\u0123\u0124\7\5\2"+ - "\2\u0124\u0126\5\"\22\2\u0125\u0123\3\2\2\2\u0126\u0129\3\2\2\2\u0127"+ - "\u0125\3\2\2\2\u0127\u0128\3\2\2\2\u0128\25\3\2\2\2\u0129\u0127\3\2\2"+ - "\2\u012a\u012c\5\36\20\2\u012b\u012a\3\2\2\2\u012b\u012c\3\2\2\2\u012c"+ - "\u012d\3\2\2\2\u012d\u0132\5\30\r\2\u012e\u012f\7\5\2\2\u012f\u0131\5"+ - "\30\r\2\u0130\u012e\3\2\2\2\u0131\u0134\3\2\2\2\u0132\u0130\3\2\2\2\u0132"+ - "\u0133\3\2\2\2\u0133\27\3\2\2\2\u0134\u0132\3\2\2\2\u0135\u0136\5\32\16"+ - "\2\u0136\31\3\2\2\2\u0137\u0140\7\3\2\2\u0138\u013d\5,\27\2\u0139\u013a"+ - "\7\5\2\2\u013a\u013c\5,\27\2\u013b\u0139\3\2\2\2\u013c\u013f\3\2\2\2\u013d"+ - "\u013b\3\2\2\2\u013d\u013e\3\2\2\2\u013e\u0141\3\2\2\2\u013f\u013d\3\2"+ - "\2\2\u0140\u0138\3\2\2\2\u0140\u0141\3\2\2\2\u0141\u0142\3\2\2\2\u0142"+ - "\u0145\7\4\2\2\u0143\u0145\5,\27\2\u0144\u0137\3\2\2\2\u0144\u0143\3\2"+ - "\2\2\u0145\33\3\2\2\2\u0146\u0147\5X-\2\u0147\u0148\7\f\2\2\u0148\u0149"+ - "\7\3\2\2\u0149\u014a\5\n\6\2\u014a\u014b\7\4\2\2\u014b\35\3\2\2\2\u014c"+ - "\u014d\t\n\2\2\u014d\37\3\2\2\2\u014e\u0153\5,\27\2\u014f\u0151\7\f\2"+ - "\2\u0150\u014f\3\2\2\2\u0150\u0151\3\2\2\2\u0151\u0152\3\2\2\2\u0152\u0154"+ - "\5X-\2\u0153\u0150\3\2\2\2\u0153\u0154\3\2\2\2\u0154!\3\2\2\2\u0155\u0159"+ - "\5*\26\2\u0156\u0158\5$\23\2\u0157\u0156\3\2\2\2\u0158\u015b\3\2\2\2\u0159"+ - "\u0157\3\2\2\2\u0159\u015a\3\2\2\2\u015a#\3\2\2\2\u015b\u0159\3\2\2\2"+ - "\u015c\u015d\5&\24\2\u015d\u015e\7)\2\2\u015e\u0160\5*\26\2\u015f\u0161"+ - "\5(\25\2\u0160\u015f\3\2\2\2\u0160\u0161\3\2\2\2\u0161\u0168\3\2\2\2\u0162"+ - "\u0163\7\60\2\2\u0163\u0164\5&\24\2\u0164\u0165\7)\2\2\u0165\u0166\5*"+ - "\26\2\u0166\u0168\3\2\2\2\u0167\u015c\3\2\2\2\u0167\u0162\3\2\2\2\u0168"+ - "%\3\2\2\2\u0169\u016b\7\'\2\2\u016a\u0169\3\2\2\2\u016a\u016b\3\2\2\2"+ - "\u016b\u0179\3\2\2\2\u016c\u016e\7+\2\2\u016d\u016f\78\2\2\u016e\u016d"+ - "\3\2\2\2\u016e\u016f\3\2\2\2\u016f\u0179\3\2\2\2\u0170\u0172\7<\2\2\u0171"+ - "\u0173\78\2\2\u0172\u0171\3\2\2\2\u0172\u0173\3\2\2\2\u0173\u0179\3\2"+ - "\2\2\u0174\u0176\7!\2\2\u0175\u0177\78\2\2\u0176\u0175\3\2\2\2\u0176\u0177"+ - "\3\2\2\2\u0177\u0179\3\2\2\2\u0178\u016a\3\2\2\2\u0178\u016c\3\2\2\2\u0178"+ - "\u0170\3\2\2\2\u0178\u0174\3\2\2\2\u0179\'\3\2\2\2\u017a\u017b\7\64\2"+ - "\2\u017b\u0189\5.\30\2\u017c\u017d\7I\2\2\u017d\u017e\7\3\2\2\u017e\u0183"+ - "\5X-\2\u017f\u0180\7\5\2\2\u0180\u0182\5X-\2\u0181\u017f\3\2\2\2\u0182"+ - "\u0185\3\2\2\2\u0183\u0181\3\2\2\2\u0183\u0184\3\2\2\2\u0184\u0186\3\2"+ - "\2\2\u0185\u0183\3\2\2\2\u0186\u0187\7\4\2\2\u0187\u0189\3\2\2\2\u0188"+ - "\u017a\3\2\2\2\u0188\u017c\3\2\2\2\u0189)\3\2\2\2\u018a\u018f\5Z.\2\u018b"+ - "\u018d\7\f\2\2\u018c\u018b\3\2\2\2\u018c\u018d\3\2\2\2\u018d\u018e\3\2"+ - "\2\2\u018e\u0190\5V,\2\u018f\u018c\3\2\2\2\u018f\u0190\3\2\2\2\u0190\u01a4"+ - "\3\2\2\2\u0191\u0192\7\3\2\2\u0192\u0193\5\n\6\2\u0193\u0198\7\4\2\2\u0194"+ - "\u0196\7\f\2\2\u0195\u0194\3\2\2\2\u0195\u0196\3\2\2\2\u0196\u0197\3\2"+ - "\2\2\u0197\u0199\5V,\2\u0198\u0195\3\2\2\2\u0198\u0199\3\2\2\2\u0199\u01a4"+ - "\3\2\2\2\u019a\u019b\7\3\2\2\u019b\u019c\5\"\22\2\u019c\u01a1\7\4\2\2"+ - 
"\u019d\u019f\7\f\2\2\u019e\u019d\3\2\2\2\u019e\u019f\3\2\2\2\u019f\u01a0"+ - "\3\2\2\2\u01a0\u01a2\5V,\2\u01a1\u019e\3\2\2\2\u01a1\u01a2\3\2\2\2\u01a2"+ - "\u01a4\3\2\2\2\u01a3\u018a\3\2\2\2\u01a3\u0191\3\2\2\2\u01a3\u019a\3\2"+ - "\2\2\u01a4+\3\2\2\2\u01a5\u01a6\5.\30\2\u01a6-\3\2\2\2\u01a7\u01a8\b\30"+ - "\1\2\u01a8\u01a9\7\61\2\2\u01a9\u01c7\5.\30\n\u01aa\u01ab\7\32\2\2\u01ab"+ - "\u01ac\7\3\2\2\u01ac\u01ad\5\b\5\2\u01ad\u01ae\7\4\2\2\u01ae\u01c7\3\2"+ - "\2\2\u01af\u01b0\7>\2\2\u01b0\u01b1\7\3\2\2\u01b1\u01b2\5b\62\2\u01b2"+ - "\u01b3\5\60\31\2\u01b3\u01b4\7\4\2\2\u01b4\u01c7\3\2\2\2\u01b5\u01b6\7"+ - "/\2\2\u01b6\u01b7\7\3\2\2\u01b7\u01b8\5V,\2\u01b8\u01b9\7\5\2\2\u01b9"+ - "\u01ba\5b\62\2\u01ba\u01bb\5\60\31\2\u01bb\u01bc\7\4\2\2\u01bc\u01c7\3"+ - "\2\2\2\u01bd\u01be\7/\2\2\u01be\u01bf\7\3\2\2\u01bf\u01c0\5b\62\2\u01c0"+ - "\u01c1\7\5\2\2\u01c1\u01c2\5b\62\2\u01c2\u01c3\5\60\31\2\u01c3\u01c4\7"+ - "\4\2\2\u01c4\u01c7\3\2\2\2\u01c5\u01c7\5\62\32\2\u01c6\u01a7\3\2\2\2\u01c6"+ - "\u01aa\3\2\2\2\u01c6\u01af\3\2\2\2\u01c6\u01b5\3\2\2\2\u01c6\u01bd\3\2"+ - "\2\2\u01c6\u01c5\3\2\2\2\u01c7\u01d0\3\2\2\2\u01c8\u01c9\f\4\2\2\u01c9"+ - "\u01ca\7\n\2\2\u01ca\u01cf\5.\30\5\u01cb\u01cc\f\3\2\2\u01cc\u01cd\7\66"+ - "\2\2\u01cd\u01cf\5.\30\4\u01ce\u01c8\3\2\2\2\u01ce\u01cb\3\2\2\2\u01cf"+ - "\u01d2\3\2\2\2\u01d0\u01ce\3\2\2\2\u01d0\u01d1\3\2\2\2\u01d1/\3\2\2\2"+ - "\u01d2\u01d0\3\2\2\2\u01d3\u01d4\7\5\2\2\u01d4\u01d6\5b\62\2\u01d5\u01d3"+ - "\3\2\2\2\u01d6\u01d9\3\2\2\2\u01d7\u01d5\3\2\2\2\u01d7\u01d8\3\2\2\2\u01d8"+ - "\61\3\2\2\2\u01d9\u01d7\3\2\2\2\u01da\u01dc\5<\37\2\u01db\u01dd\5\64\33"+ - "\2\u01dc\u01db\3\2\2\2\u01dc\u01dd\3\2\2\2\u01dd\63\3\2\2\2\u01de\u01e0"+ - "\7\61\2\2\u01df\u01de\3\2\2\2\u01df\u01e0\3\2\2\2\u01e0\u01e1\3\2\2\2"+ - "\u01e1\u01e2\7\16\2\2\u01e2\u01e3\5<\37\2\u01e3\u01e4\7\n\2\2\u01e4\u01e5"+ - "\5<\37\2\u01e5\u020d\3\2\2\2\u01e6\u01e8\7\61\2\2\u01e7\u01e6\3\2\2\2"+ - "\u01e7\u01e8\3\2\2\2\u01e8\u01e9\3\2\2\2\u01e9\u01ea\7&\2\2\u01ea\u01eb"+ - "\7\3\2\2\u01eb\u01f0\5,\27\2\u01ec\u01ed\7\5\2\2\u01ed\u01ef\5,\27\2\u01ee"+ - "\u01ec\3\2\2\2\u01ef\u01f2\3\2\2\2\u01f0\u01ee\3\2\2\2\u01f0\u01f1\3\2"+ - "\2\2\u01f1\u01f3\3\2\2\2\u01f2\u01f0\3\2\2\2\u01f3\u01f4\7\4\2\2\u01f4"+ - "\u020d\3\2\2\2\u01f5\u01f7\7\61\2\2\u01f6\u01f5\3\2\2\2\u01f6\u01f7\3"+ - "\2\2\2\u01f7\u01f8\3\2\2\2\u01f8\u01f9\7&\2\2\u01f9\u01fa\7\3\2\2\u01fa"+ - "\u01fb\5\b\5\2\u01fb\u01fc\7\4\2\2\u01fc\u020d\3\2\2\2\u01fd\u01ff\7\61"+ - "\2\2\u01fe\u01fd\3\2\2\2\u01fe\u01ff\3\2\2\2\u01ff\u0200\3\2\2\2\u0200"+ - "\u0201\7,\2\2\u0201\u020d\58\35\2\u0202\u0204\7\61\2\2\u0203\u0202\3\2"+ - "\2\2\u0203\u0204\3\2\2\2\u0204\u0205\3\2\2\2\u0205\u0206\7=\2\2\u0206"+ - "\u020d\5b\62\2\u0207\u0209\7(\2\2\u0208\u020a\7\61\2\2\u0209\u0208\3\2"+ - "\2\2\u0209\u020a\3\2\2\2\u020a\u020b\3\2\2\2\u020b\u020d\7\62\2\2\u020c"+ - "\u01df\3\2\2\2\u020c\u01e7\3\2\2\2\u020c\u01f6\3\2\2\2\u020c\u01fe\3\2"+ - "\2\2\u020c\u0203\3\2\2\2\u020c\u0207\3\2\2\2\u020d\65\3\2\2\2\u020e\u020f"+ - "\7,\2\2\u020f\u0210\58\35\2\u0210\67\3\2\2\2\u0211\u0213\5b\62\2\u0212"+ - "\u0214\5:\36\2\u0213\u0212\3\2\2\2\u0213\u0214\3\2\2\2\u02149\3\2\2\2"+ - "\u0215\u0216\7\30\2\2\u0216\u021c\5b\62\2\u0217\u0218\7M\2\2\u0218\u0219"+ - "\5b\62\2\u0219\u021a\7T\2\2\u021a\u021c\3\2\2\2\u021b\u0215\3\2\2\2\u021b"+ - "\u0217\3\2\2\2\u021c;\3\2\2\2\u021d\u021e\b\37\1\2\u021e\u0222\5> \2\u021f"+ - "\u0220\t\13\2\2\u0220\u0222\5<\37\6\u0221\u021d\3\2\2\2\u0221\u021f\3"+ - "\2\2\2\u0222\u022f\3\2\2\2\u0223\u0224\f\5\2\2\u0224\u0225\t\f\2\2\u0225"+ - 
"\u022e\5<\37\6\u0226\u0227\f\4\2\2\u0227\u0228\t\13\2\2\u0228\u022e\5"+ - "<\37\5\u0229\u022a\f\3\2\2\u022a\u022b\5P)\2\u022b\u022c\5<\37\4\u022c"+ - "\u022e\3\2\2\2\u022d\u0223\3\2\2\2\u022d\u0226\3\2\2\2\u022d\u0229\3\2"+ - "\2\2\u022e\u0231\3\2\2\2\u022f\u022d\3\2\2\2\u022f\u0230\3\2\2\2\u0230"+ - "=\3\2\2\2\u0231\u022f\3\2\2\2\u0232\u0246\5@!\2\u0233\u0246\5D#\2\u0234"+ - "\u0246\5N(\2\u0235\u0236\5V,\2\u0236\u0237\7a\2\2\u0237\u0239\3\2\2\2"+ - "\u0238\u0235\3\2\2\2\u0238\u0239\3\2\2\2\u0239\u023a\3\2\2\2\u023a\u0246"+ - "\7]\2\2\u023b\u0246\5H%\2\u023c\u023d\7\3\2\2\u023d\u023e\5\b\5\2\u023e"+ - "\u023f\7\4\2\2\u023f\u0246\3\2\2\2\u0240\u0246\5V,\2\u0241\u0242\7\3\2"+ - "\2\u0242\u0243\5,\27\2\u0243\u0244\7\4\2\2\u0244\u0246\3\2\2\2\u0245\u0232"+ - "\3\2\2\2\u0245\u0233\3\2\2\2\u0245\u0234\3\2\2\2\u0245\u0238\3\2\2\2\u0245"+ - "\u023b\3\2\2\2\u0245\u023c\3\2\2\2\u0245\u0240\3\2\2\2\u0245\u0241\3\2"+ - "\2\2\u0246?\3\2\2\2\u0247\u024d\5B\"\2\u0248\u0249\7N\2\2\u0249\u024a"+ - "\5B\"\2\u024a\u024b\7T\2\2\u024b\u024d\3\2\2\2\u024c\u0247\3\2\2\2\u024c"+ - "\u0248\3\2\2\2\u024dA\3\2\2\2\u024e\u024f\7\20\2\2\u024f\u0250\7\3\2\2"+ - "\u0250\u0251\5,\27\2\u0251\u0252\7\f\2\2\u0252\u0253\5T+\2\u0253\u0254"+ - "\7\4\2\2\u0254C\3\2\2\2\u0255\u025b\5F$\2\u0256\u0257\7N\2\2\u0257\u0258"+ - "\5F$\2\u0258\u0259\7T\2\2\u0259\u025b\3\2\2\2\u025a\u0255\3\2\2\2\u025a"+ - "\u0256\3\2\2\2\u025bE\3\2\2\2\u025c\u025d\7\34\2\2\u025d\u025e\7\3\2\2"+ - "\u025e\u025f\5X-\2\u025f\u0260\7 \2\2\u0260\u0261\5<\37\2\u0261\u0262"+ - "\7\4\2\2\u0262G\3\2\2\2\u0263\u0269\5J&\2\u0264\u0265\7N\2\2\u0265\u0266"+ - "\5J&\2\u0266\u0267\7T\2\2\u0267\u0269\3\2\2\2\u0268\u0263\3\2\2\2\u0268"+ - "\u0264\3\2\2\2\u0269I\3\2\2\2\u026a\u026b\5L\'\2\u026b\u0277\7\3\2\2\u026c"+ - "\u026e\5\36\20\2\u026d\u026c\3\2\2\2\u026d\u026e\3\2\2\2\u026e\u026f\3"+ - "\2\2\2\u026f\u0274\5,\27\2\u0270\u0271\7\5\2\2\u0271\u0273\5,\27\2\u0272"+ - "\u0270\3\2\2\2\u0273\u0276\3\2\2\2\u0274\u0272\3\2\2\2\u0274\u0275\3\2"+ - "\2\2\u0275\u0278\3\2\2\2\u0276\u0274\3\2\2\2\u0277\u026d\3\2\2\2\u0277"+ - "\u0278\3\2\2\2\u0278\u0279\3\2\2\2\u0279\u027a\7\4\2\2\u027aK\3\2\2\2"+ - "\u027b\u027f\7+\2\2\u027c\u027f\7<\2\2\u027d\u027f\5X-\2\u027e\u027b\3"+ - "\2\2\2\u027e\u027c\3\2\2\2\u027e\u027d\3\2\2\2\u027fM\3\2\2\2\u0280\u029a"+ - "\7\62\2\2\u0281\u029a\5`\61\2\u0282\u029a\5R*\2\u0283\u0285\7c\2\2\u0284"+ - "\u0283\3\2\2\2\u0285\u0286\3\2\2\2\u0286\u0284\3\2\2\2\u0286\u0287\3\2"+ - "\2\2\u0287\u029a\3\2\2\2\u0288\u029a\7b\2\2\u0289\u028a\7P\2\2\u028a\u028b"+ - "\5b\62\2\u028b\u028c\7T\2\2\u028c\u029a\3\2\2\2\u028d\u028e\7Q\2\2\u028e"+ - "\u028f\5b\62\2\u028f\u0290\7T\2\2\u0290\u029a\3\2\2\2\u0291\u0292\7R\2"+ - "\2\u0292\u0293\5b\62\2\u0293\u0294\7T\2\2\u0294\u029a\3\2\2\2\u0295\u0296"+ - "\7S\2\2\u0296\u0297\5b\62\2\u0297\u0298\7T\2\2\u0298\u029a\3\2\2\2\u0299"+ - "\u0280\3\2\2\2\u0299\u0281\3\2\2\2\u0299\u0282\3\2\2\2\u0299\u0284\3\2"+ - "\2\2\u0299\u0288\3\2\2\2\u0299\u0289\3\2\2\2\u0299\u028d\3\2\2\2\u0299"+ - "\u0291\3\2\2\2\u0299\u0295\3\2\2\2\u029aO\3\2\2\2\u029b\u029c\t\r\2\2"+ - "\u029cQ\3\2\2\2\u029d\u029e\t\16\2\2\u029eS\3\2\2\2\u029f\u02a0\5X-\2"+ - "\u02a0U\3\2\2\2\u02a1\u02a2\5X-\2\u02a2\u02a3\7a\2\2\u02a3\u02a5\3\2\2"+ - "\2\u02a4\u02a1\3\2\2\2\u02a5\u02a8\3\2\2\2\u02a6\u02a4\3\2\2\2\u02a6\u02a7"+ - "\3\2\2\2\u02a7\u02a9\3\2\2\2\u02a8\u02a6\3\2\2\2\u02a9\u02aa\5X-\2\u02aa"+ - "W\3\2\2\2\u02ab\u02ae\5\\/\2\u02ac\u02ae\5^\60\2\u02ad\u02ab\3\2\2\2\u02ad"+ - "\u02ac\3\2\2\2\u02aeY\3\2\2\2\u02af\u02b0\5X-\2\u02b0\u02b1\7\6\2\2\u02b1"+ - 
"\u02b3\3\2\2\2\u02b2\u02af\3\2\2\2\u02b2\u02b3\3\2\2\2\u02b3\u02b4\3\2"+ - "\2\2\u02b4\u02bc\7h\2\2\u02b5\u02b6\5X-\2\u02b6\u02b7\7\6\2\2\u02b7\u02b9"+ - "\3\2\2\2\u02b8\u02b5\3\2\2\2\u02b8\u02b9\3\2\2\2\u02b9\u02ba\3\2\2\2\u02ba"+ - "\u02bc\5X-\2\u02bb\u02b2\3\2\2\2\u02bb\u02b8\3\2\2\2\u02bc[\3\2\2\2\u02bd"+ - "\u02c0\7i\2\2\u02be\u02c0\7j\2\2\u02bf\u02bd\3\2\2\2\u02bf\u02be\3\2\2"+ - "\2\u02c0]\3\2\2\2\u02c1\u02c5\7f\2\2\u02c2\u02c5\5d\63\2\u02c3\u02c5\7"+ - "g\2\2\u02c4\u02c1\3\2\2\2\u02c4\u02c2\3\2\2\2\u02c4\u02c3\3\2\2\2\u02c5"+ - "_\3\2\2\2\u02c6\u02c9\7e\2\2\u02c7\u02c9\7d\2\2\u02c8\u02c6\3\2\2\2\u02c8"+ - "\u02c7\3\2\2\2\u02c9a\3\2\2\2\u02ca\u02cb\t\17\2\2\u02cbc\3\2\2\2\u02cc"+ - "\u02cd\t\20\2\2\u02cde\3\2\2\2auw{\u0084\u0086\u008a\u0091\u0098\u009d"+ - "\u00a2\u00ac\u00b0\u00b8\u00bb\u00c1\u00c6\u00c9\u00d0\u00d8\u00db\u00e7"+ - "\u00ea\u00ed\u00f4\u00fb\u00ff\u0103\u0107\u010e\u0112\u0116\u011b\u011f"+ - "\u0127\u012b\u0132\u013d\u0140\u0144\u0150\u0153\u0159\u0160\u0167\u016a"+ - "\u016e\u0172\u0176\u0178\u0183\u0188\u018c\u018f\u0195\u0198\u019e\u01a1"+ - "\u01a3\u01c6\u01ce\u01d0\u01d7\u01dc\u01df\u01e7\u01f0\u01f6\u01fe\u0203"+ - "\u0209\u020c\u0213\u021b\u0221\u022d\u022f\u0238\u0245\u024c\u025a\u0268"+ - "\u026d\u0274\u0277\u027e\u0286\u0299\u02a6\u02ad\u02b2\u02b8\u02bb\u02bf"+ - "\u02c4\u02c8"; + "\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30"+ + "\3\30\3\30\3\30\5\30\u01c9\n\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01d1"+ + "\n\30\f\30\16\30\u01d4\13\30\3\31\3\31\7\31\u01d8\n\31\f\31\16\31\u01db"+ + "\13\31\3\32\3\32\5\32\u01df\n\32\3\33\5\33\u01e2\n\33\3\33\3\33\3\33\3"+ + "\33\3\33\3\33\5\33\u01ea\n\33\3\33\3\33\3\33\3\33\3\33\7\33\u01f1\n\33"+ + "\f\33\16\33\u01f4\13\33\3\33\3\33\3\33\5\33\u01f9\n\33\3\33\3\33\3\33"+ + "\3\33\3\33\3\33\5\33\u0201\n\33\3\33\3\33\3\33\5\33\u0206\n\33\3\33\3"+ + "\33\3\33\3\33\5\33\u020c\n\33\3\33\5\33\u020f\n\33\3\34\3\34\3\34\3\35"+ + "\3\35\5\35\u0216\n\35\3\36\3\36\3\36\3\36\3\36\3\36\5\36\u021e\n\36\3"+ + "\37\3\37\3\37\3\37\5\37\u0224\n\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37"+ + "\3\37\3\37\3\37\7\37\u0230\n\37\f\37\16\37\u0233\13\37\3 \3 \3 \3 \3 "+ + "\3 \5 \u023b\n \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \5 \u0248\n \3!\3!\3!"+ + "\3!\3!\3!\3!\3!\3!\3!\5!\u0254\n!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3"+ + "#\3#\3#\3#\3#\3$\3$\3$\3$\3$\5$\u0269\n$\3%\3%\3%\3%\3%\3%\3%\3&\3&\3"+ + "&\3&\3&\5&\u0277\n&\3\'\3\'\3\'\5\'\u027c\n\'\3\'\3\'\3\'\7\'\u0281\n"+ + "\'\f\'\16\'\u0284\13\'\5\'\u0286\n\'\3\'\3\'\3(\3(\3(\5(\u028d\n(\3)\3"+ + ")\3)\3)\6)\u0293\n)\r)\16)\u0294\3)\3)\3)\3)\3)\3)\3)\3)\3)\3)\3)\3)\3"+ + ")\3)\3)\3)\3)\5)\u02a8\n)\3*\3*\3+\3+\3,\3,\3-\3-\3-\7-\u02b3\n-\f-\16"+ + "-\u02b6\13-\3-\3-\3.\3.\5.\u02bc\n.\3/\3/\3/\5/\u02c1\n/\3/\3/\3/\3/\5"+ + "/\u02c7\n/\3/\5/\u02ca\n/\3\60\3\60\5\60\u02ce\n\60\3\61\3\61\3\61\5\61"+ + "\u02d3\n\61\3\62\3\62\5\62\u02d7\n\62\3\63\3\63\3\64\3\64\3\64\2\4.<\65"+ + "\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFH"+ + "JLNPRTVXZ\\^`bdf\2\21\b\2\7\7\t\t\32\32//\66\66::\4\2$$FF\4\2\t\t\66\66"+ + "\4\2!!\'\'\3\2\26\27\4\2\7\7ee\4\2\r\r\26\26\4\2\37\37++\4\2\7\7\30\30"+ + "\3\2\\]\3\2^`\3\2V[\4\2\36\36GG\3\2cd\21\2\b\t\22\23\25\25\32\32\34\34"+ + " #$//\66\66:<>@BCEFHIKK\u0334\2h\3\2\2\2\4k\3\2\2\2\6\u00d2\3\2\2\2\b"+ + "\u00dd\3\2\2\2\n\u00e1\3\2\2\2\f\u00f6\3\2\2\2\16\u00fd\3\2\2\2\20\u00ff"+ + "\3\2\2\2\22\u0107\3\2\2\2\24\u0123\3\2\2\2\26\u012d\3\2\2\2\30\u0137\3"+ + "\2\2\2\32\u0146\3\2\2\2\34\u0148\3\2\2\2\36\u014e\3\2\2\2 \u0150\3\2\2"+ + 
"\2\"\u0157\3\2\2\2$\u0169\3\2\2\2&\u017a\3\2\2\2(\u018a\3\2\2\2*\u01a5"+ + "\3\2\2\2,\u01a7\3\2\2\2.\u01c8\3\2\2\2\60\u01d9\3\2\2\2\62\u01dc\3\2\2"+ + "\2\64\u020e\3\2\2\2\66\u0210\3\2\2\28\u0213\3\2\2\2:\u021d\3\2\2\2<\u0223"+ + "\3\2\2\2>\u0247\3\2\2\2@\u0253\3\2\2\2B\u0255\3\2\2\2D\u025c\3\2\2\2F"+ + "\u0268\3\2\2\2H\u026a\3\2\2\2J\u0276\3\2\2\2L\u0278\3\2\2\2N\u028c\3\2"+ + "\2\2P\u02a7\3\2\2\2R\u02a9\3\2\2\2T\u02ab\3\2\2\2V\u02ad\3\2\2\2X\u02b4"+ + "\3\2\2\2Z\u02bb\3\2\2\2\\\u02c9\3\2\2\2^\u02cd\3\2\2\2`\u02d2\3\2\2\2"+ + "b\u02d6\3\2\2\2d\u02d8\3\2\2\2f\u02da\3\2\2\2hi\5\6\4\2ij\7\2\2\3j\3\3"+ + "\2\2\2kl\5,\27\2lm\7\2\2\3m\5\3\2\2\2n\u00d3\5\b\5\2o}\7\34\2\2py\7\3"+ + "\2\2qr\7<\2\2rx\t\2\2\2st\7 \2\2tx\t\3\2\2uv\7K\2\2vx\5T+\2wq\3\2\2\2"+ + "ws\3\2\2\2wu\3\2\2\2x{\3\2\2\2yw\3\2\2\2yz\3\2\2\2z|\3\2\2\2{y\3\2\2\2"+ + "|~\7\4\2\2}p\3\2\2\2}~\3\2\2\2~\177\3\2\2\2\177\u00d3\5\6\4\2\u0080\u008c"+ + "\7\25\2\2\u0081\u0088\7\3\2\2\u0082\u0083\7<\2\2\u0083\u0087\t\4\2\2\u0084"+ + "\u0085\7 \2\2\u0085\u0087\t\3\2\2\u0086\u0082\3\2\2\2\u0086\u0084\3\2"+ + "\2\2\u0087\u008a\3\2\2\2\u0088\u0086\3\2\2\2\u0088\u0089\3\2\2\2\u0089"+ + "\u008b\3\2\2\2\u008a\u0088\3\2\2\2\u008b\u008d\7\4\2\2\u008c\u0081\3\2"+ + "\2\2\u008c\u008d\3\2\2\2\u008d\u008e\3\2\2\2\u008e\u00d3\5\6\4\2\u008f"+ + "\u0090\7B\2\2\u0090\u0093\7E\2\2\u0091\u0094\5\66\34\2\u0092\u0094\5\\"+ + "/\2\u0093\u0091\3\2\2\2\u0093\u0092\3\2\2\2\u0093\u0094\3\2\2\2\u0094"+ + "\u00d3\3\2\2\2\u0095\u0096\7B\2\2\u0096\u0097\7\23\2\2\u0097\u009a\t\5"+ + "\2\2\u0098\u009b\5\66\34\2\u0099\u009b\5\\/\2\u009a\u0098\3\2\2\2\u009a"+ + "\u0099\3\2\2\2\u009b\u00d3\3\2\2\2\u009c\u009f\t\6\2\2\u009d\u00a0\5\66"+ + "\34\2\u009e\u00a0\5\\/\2\u009f\u009d\3\2\2\2\u009f\u009e\3\2\2\2\u00a0"+ + "\u00d3\3\2\2\2\u00a1\u00a2\7B\2\2\u00a2\u00a4\7#\2\2\u00a3\u00a5\5\66"+ + "\34\2\u00a4\u00a3\3\2\2\2\u00a4\u00a5\3\2\2\2\u00a5\u00d3\3\2\2\2\u00a6"+ + "\u00a7\7B\2\2\u00a7\u00d3\7@\2\2\u00a8\u00a9\7C\2\2\u00a9\u00d3\7\22\2"+ + "\2\u00aa\u00ab\7C\2\2\u00ab\u00ae\7E\2\2\u00ac\u00ad\7\21\2\2\u00ad\u00af"+ + "\5\66\34\2\u00ae\u00ac\3\2\2\2\u00ae\u00af\3\2\2\2\u00af\u00b2\3\2\2\2"+ + "\u00b0\u00b3\5\66\34\2\u00b1\u00b3\5\\/\2\u00b2\u00b0\3\2\2\2\u00b2\u00b1"+ + "\3\2\2\2\u00b2\u00b3\3\2\2\2\u00b3\u00bd\3\2\2\2\u00b4\u00b5\7H\2\2\u00b5"+ + "\u00ba\5d\63\2\u00b6\u00b7\7\5\2\2\u00b7\u00b9\5d\63\2\u00b8\u00b6\3\2"+ + "\2\2\u00b9\u00bc\3\2\2\2\u00ba\u00b8\3\2\2\2\u00ba\u00bb\3\2\2\2\u00bb"+ + "\u00be\3\2\2\2\u00bc\u00ba\3\2\2\2\u00bd\u00b4\3\2\2\2\u00bd\u00be\3\2"+ + "\2\2\u00be\u00d3\3\2\2\2\u00bf\u00c0\7C\2\2\u00c0\u00c3\7\23\2\2\u00c1"+ + "\u00c2\7\21\2\2\u00c2\u00c4\5d\63\2\u00c3\u00c1\3\2\2\2\u00c3\u00c4\3"+ + "\2\2\2\u00c4\u00c8\3\2\2\2\u00c5\u00c6\7D\2\2\u00c6\u00c9\5\66\34\2\u00c7"+ + "\u00c9\5\\/\2\u00c8\u00c5\3\2\2\2\u00c8\u00c7\3\2\2\2\u00c8\u00c9\3\2"+ + "\2\2\u00c9\u00cb\3\2\2\2\u00ca\u00cc\5\66\34\2\u00cb\u00ca\3\2\2\2\u00cb"+ + "\u00cc\3\2\2\2\u00cc\u00d3\3\2\2\2\u00cd\u00ce\7C\2\2\u00ce\u00d3\7I\2"+ + "\2\u00cf\u00d0\7C\2\2\u00d0\u00d1\7D\2\2\u00d1\u00d3\7I\2\2\u00d2n\3\2"+ + "\2\2\u00d2o\3\2\2\2\u00d2\u0080\3\2\2\2\u00d2\u008f\3\2\2\2\u00d2\u0095"+ + "\3\2\2\2\u00d2\u009c\3\2\2\2\u00d2\u00a1\3\2\2\2\u00d2\u00a6\3\2\2\2\u00d2"+ + "\u00a8\3\2\2\2\u00d2\u00aa\3\2\2\2\u00d2\u00bf\3\2\2\2\u00d2\u00cd\3\2"+ + "\2\2\u00d2\u00cf\3\2\2\2\u00d3\7\3\2\2\2\u00d4\u00d5\7M\2\2\u00d5\u00da"+ + "\5\34\17\2\u00d6\u00d7\7\5\2\2\u00d7\u00d9\5\34\17\2\u00d8\u00d6\3\2\2"+ + "\2\u00d9\u00dc\3\2\2\2\u00da\u00d8\3\2\2\2\u00da\u00db\3\2\2\2\u00db\u00de"+ + 
"\3\2\2\2\u00dc\u00da\3\2\2\2\u00dd\u00d4\3\2\2\2\u00dd\u00de\3\2\2\2\u00de"+ + "\u00df\3\2\2\2\u00df\u00e0\5\n\6\2\u00e0\t\3\2\2\2\u00e1\u00ec\5\16\b"+ + "\2\u00e2\u00e3\78\2\2\u00e3\u00e4\7\17\2\2\u00e4\u00e9\5\20\t\2\u00e5"+ + "\u00e6\7\5\2\2\u00e6\u00e8\5\20\t\2\u00e7\u00e5\3\2\2\2\u00e8\u00eb\3"+ + "\2\2\2\u00e9\u00e7\3\2\2\2\u00e9\u00ea\3\2\2\2\u00ea\u00ed\3\2\2\2\u00eb"+ + "\u00e9\3\2\2\2\u00ec\u00e2\3\2\2\2\u00ec\u00ed\3\2\2\2\u00ed\u00ef\3\2"+ + "\2\2\u00ee\u00f0\5\f\7\2\u00ef\u00ee\3\2\2\2\u00ef\u00f0\3\2\2\2\u00f0"+ + "\13\3\2\2\2\u00f1\u00f2\7.\2\2\u00f2\u00f7\t\7\2\2\u00f3\u00f4\7P\2\2"+ + "\u00f4\u00f5\t\7\2\2\u00f5\u00f7\7U\2\2\u00f6\u00f1\3\2\2\2\u00f6\u00f3"+ + "\3\2\2\2\u00f7\r\3\2\2\2\u00f8\u00fe\5\22\n\2\u00f9\u00fa\7\3\2\2\u00fa"+ + "\u00fb\5\n\6\2\u00fb\u00fc\7\4\2\2\u00fc\u00fe\3\2\2\2\u00fd\u00f8\3\2"+ + "\2\2\u00fd\u00f9\3\2\2\2\u00fe\17\3\2\2\2\u00ff\u0101\5,\27\2\u0100\u0102"+ + "\t\b\2\2\u0101\u0100\3\2\2\2\u0101\u0102\3\2\2\2\u0102\u0105\3\2\2\2\u0103"+ + "\u0104\7\64\2\2\u0104\u0106\t\t\2\2\u0105\u0103\3\2\2\2\u0105\u0106\3"+ + "\2\2\2\u0106\21\3\2\2\2\u0107\u0109\7A\2\2\u0108\u010a\5\36\20\2\u0109"+ + "\u0108\3\2\2\2\u0109\u010a\3\2\2\2\u010a\u010b\3\2\2\2\u010b\u0110\5 "+ + "\21\2\u010c\u010d\7\5\2\2\u010d\u010f\5 \21\2\u010e\u010c\3\2\2\2\u010f"+ + "\u0112\3\2\2\2\u0110\u010e\3\2\2\2\u0110\u0111\3\2\2\2\u0111\u0114\3\2"+ + "\2\2\u0112\u0110\3\2\2\2\u0113\u0115\5\24\13\2\u0114\u0113\3\2\2\2\u0114"+ + "\u0115\3\2\2\2\u0115\u0118\3\2\2\2\u0116\u0117\7L\2\2\u0117\u0119\5.\30"+ + "\2\u0118\u0116\3\2\2\2\u0118\u0119\3\2\2\2\u0119\u011d\3\2\2\2\u011a\u011b"+ + "\7%\2\2\u011b\u011c\7\17\2\2\u011c\u011e\5\26\f\2\u011d\u011a\3\2\2\2"+ + "\u011d\u011e\3\2\2\2\u011e\u0121\3\2\2\2\u011f\u0120\7&\2\2\u0120\u0122"+ + "\5.\30\2\u0121\u011f\3\2\2\2\u0121\u0122\3\2\2\2\u0122\23\3\2\2\2\u0123"+ + "\u0124\7!\2\2\u0124\u0129\5\"\22\2\u0125\u0126\7\5\2\2\u0126\u0128\5\""+ + "\22\2\u0127\u0125\3\2\2\2\u0128\u012b\3\2\2\2\u0129\u0127\3\2\2\2\u0129"+ + "\u012a\3\2\2\2\u012a\25\3\2\2\2\u012b\u0129\3\2\2\2\u012c\u012e\5\36\20"+ + "\2\u012d\u012c\3\2\2\2\u012d\u012e\3\2\2\2\u012e\u012f\3\2\2\2\u012f\u0134"+ + "\5\30\r\2\u0130\u0131\7\5\2\2\u0131\u0133\5\30\r\2\u0132\u0130\3\2\2\2"+ + "\u0133\u0136\3\2\2\2\u0134\u0132\3\2\2\2\u0134\u0135\3\2\2\2\u0135\27"+ + "\3\2\2\2\u0136\u0134\3\2\2\2\u0137\u0138\5\32\16\2\u0138\31\3\2\2\2\u0139"+ + "\u0142\7\3\2\2\u013a\u013f\5,\27\2\u013b\u013c\7\5\2\2\u013c\u013e\5,"+ + "\27\2\u013d\u013b\3\2\2\2\u013e\u0141\3\2\2\2\u013f\u013d\3\2\2\2\u013f"+ + "\u0140\3\2\2\2\u0140\u0143\3\2\2\2\u0141\u013f\3\2\2\2\u0142\u013a\3\2"+ + "\2\2\u0142\u0143\3\2\2\2\u0143\u0144\3\2\2\2\u0144\u0147\7\4\2\2\u0145"+ + "\u0147\5,\27\2\u0146\u0139\3\2\2\2\u0146\u0145\3\2\2\2\u0147\33\3\2\2"+ + "\2\u0148\u0149\5Z.\2\u0149\u014a\7\f\2\2\u014a\u014b\7\3\2\2\u014b\u014c"+ + "\5\n\6\2\u014c\u014d\7\4\2\2\u014d\35\3\2\2\2\u014e\u014f\t\n\2\2\u014f"+ + "\37\3\2\2\2\u0150\u0155\5,\27\2\u0151\u0153\7\f\2\2\u0152\u0151\3\2\2"+ + "\2\u0152\u0153\3\2\2\2\u0153\u0154\3\2\2\2\u0154\u0156\5Z.\2\u0155\u0152"+ + "\3\2\2\2\u0155\u0156\3\2\2\2\u0156!\3\2\2\2\u0157\u015b\5*\26\2\u0158"+ + "\u015a\5$\23\2\u0159\u0158\3\2\2\2\u015a\u015d\3\2\2\2\u015b\u0159\3\2"+ + "\2\2\u015b\u015c\3\2\2\2\u015c#\3\2\2\2\u015d\u015b\3\2\2\2\u015e\u015f"+ + "\5&\24\2\u015f\u0160\7*\2\2\u0160\u0162\5*\26\2\u0161\u0163\5(\25\2\u0162"+ + "\u0161\3\2\2\2\u0162\u0163\3\2\2\2\u0163\u016a\3\2\2\2\u0164\u0165\7\61"+ + "\2\2\u0165\u0166\5&\24\2\u0166\u0167\7*\2\2\u0167\u0168\5*\26\2\u0168"+ + 
"\u016a\3\2\2\2\u0169\u015e\3\2\2\2\u0169\u0164\3\2\2\2\u016a%\3\2\2\2"+ + "\u016b\u016d\7(\2\2\u016c\u016b\3\2\2\2\u016c\u016d\3\2\2\2\u016d\u017b"+ + "\3\2\2\2\u016e\u0170\7,\2\2\u016f\u0171\79\2\2\u0170\u016f\3\2\2\2\u0170"+ + "\u0171\3\2\2\2\u0171\u017b\3\2\2\2\u0172\u0174\7=\2\2\u0173\u0175\79\2"+ + "\2\u0174\u0173\3\2\2\2\u0174\u0175\3\2\2\2\u0175\u017b\3\2\2\2\u0176\u0178"+ + "\7\"\2\2\u0177\u0179\79\2\2\u0178\u0177\3\2\2\2\u0178\u0179\3\2\2\2\u0179"+ + "\u017b\3\2\2\2\u017a\u016c\3\2\2\2\u017a\u016e\3\2\2\2\u017a\u0172\3\2"+ + "\2\2\u017a\u0176\3\2\2\2\u017b\'\3\2\2\2\u017c\u017d\7\65\2\2\u017d\u018b"+ + "\5.\30\2\u017e\u017f\7J\2\2\u017f\u0180\7\3\2\2\u0180\u0185\5Z.\2\u0181"+ + "\u0182\7\5\2\2\u0182\u0184\5Z.\2\u0183\u0181\3\2\2\2\u0184\u0187\3\2\2"+ + "\2\u0185\u0183\3\2\2\2\u0185\u0186\3\2\2\2\u0186\u0188\3\2\2\2\u0187\u0185"+ + "\3\2\2\2\u0188\u0189\7\4\2\2\u0189\u018b\3\2\2\2\u018a\u017c\3\2\2\2\u018a"+ + "\u017e\3\2\2\2\u018b)\3\2\2\2\u018c\u0191\5\\/\2\u018d\u018f\7\f\2\2\u018e"+ + "\u018d\3\2\2\2\u018e\u018f\3\2\2\2\u018f\u0190\3\2\2\2\u0190\u0192\5X"+ + "-\2\u0191\u018e\3\2\2\2\u0191\u0192\3\2\2\2\u0192\u01a6\3\2\2\2\u0193"+ + "\u0194\7\3\2\2\u0194\u0195\5\n\6\2\u0195\u019a\7\4\2\2\u0196\u0198\7\f"+ + "\2\2\u0197\u0196\3\2\2\2\u0197\u0198\3\2\2\2\u0198\u0199\3\2\2\2\u0199"+ + "\u019b\5X-\2\u019a\u0197\3\2\2\2\u019a\u019b\3\2\2\2\u019b\u01a6\3\2\2"+ + "\2\u019c\u019d\7\3\2\2\u019d\u019e\5\"\22\2\u019e\u01a3\7\4\2\2\u019f"+ + "\u01a1\7\f\2\2\u01a0\u019f\3\2\2\2\u01a0\u01a1\3\2\2\2\u01a1\u01a2\3\2"+ + "\2\2\u01a2\u01a4\5X-\2\u01a3\u01a0\3\2\2\2\u01a3\u01a4\3\2\2\2\u01a4\u01a6"+ + "\3\2\2\2\u01a5\u018c\3\2\2\2\u01a5\u0193\3\2\2\2\u01a5\u019c\3\2\2\2\u01a6"+ + "+\3\2\2\2\u01a7\u01a8\5.\30\2\u01a8-\3\2\2\2\u01a9\u01aa\b\30\1\2\u01aa"+ + "\u01ab\7\62\2\2\u01ab\u01c9\5.\30\n\u01ac\u01ad\7\33\2\2\u01ad\u01ae\7"+ + "\3\2\2\u01ae\u01af\5\b\5\2\u01af\u01b0\7\4\2\2\u01b0\u01c9\3\2\2\2\u01b1"+ + "\u01b2\7?\2\2\u01b2\u01b3\7\3\2\2\u01b3\u01b4\5d\63\2\u01b4\u01b5\5\60"+ + "\31\2\u01b5\u01b6\7\4\2\2\u01b6\u01c9\3\2\2\2\u01b7\u01b8\7\60\2\2\u01b8"+ + "\u01b9\7\3\2\2\u01b9\u01ba\5X-\2\u01ba\u01bb\7\5\2\2\u01bb\u01bc\5d\63"+ + "\2\u01bc\u01bd\5\60\31\2\u01bd\u01be\7\4\2\2\u01be\u01c9\3\2\2\2\u01bf"+ + "\u01c0\7\60\2\2\u01c0\u01c1\7\3\2\2\u01c1\u01c2\5d\63\2\u01c2\u01c3\7"+ + "\5\2\2\u01c3\u01c4\5d\63\2\u01c4\u01c5\5\60\31\2\u01c5\u01c6\7\4\2\2\u01c6"+ + "\u01c9\3\2\2\2\u01c7\u01c9\5\62\32\2\u01c8\u01a9\3\2\2\2\u01c8\u01ac\3"+ + "\2\2\2\u01c8\u01b1\3\2\2\2\u01c8\u01b7\3\2\2\2\u01c8\u01bf\3\2\2\2\u01c8"+ + "\u01c7\3\2\2\2\u01c9\u01d2\3\2\2\2\u01ca\u01cb\f\4\2\2\u01cb\u01cc\7\n"+ + "\2\2\u01cc\u01d1\5.\30\5\u01cd\u01ce\f\3\2\2\u01ce\u01cf\7\67\2\2\u01cf"+ + "\u01d1\5.\30\4\u01d0\u01ca\3\2\2\2\u01d0\u01cd\3\2\2\2\u01d1\u01d4\3\2"+ + "\2\2\u01d2\u01d0\3\2\2\2\u01d2\u01d3\3\2\2\2\u01d3/\3\2\2\2\u01d4\u01d2"+ + "\3\2\2\2\u01d5\u01d6\7\5\2\2\u01d6\u01d8\5d\63\2\u01d7\u01d5\3\2\2\2\u01d8"+ + "\u01db\3\2\2\2\u01d9\u01d7\3\2\2\2\u01d9\u01da\3\2\2\2\u01da\61\3\2\2"+ + "\2\u01db\u01d9\3\2\2\2\u01dc\u01de\5<\37\2\u01dd\u01df\5\64\33\2\u01de"+ + "\u01dd\3\2\2\2\u01de\u01df\3\2\2\2\u01df\63\3\2\2\2\u01e0\u01e2\7\62\2"+ + "\2\u01e1\u01e0\3\2\2\2\u01e1\u01e2\3\2\2\2\u01e2\u01e3\3\2\2\2\u01e3\u01e4"+ + "\7\16\2\2\u01e4\u01e5\5<\37\2\u01e5\u01e6\7\n\2\2\u01e6\u01e7\5<\37\2"+ + "\u01e7\u020f\3\2\2\2\u01e8\u01ea\7\62\2\2\u01e9\u01e8\3\2\2\2\u01e9\u01ea"+ + "\3\2\2\2\u01ea\u01eb\3\2\2\2\u01eb\u01ec\7\'\2\2\u01ec\u01ed\7\3\2\2\u01ed"+ + "\u01f2\5,\27\2\u01ee\u01ef\7\5\2\2\u01ef\u01f1\5,\27\2\u01f0\u01ee\3\2"+ + 
"\2\2\u01f1\u01f4\3\2\2\2\u01f2\u01f0\3\2\2\2\u01f2\u01f3\3\2\2\2\u01f3"+ + "\u01f5\3\2\2\2\u01f4\u01f2\3\2\2\2\u01f5\u01f6\7\4\2\2\u01f6\u020f\3\2"+ + "\2\2\u01f7\u01f9\7\62\2\2\u01f8\u01f7\3\2\2\2\u01f8\u01f9\3\2\2\2\u01f9"+ + "\u01fa\3\2\2\2\u01fa\u01fb\7\'\2\2\u01fb\u01fc\7\3\2\2\u01fc\u01fd\5\b"+ + "\5\2\u01fd\u01fe\7\4\2\2\u01fe\u020f\3\2\2\2\u01ff\u0201\7\62\2\2\u0200"+ + "\u01ff\3\2\2\2\u0200\u0201\3\2\2\2\u0201\u0202\3\2\2\2\u0202\u0203\7-"+ + "\2\2\u0203\u020f\58\35\2\u0204\u0206\7\62\2\2\u0205\u0204\3\2\2\2\u0205"+ + "\u0206\3\2\2\2\u0206\u0207\3\2\2\2\u0207\u0208\7>\2\2\u0208\u020f\5d\63"+ + "\2\u0209\u020b\7)\2\2\u020a\u020c\7\62\2\2\u020b\u020a\3\2\2\2\u020b\u020c"+ + "\3\2\2\2\u020c\u020d\3\2\2\2\u020d\u020f\7\63\2\2\u020e\u01e1\3\2\2\2"+ + "\u020e\u01e9\3\2\2\2\u020e\u01f8\3\2\2\2\u020e\u0200\3\2\2\2\u020e\u0205"+ + "\3\2\2\2\u020e\u0209\3\2\2\2\u020f\65\3\2\2\2\u0210\u0211\7-\2\2\u0211"+ + "\u0212\58\35\2\u0212\67\3\2\2\2\u0213\u0215\5d\63\2\u0214\u0216\5:\36"+ + "\2\u0215\u0214\3\2\2\2\u0215\u0216\3\2\2\2\u02169\3\2\2\2\u0217\u0218"+ + "\7\31\2\2\u0218\u021e\5d\63\2\u0219\u021a\7N\2\2\u021a\u021b\5d\63\2\u021b"+ + "\u021c\7U\2\2\u021c\u021e\3\2\2\2\u021d\u0217\3\2\2\2\u021d\u0219\3\2"+ + "\2\2\u021e;\3\2\2\2\u021f\u0220\b\37\1\2\u0220\u0224\5> \2\u0221\u0222"+ + "\t\13\2\2\u0222\u0224\5<\37\6\u0223\u021f\3\2\2\2\u0223\u0221\3\2\2\2"+ + "\u0224\u0231\3\2\2\2\u0225\u0226\f\5\2\2\u0226\u0227\t\f\2\2\u0227\u0230"+ + "\5<\37\6\u0228\u0229\f\4\2\2\u0229\u022a\t\13\2\2\u022a\u0230\5<\37\5"+ + "\u022b\u022c\f\3\2\2\u022c\u022d\5R*\2\u022d\u022e\5<\37\4\u022e\u0230"+ + "\3\2\2\2\u022f\u0225\3\2\2\2\u022f\u0228\3\2\2\2\u022f\u022b\3\2\2\2\u0230"+ + "\u0233\3\2\2\2\u0231\u022f\3\2\2\2\u0231\u0232\3\2\2\2\u0232=\3\2\2\2"+ + "\u0233\u0231\3\2\2\2\u0234\u0248\5@!\2\u0235\u0248\5F$\2\u0236\u0248\5"+ + "P)\2\u0237\u0238\5X-\2\u0238\u0239\7b\2\2\u0239\u023b\3\2\2\2\u023a\u0237"+ + "\3\2\2\2\u023a\u023b\3\2\2\2\u023b\u023c\3\2\2\2\u023c\u0248\7^\2\2\u023d"+ + "\u0248\5J&\2\u023e\u023f\7\3\2\2\u023f\u0240\5\b\5\2\u0240\u0241\7\4\2"+ + "\2\u0241\u0248\3\2\2\2\u0242\u0248\5X-\2\u0243\u0244\7\3\2\2\u0244\u0245"+ + "\5,\27\2\u0245\u0246\7\4\2\2\u0246\u0248\3\2\2\2\u0247\u0234\3\2\2\2\u0247"+ + "\u0235\3\2\2\2\u0247\u0236\3\2\2\2\u0247\u023a\3\2\2\2\u0247\u023d\3\2"+ + "\2\2\u0247\u023e\3\2\2\2\u0247\u0242\3\2\2\2\u0247\u0243\3\2\2\2\u0248"+ + "?\3\2\2\2\u0249\u0254\5B\"\2\u024a\u024b\7O\2\2\u024b\u024c\5B\"\2\u024c"+ + "\u024d\7U\2\2\u024d\u0254\3\2\2\2\u024e\u0254\5D#\2\u024f\u0250\7O\2\2"+ + "\u0250\u0251\5D#\2\u0251\u0252\7U\2\2\u0252\u0254\3\2\2\2\u0253\u0249"+ + "\3\2\2\2\u0253\u024a\3\2\2\2\u0253\u024e\3\2\2\2\u0253\u024f\3\2\2\2\u0254"+ + "A\3\2\2\2\u0255\u0256\7\20\2\2\u0256\u0257\7\3\2\2\u0257\u0258\5,\27\2"+ + "\u0258\u0259\7\f\2\2\u0259\u025a\5V,\2\u025a\u025b\7\4\2\2\u025bC\3\2"+ + "\2\2\u025c\u025d\7\24\2\2\u025d\u025e\7\3\2\2\u025e\u025f\5,\27\2\u025f"+ + "\u0260\7\5\2\2\u0260\u0261\5V,\2\u0261\u0262\7\4\2\2\u0262E\3\2\2\2\u0263"+ + "\u0269\5H%\2\u0264\u0265\7O\2\2\u0265\u0266\5H%\2\u0266\u0267\7U\2\2\u0267"+ + "\u0269\3\2\2\2\u0268\u0263\3\2\2\2\u0268\u0264\3\2\2\2\u0269G\3\2\2\2"+ + "\u026a\u026b\7\35\2\2\u026b\u026c\7\3\2\2\u026c\u026d\5Z.\2\u026d\u026e"+ + "\7!\2\2\u026e\u026f\5<\37\2\u026f\u0270\7\4\2\2\u0270I\3\2\2\2\u0271\u0277"+ + "\5L\'\2\u0272\u0273\7O\2\2\u0273\u0274\5L\'\2\u0274\u0275\7U\2\2\u0275"+ + "\u0277\3\2\2\2\u0276\u0271\3\2\2\2\u0276\u0272\3\2\2\2\u0277K\3\2\2\2"+ + "\u0278\u0279\5N(\2\u0279\u0285\7\3\2\2\u027a\u027c\5\36\20\2\u027b\u027a"+ + 
"\3\2\2\2\u027b\u027c\3\2\2\2\u027c\u027d\3\2\2\2\u027d\u0282\5,\27\2\u027e"+ + "\u027f\7\5\2\2\u027f\u0281\5,\27\2\u0280\u027e\3\2\2\2\u0281\u0284\3\2"+ + "\2\2\u0282\u0280\3\2\2\2\u0282\u0283\3\2\2\2\u0283\u0286\3\2\2\2\u0284"+ + "\u0282\3\2\2\2\u0285\u027b\3\2\2\2\u0285\u0286\3\2\2\2\u0286\u0287\3\2"+ + "\2\2\u0287\u0288\7\4\2\2\u0288M\3\2\2\2\u0289\u028d\7,\2\2\u028a\u028d"+ + "\7=\2\2\u028b\u028d\5Z.\2\u028c\u0289\3\2\2\2\u028c\u028a\3\2\2\2\u028c"+ + "\u028b\3\2\2\2\u028dO\3\2\2\2\u028e\u02a8\7\63\2\2\u028f\u02a8\5b\62\2"+ + "\u0290\u02a8\5T+\2\u0291\u0293\7d\2\2\u0292\u0291\3\2\2\2\u0293\u0294"+ + "\3\2\2\2\u0294\u0292\3\2\2\2\u0294\u0295\3\2\2\2\u0295\u02a8\3\2\2\2\u0296"+ + "\u02a8\7c\2\2\u0297\u0298\7Q\2\2\u0298\u0299\5d\63\2\u0299\u029a\7U\2"+ + "\2\u029a\u02a8\3\2\2\2\u029b\u029c\7R\2\2\u029c\u029d\5d\63\2\u029d\u029e"+ + "\7U\2\2\u029e\u02a8\3\2\2\2\u029f\u02a0\7S\2\2\u02a0\u02a1\5d\63\2\u02a1"+ + "\u02a2\7U\2\2\u02a2\u02a8\3\2\2\2\u02a3\u02a4\7T\2\2\u02a4\u02a5\5d\63"+ + "\2\u02a5\u02a6\7U\2\2\u02a6\u02a8\3\2\2\2\u02a7\u028e\3\2\2\2\u02a7\u028f"+ + "\3\2\2\2\u02a7\u0290\3\2\2\2\u02a7\u0292\3\2\2\2\u02a7\u0296\3\2\2\2\u02a7"+ + "\u0297\3\2\2\2\u02a7\u029b\3\2\2\2\u02a7\u029f\3\2\2\2\u02a7\u02a3\3\2"+ + "\2\2\u02a8Q\3\2\2\2\u02a9\u02aa\t\r\2\2\u02aaS\3\2\2\2\u02ab\u02ac\t\16"+ + "\2\2\u02acU\3\2\2\2\u02ad\u02ae\5Z.\2\u02aeW\3\2\2\2\u02af\u02b0\5Z.\2"+ + "\u02b0\u02b1\7b\2\2\u02b1\u02b3\3\2\2\2\u02b2\u02af\3\2\2\2\u02b3\u02b6"+ + "\3\2\2\2\u02b4\u02b2\3\2\2\2\u02b4\u02b5\3\2\2\2\u02b5\u02b7\3\2\2\2\u02b6"+ + "\u02b4\3\2\2\2\u02b7\u02b8\5Z.\2\u02b8Y\3\2\2\2\u02b9\u02bc\5^\60\2\u02ba"+ + "\u02bc\5`\61\2\u02bb\u02b9\3\2\2\2\u02bb\u02ba\3\2\2\2\u02bc[\3\2\2\2"+ + "\u02bd\u02be\5Z.\2\u02be\u02bf\7\6\2\2\u02bf\u02c1\3\2\2\2\u02c0\u02bd"+ + "\3\2\2\2\u02c0\u02c1\3\2\2\2\u02c1\u02c2\3\2\2\2\u02c2\u02ca\7i\2\2\u02c3"+ + "\u02c4\5Z.\2\u02c4\u02c5\7\6\2\2\u02c5\u02c7\3\2\2\2\u02c6\u02c3\3\2\2"+ + "\2\u02c6\u02c7\3\2\2\2\u02c7\u02c8\3\2\2\2\u02c8\u02ca\5Z.\2\u02c9\u02c0"+ + "\3\2\2\2\u02c9\u02c6\3\2\2\2\u02ca]\3\2\2\2\u02cb\u02ce\7j\2\2\u02cc\u02ce"+ + "\7k\2\2\u02cd\u02cb\3\2\2\2\u02cd\u02cc\3\2\2\2\u02ce_\3\2\2\2\u02cf\u02d3"+ + "\7g\2\2\u02d0\u02d3\5f\64\2\u02d1\u02d3\7h\2\2\u02d2\u02cf\3\2\2\2\u02d2"+ + "\u02d0\3\2\2\2\u02d2\u02d1\3\2\2\2\u02d3a\3\2\2\2\u02d4\u02d7\7f\2\2\u02d5"+ + "\u02d7\7e\2\2\u02d6\u02d4\3\2\2\2\u02d6\u02d5\3\2\2\2\u02d7c\3\2\2\2\u02d8"+ + "\u02d9\t\17\2\2\u02d9e\3\2\2\2\u02da\u02db\t\20\2\2\u02dbg\3\2\2\2awy"+ + "}\u0086\u0088\u008c\u0093\u009a\u009f\u00a4\u00ae\u00b2\u00ba\u00bd\u00c3"+ + "\u00c8\u00cb\u00d2\u00da\u00dd\u00e9\u00ec\u00ef\u00f6\u00fd\u0101\u0105"+ + "\u0109\u0110\u0114\u0118\u011d\u0121\u0129\u012d\u0134\u013f\u0142\u0146"+ + "\u0152\u0155\u015b\u0162\u0169\u016c\u0170\u0174\u0178\u017a\u0185\u018a"+ + "\u018e\u0191\u0197\u019a\u01a0\u01a3\u01a5\u01c8\u01d0\u01d2\u01d9\u01de"+ + "\u01e1\u01e9\u01f2\u01f8\u0200\u0205\u020b\u020e\u0215\u021d\u0223\u022f"+ + "\u0231\u023a\u0247\u0253\u0268\u0276\u027b\u0282\u0285\u028c\u0294\u02a7"+ + "\u02b4\u02bb\u02c0\u02c6\u02c9\u02cd\u02d2\u02d6"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java index afb4747ae49..f4420a66adb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java
@@ -426,6 +426,12 @@ interface SqlBaseVisitor<T> extends ParseTreeVisitor<T> {
    * @return the visitor result
    */
   T visitCastTemplate(SqlBaseParser.CastTemplateContext ctx);
+  /**
+   * Visit a parse tree produced by {@link SqlBaseParser#convertTemplate}.
+   * @param ctx the parse tree
+   * @return the visitor result
+   */
+  T visitConvertTemplate(SqlBaseParser.ConvertTemplateContext ctx);
   /**
    * Visit a parse tree produced by {@link SqlBaseParser#extractExpression}.
    * @param ctx the parse tree
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java
index aa2e784de3d..24f55b1d8eb 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java
@@ -54,13 +54,15 @@ public class ShowColumns extends Command {
     @Override
     public List<Attribute> output() {
         return asList(new FieldAttribute(location(), "column", new KeywordEsField("column")),
-                new FieldAttribute(location(), "type", new KeywordEsField("type"))); }
+                new FieldAttribute(location(), "type", new KeywordEsField("type")),
+                new FieldAttribute(location(), "mapping", new KeywordEsField("mapping")));
+    }
 
     @Override
     public void execute(SqlSession session, ActionListener<SchemaRowSet> listener) {
         String idx = index != null ? index : (pattern != null ? pattern.asIndexNameWildcard() : "*");
         String regex = pattern != null ? pattern.asJavaRegex() : null;
-        session.indexResolver().resolveWithSameMapping(idx, regex, ActionListener.wrap(
+        session.indexResolver().resolveAsMergedMapping(idx, regex, ActionListener.wrap(
                 indexResult -> {
                     List<List<?>> rows = emptyList();
                     if (indexResult.isValid()) {
@@ -69,8 +71,7 @@ public class ShowColumns extends Command {
                     }
                     listener.onResponse(Rows.of(output(), rows));
                 },
-                listener::onFailure
-                ));
+                listener::onFailure));
     }
 
     private void fillInRows(Map<String, EsField> mapping, String prefix, List<List<?>> rows) {
@@ -79,7 +80,7 @@
             DataType dt = field.getDataType();
             String name = e.getKey();
             if (dt != null) {
-                rows.add(asList(prefix != null ? prefix + "." + name : name, dt.sqlName()));
+                rows.add(asList(prefix != null ? prefix + "." + name : name, dt.sqlName(), dt.name()));
                 if (field.getProperties().isEmpty() == false) {
                     String newPrefix = prefix != null ? prefix + "." + name : name;
                     fillInRows(field.getProperties(), newPrefix, rows);
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java
index 8616205b003..1d61cb1be46 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java
@@ -29,6 +29,7 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.AggPathInput;
 import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
 import org.elasticsearch.xpack.sql.expression.gen.pipeline.UnaryPipe;
 import org.elasticsearch.xpack.sql.expression.gen.processor.Processor;
+import org.elasticsearch.xpack.sql.expression.predicate.In;
 import org.elasticsearch.xpack.sql.plan.physical.AggregateExec;
 import org.elasticsearch.xpack.sql.plan.physical.EsQueryExec;
 import org.elasticsearch.xpack.sql.plan.physical.FilterExec;
@@ -138,6 +139,9 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
                 if (pj instanceof ScalarFunction) {
                     ScalarFunction f = (ScalarFunction) pj;
                     processors.put(f.toAttribute(), Expressions.pipe(f));
+                } else if (pj instanceof In) {
+                    In in = (In) pj;
+                    processors.put(in.toAttribute(), Expressions.pipe(in));
                 }
             }
         }
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
index 806944e3a79..453660f07da 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
@@ -32,6 +32,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeHistogramFunction;
 import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate;
+import org.elasticsearch.xpack.sql.expression.predicate.In;
 import org.elasticsearch.xpack.sql.expression.predicate.IsNotNull;
 import org.elasticsearch.xpack.sql.expression.predicate.Range;
 import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MatchQueryPredicate;
@@ -80,6 +81,7 @@ import org.elasticsearch.xpack.sql.querydsl.query.RangeQuery;
 import org.elasticsearch.xpack.sql.querydsl.query.RegexQuery;
 import org.elasticsearch.xpack.sql.querydsl.query.ScriptQuery;
 import org.elasticsearch.xpack.sql.querydsl.query.TermQuery;
+import org.elasticsearch.xpack.sql.querydsl.query.TermsQuery;
 import org.elasticsearch.xpack.sql.querydsl.query.WildcardQuery;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.util.Check;
@@ -90,16 +92,20 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Optional;
 
 import static java.util.Collections.singletonList;
 import static org.elasticsearch.xpack.sql.expression.Foldables.doubleValuesOf;
 import static org.elasticsearch.xpack.sql.expression.Foldables.stringValueOf;
 import static org.elasticsearch.xpack.sql.expression.Foldables.valueOf;
 
-abstract class QueryTranslator {
+final class QueryTranslator {
 
-    static final List<ExpressionTranslator<?>> QUERY_TRANSLATORS = Arrays.asList(
+    private QueryTranslator(){}
+
+    private static final List<ExpressionTranslator<?>> QUERY_TRANSLATORS = Arrays.asList(
             new BinaryComparisons(),
+            new InComparisons(),
             new Ranges(),
             new BinaryLogic(),
             new Nots(),
@@ -110,7 +116,7 @@ abstract class QueryTranslator {
             new MultiMatches()
             );
 
-    static final List<AggTranslator<?>> AGG_TRANSLATORS = Arrays.asList(
+    private static final List<AggTranslator<?>> AGG_TRANSLATORS = Arrays.asList(
             new Maxes(),
             new Mins(),
             new Avgs(),
@@ -235,7 +241,7 @@
         }
 
         aggId = ne.id().toString();
-        GroupByKey key = null;
+        GroupByKey key;
 
         // handle functions differently
         if (exp instanceof Function) {
@@ -281,7 +287,7 @@
             newQ = and(loc, left.query, right.query);
         }
 
-        AggFilter aggFilter = null;
+        AggFilter aggFilter;
 
         if (left.aggFilter == null) {
             aggFilter = right.aggFilter;
@@ -533,7 +539,7 @@
             // if the code gets here it's a bug
             //
             else {
-                throw new UnsupportedOperationException("No idea how to translate " + bc.left());
+                throw new SqlIllegalArgumentException("No idea how to translate " + bc.left());
             }
         }
@@ -572,6 +578,55 @@
         }
     }
 
+    // assume the Optimizer properly orders the predicates to ease the translation
+    static class InComparisons extends ExpressionTranslator<In> {
+
+        @Override
+        protected QueryTranslation asQuery(In in, boolean onAggs) {
+            Optional<Expression> firstNotFoldable = in.list().stream().filter(expression -> !expression.foldable()).findFirst();
+
+            if (firstNotFoldable.isPresent()) {
+                throw new SqlIllegalArgumentException(
+                    "Line {}:{}: Comparisons against variables are not (currently) supported; offender [{}] in [{}]",
+                    firstNotFoldable.get().location().getLineNumber(),
+                    firstNotFoldable.get().location().getColumnNumber(),
+                    Expressions.name(firstNotFoldable.get()),
+                    in.name());
+            }
+
+            if (in.value() instanceof NamedExpression) {
+                NamedExpression ne = (NamedExpression) in.value();
+
+                Query query = null;
+                AggFilter aggFilter = null;
+
+                Attribute at = ne.toAttribute();
+                //
+                // Agg context means HAVING -> PipelineAggs
+                //
+                ScriptTemplate script = in.asScript();
+                if (onAggs) {
+                    aggFilter = new AggFilter(at.id().toString(), script);
+                }
+                else {
+                    // query directly on the field
+                    if (at instanceof FieldAttribute) {
+                        query = wrapIfNested(new TermsQuery(in.location(), ne.name(), in.list()), ne);
+                    } else {
+                        query = new ScriptQuery(at.location(), script);
+                    }
+                }
+                return new QueryTranslation(query, aggFilter);
+            }
+            //
+            // if the code gets here it's a bug
+            //
+            else {
+                throw new SqlIllegalArgumentException("No idea how to translate " + in.value());
+            }
+        }
+    }
+
     static class Ranges extends ExpressionTranslator<Range> {
 
         @Override
@@ -759,4 +814,4 @@ abstract class QueryTranslator {
         return query;
     }
 }
-}
\ No newline at end of file
+}
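The key design choice in the InComparisons translator above is the foldability guard: only IN-lists made entirely of constants are translated, and the first non-constant entry fails fast with its source position. Below is a self-contained sketch of that pattern, not part of the patch; Expr, constant and column are invented stand-ins for the plugin's Expression machinery.

import java.util.Arrays;
import java.util.List;
import java.util.Optional;

public class FoldabilityCheckSketch {
    // Stand-in for the SQL plugin's Expression type (hypothetical).
    interface Expr {
        boolean foldable();
        String name();
    }

    static Expr constant(String name) {
        return new Expr() {
            public boolean foldable() { return true; }
            public String name() { return name; }
        };
    }

    static Expr column(String name) {
        return new Expr() {
            public boolean foldable() { return false; }
            public String name() { return name; }
        };
    }

    public static void main(String[] args) {
        // Models `WHERE int IN (2, other_col, 4)` -- other_col is not a constant.
        List<Expr> inList = Arrays.asList(constant("2"), column("other_col"), constant("4"));
        // Same stream pipeline as the translator: find the first offender, if any.
        Optional<Expr> firstNotFoldable = inList.stream().filter(e -> !e.foldable()).findFirst();
        if (firstNotFoldable.isPresent()) {
            // The real code throws SqlIllegalArgumentException with line/column info.
            System.out.println("Comparisons against variables are not (currently) supported; offender ["
                    + firstNotFoldable.get().name() + "]");
        }
    }
}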
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java
index 9048df42017..5ef4689422b 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java
@@ -172,7 +172,7 @@ public class QueryContainer {
     // reference methods
     //
     private FieldExtraction topHitFieldRef(FieldAttribute fieldAttr) {
-        return new SearchHitFieldRef(aliasName(fieldAttr), fieldAttr.field().getDataType(), fieldAttr.field().hasDocValues());
+        return new SearchHitFieldRef(aliasName(fieldAttr), fieldAttr.field().getDataType(), fieldAttr.field().isAggregatable());
     }
 
     private Tuple<QueryContainer, FieldExtraction> nestedHitFieldRef(FieldAttribute attr) {
@@ -181,10 +181,10 @@ public class QueryContainer {
         String name = aliasName(attr);
 
         Query q = rewriteToContainNestedField(query, attr.location(),
-                attr.nestedParent().name(), name, attr.field().hasDocValues());
+                attr.nestedParent().name(), name, attr.field().isAggregatable());
 
         SearchHitFieldRef nestedFieldRef = new SearchHitFieldRef(name, attr.field().getDataType(),
-                attr.field().hasDocValues(), attr.parent().name());
+                attr.field().isAggregatable(), attr.parent().name());
         nestedRefs.add(nestedFieldRef);
 
         return new Tuple<>(new QueryContainer(q, aggs, columns, aliases, pseudoFunctions, scalarFunctions, sort, limit), nestedFieldRef);
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java
new file mode 100644
index 00000000000..412df4e8ca6
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.querydsl.query;
+
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.Foldables;
+import org.elasticsearch.xpack.sql.tree.Location;
+
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Objects;
+
+import static org.elasticsearch.index.query.QueryBuilders.termsQuery;
+
+public class TermsQuery extends LeafQuery {
+
+    private final String term;
+    private final LinkedHashSet<Object> values;
+
+    public TermsQuery(Location location, String term, List<Expression> values) {
+        super(location);
+        this.term = term;
+        this.values = new LinkedHashSet<>(Foldables.valuesOf(values, values.get(0).dataType()));
+    }
+
+    @Override
+    public QueryBuilder asBuilder() {
+        return termsQuery(term, values);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(term, values);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+
+        TermsQuery other = (TermsQuery) obj;
+        return Objects.equals(term, other.term)
+                && Objects.equals(values, other.values);
+    }
+
+    @Override
+    protected String innerToString() {
+        return term + ":" + values;
+    }
+}
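Since the new TermsQuery simply delegates to QueryBuilders.termsQuery, an IN predicate over a plain field ends up as a standard Elasticsearch terms query. A minimal sketch of what asBuilder() yields; the field name and values are invented for illustration, and the Elasticsearch server dependency is assumed on the classpath:

import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

import java.util.Arrays;
import java.util.LinkedHashSet;

public class TermsQueryExample {
    public static void main(String[] args) {
        // LinkedHashSet mirrors the constructor above: duplicates in the
        // IN-list collapse while the original value order is preserved.
        LinkedHashSet<Object> values = new LinkedHashSet<>(Arrays.asList("a", "b", "c", "a"));
        QueryBuilder builder = QueryBuilders.termsQuery("keyword_col", values);
        // Prints a terms query along the lines of
        // {"terms":{"keyword_col":["a","b","c"],"boost":1.0}}
        System.out.println(builder);
    }
}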
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlSession.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlSession.java
index 65da32c3122..9b31d069cbe 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlSession.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlSession.java
@@ -127,7 +127,7 @@ public class SqlSession {
             listener.onFailure(new MappingException("Cannot inspect indices in cluster/catalog [{}]", cluster));
         }
 
-        indexResolver.resolveWithSameMapping(table.index(), null,
+        indexResolver.resolveAsMergedMapping(table.index(), null,
                 wrap(indexResult -> listener.onResponse(action.apply(indexResult)), listener::onFailure));
     } else {
         try {
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/EsField.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/EsField.java
index cc7e085416c..5630c9409af 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/EsField.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/EsField.java
@@ -15,14 +15,14 @@ import java.util.Objects;
  */
 public class EsField {
     private final DataType esDataType;
-    private final boolean hasDocValues;
+    private final boolean aggregatable;
     private final Map<String, EsField> properties;
     private final String name;
 
-    public EsField(String name, DataType esDataType, Map<String, EsField> properties, boolean hasDocValues) {
+    public EsField(String name, DataType esDataType, Map<String, EsField> properties, boolean aggregatable) {
         this.name = name;
         this.esDataType = esDataType;
-        this.hasDocValues = hasDocValues;
+        this.aggregatable = aggregatable;
         this.properties = properties;
     }
 
@@ -41,10 +41,10 @@ public class EsField {
     }
 
     /**
-     * The field supports doc values
+     * This field can be aggregated
      */
-    public boolean hasDocValues() {
-        return hasDocValues;
+    public boolean isAggregatable() {
+        return aggregatable;
     }
 
     /**
@@ -85,19 +85,27 @@ public class EsField {
         return true;
     }
 
+    @Override
+    public String toString() {
+        return name + "@" + esDataType.name() + "=" + properties;
+    }
+
     @Override
     public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
         EsField field = (EsField) o;
-        return hasDocValues == field.hasDocValues &&
-                esDataType == field.esDataType &&
-                Objects.equals(properties, field.properties) &&
-                Objects.equals(name, field.name);
+        return aggregatable == field.aggregatable && esDataType == field.esDataType
+                && Objects.equals(name, field.name)
+                && Objects.equals(properties, field.properties);
     }
 
     @Override
     public int hashCode() {
-        return Objects.hash(esDataType, hasDocValues, properties, name);
+        return Objects.hash(esDataType, aggregatable, properties, name);
     }
-}
+}
\ No newline at end of file
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java
index 95949070f2e..c193dcfd546 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java
@@ -125,6 +125,11 @@ public class VerifierErrorMessagesTests extends ESTestCase {
             verify("SELECT AVG(int) FROM test GROUP BY AVG(int)"));
     }
 
+    public void testNotSupportedAggregateOnDate() {
+        assertEquals("1:8: Argument required to be numeric ('date' of type 'date')",
+            verify("SELECT AVG(date) FROM test"));
+    }
+
     public void testGroupByOnNested() {
         assertEquals("1:38: Grouping isn't (yet) compatible with nested fields [dep.dep_id]",
             verify("SELECT dep.dep_id FROM test GROUP BY dep.dep_id"));
@@ -169,4 +174,44 @@ public class VerifierErrorMessagesTests extends ESTestCase {
         assertEquals("1:42: Cannot filter HAVING on non-aggregate [int]; consider using WHERE instead",
             verify("SELECT int FROM test GROUP BY int HAVING 2 < ABS(int)"));
     }
-}
\ No newline at end of file
+
+    public void testInWithDifferentDataTypes_SelectClause() {
+        assertEquals("1:17: expected data type [INTEGER], value provided is of type [KEYWORD]",
+            verify("SELECT 1 IN (2, '3', 4)"));
+    }
+
+    public void testInNestedWithDifferentDataTypes_SelectClause() {
+        assertEquals("1:27: expected data type [INTEGER], value provided is of type [KEYWORD]",
+            verify("SELECT 1 = 1 OR 1 IN (2, '3', 4)"));
+    }
+
+    public void testInWithDifferentDataTypesFromLeftValue_SelectClause() {
+        assertEquals("1:14: expected data type [INTEGER], value provided is of type [KEYWORD]",
+            verify("SELECT 1 IN ('foo', 'bar')"));
+    }
+
+    public void testInNestedWithDifferentDataTypesFromLeftValue_SelectClause() {
+        assertEquals("1:29: expected data type [KEYWORD], value provided is of type [INTEGER]",
+            verify("SELECT 1 = 1 OR 'foo' IN (2, 3)"));
+    }
+
+    public void testInWithDifferentDataTypes_WhereClause() {
+        assertEquals("1:49: expected data type [TEXT], value provided is of type [INTEGER]",
+            verify("SELECT * FROM test WHERE text IN ('foo', 'bar', 4)"));
+    }
+
+    public void testInNestedWithDifferentDataTypes_WhereClause() {
+        assertEquals("1:60: expected data type [TEXT], value provided is of type [INTEGER]",
+            verify("SELECT * FROM test WHERE int = 1 OR text IN ('foo', 'bar', 2)"));
+    }
+
+    public void testInWithDifferentDataTypesFromLeftValue_WhereClause() {
+        assertEquals("1:35: expected data type [TEXT], value provided is of type [INTEGER]",
+            verify("SELECT * FROM test WHERE text IN (1, 2)"));
+    }
+
+    public void testInNestedWithDifferentDataTypesFromLeftValue_WhereClause() {
+        assertEquals("1:46: expected data type [TEXT], value provided is of type [INTEGER]",
+            verify("SELECT * FROM test WHERE int = 1 OR text IN (1, 2)"));
+    }
+}
+ Map basicMapping = TypesTests.loadMapping("mapping-basic.json", true); + Map numericMapping = TypesTests.loadMapping("mapping-numeric.json", true); - assertNotSame(oneMapping, sameMapping); - assertEquals(oneMapping, sameMapping); - assertNotEquals(oneMapping, differentMapping); + assertNotEquals(basicMapping, numericMapping); String wildcard = "*"; - IndexResolution resolution = IndexResolver.merge( - Arrays.asList(IndexResolution.valid(new EsIndex("a", oneMapping)), - IndexResolution.valid(new EsIndex("b", sameMapping)), - IndexResolution.valid(new EsIndex("diff", differentMapping))), - wildcard); + IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fromMappings( + new EsIndex("basic", basicMapping), + new EsIndex("numeric", numericMapping))); + + assertTrue(resolution.isValid()); + assertEquals(basicMapping.size() + numericMapping.size(), resolution.get().mapping().size()); + } + + public void testMergeIncompatibleTypes() throws Exception { + Map basicMapping = TypesTests.loadMapping("mapping-basic.json", true); + Map incompatible = TypesTests.loadMapping("mapping-basic-incompatible.json"); + + assertNotEquals(basicMapping, incompatible); + + String wildcard = "*"; + IndexResolution resolution = IndexResolver.mergedMapping(wildcard, + fromMappings(new EsIndex("basic", basicMapping), new EsIndex("incompatible", incompatible))); assertFalse(resolution.isValid()); - - MappingException ex = expectThrows(MappingException.class, () -> resolution.get()); + MappingException me = expectThrows(MappingException.class, () -> resolution.get()); assertEquals( - "[*] points to indices [a] and [diff] which have different mappings. " - + "When using multiple indices, the mappings must be identical.", - ex.getMessage()); + "[*] points to indices with incompatible mappings;" + + " field [gender] is mapped in [2] different ways: [text] in [incompatible], [keyword] in [basic]", + me.getMessage()); } -} + + public void testMergeIncompatibleCapabilities() throws Exception { + Map basicMapping = TypesTests.loadMapping("mapping-basic.json", true); + Map incompatible = TypesTests.loadMapping("mapping-basic-nodocvalues.json", true); + + assertNotEquals(basicMapping, incompatible); + + String wildcard = "*"; + IndexResolution resolution = IndexResolver.mergedMapping(wildcard, + fromMappings(new EsIndex("basic", basicMapping), new EsIndex("incompatible", incompatible))); + + assertFalse(resolution.isValid()); + MappingException me = expectThrows(MappingException.class, () -> resolution.get()); + assertEquals( + "[*] points to indices with incompatible mappings: field [emp_no] is aggregateable except in [incompatible]", + me.getMessage()); + } + + public void testMultiLevelObjectMappings() throws Exception { + Map dottedMapping = TypesTests.loadMapping("mapping-dotted-field.json", true); + + String wildcard = "*"; + IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fromMappings(new EsIndex("a", dottedMapping))); + + assertTrue(resolution.isValid()); + assertEqualsMaps(dottedMapping, resolution.get().mapping()); + } + + public void testMultiLevelNestedMappings() throws Exception { + Map nestedMapping = TypesTests.loadMapping("mapping-nested.json", true); + + String wildcard = "*"; + IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fromMappings(new EsIndex("a", nestedMapping))); + + assertTrue(resolution.isValid()); + assertEqualsMaps(nestedMapping, resolution.get().mapping()); + } + + private static Map> fromMappings(EsIndex... 
indices) { + Map> merged = new HashMap<>(); + + // first pass: create the field caps + for (EsIndex index : indices) { + for (EsField field : index.mapping().values()) { + addFieldCaps(null, field, index.name(), merged); + } + } + + // second pass: update indices + for (Entry> entry : merged.entrySet()) { + String fieldName = entry.getKey(); + Map caps = entry.getValue(); + if (entry.getValue().size() > 1) { + for (EsIndex index : indices) { + EsField field = index.mapping().get(fieldName); + UpdateableFieldCapabilities fieldCaps = (UpdateableFieldCapabilities) caps.get(field.getDataType().esType); + fieldCaps.indices.add(index.name()); + } + //TODO: what about nonAgg/SearchIndices? + } + } + + return merged; + } + + private static void addFieldCaps(String parent, EsField field, String indexName, Map> merged) { + String fieldName = parent != null ? parent + "." + field.getName() : field.getName(); + Map map = merged.get(fieldName); + if (map == null) { + map = new HashMap<>(); + merged.put(fieldName, map); + } + FieldCapabilities caps = map.computeIfAbsent(field.getDataType().esType, + esType -> new UpdateableFieldCapabilities(fieldName, esType, + isSearchable(field.getDataType()), + isAggregatable(field.getDataType()))); + + if (!field.isAggregatable()) { + ((UpdateableFieldCapabilities) caps).nonAggregatableIndices.add(indexName); + } + + for (EsField nested : field.getProperties().values()) { + addFieldCaps(fieldName, nested, indexName, merged); + } + } + + private static boolean isSearchable(DataType type) { + return type.isPrimitive(); + } + + private static boolean isAggregatable(DataType type) { + return type.isNumeric() || type == DataType.KEYWORD || type == DataType.DATE; + } + + private static class UpdateableFieldCapabilities extends FieldCapabilities { + List indices = new ArrayList<>(); + List nonSearchableIndices = new ArrayList<>(); + List nonAggregatableIndices = new ArrayList<>(); + + UpdateableFieldCapabilities(String name, String type, boolean isSearchable, boolean isAggregatable) { + super(name, type, isSearchable, isAggregatable); + } + + @Override + public String[] indices() { + return indices.isEmpty() ? null : indices.toArray(new String[indices.size()]); + } + + @Override + public String[] nonSearchableIndices() { + return nonSearchableIndices.isEmpty() ? null : nonSearchableIndices.toArray(new String[nonSearchableIndices.size()]); + } + + @Override + public String[] nonAggregatableIndices() { + return nonAggregatableIndices.isEmpty() ? null : nonAggregatableIndices.toArray(new String[nonAggregatableIndices.size()]); + } + + @Override + public String toString() { + return String.format(Locale.ROOT, "%s,%s->%s", getName(), getType(), indices); + } + } + + private static void assertEqualsMaps(Map left, Map right) { + for (Entry entry : left.entrySet()) { + V rv = right.get(entry.getKey()); + assertEquals(String.format(Locale.ROOT, "Key [%s] has different values", entry.getKey()), entry.getValue(), rv); + } + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/InProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/InProcessorTests.java new file mode 100644 index 00000000000..3e71ac90f81 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/InProcessorTests.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.expression.predicate;
+
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.Writeable.Reader;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
+import org.elasticsearch.xpack.sql.expression.Literal;
+import org.elasticsearch.xpack.sql.expression.function.scalar.Processors;
+import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor;
+import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.InProcessor;
+
+import java.util.Arrays;
+
+import static org.elasticsearch.xpack.sql.tree.Location.EMPTY;
+
+public class InProcessorTests extends AbstractWireSerializingTestCase<InProcessor> {
+
+    private static final Literal ONE = L(1);
+    private static final Literal TWO = L(2);
+    private static final Literal THREE = L(3);
+
+    public static InProcessor randomProcessor() {
+        return new InProcessor(Arrays.asList(new ConstantProcessor(randomLong()), new ConstantProcessor(randomLong())));
+    }
+
+    @Override
+    protected InProcessor createTestInstance() {
+        return randomProcessor();
+    }
+
+    @Override
+    protected Reader<InProcessor> instanceReader() {
+        return InProcessor::new;
+    }
+
+    @Override
+    protected NamedWriteableRegistry getNamedWriteableRegistry() {
+        return new NamedWriteableRegistry(Processors.getNamedWriteables());
+    }
+
+    public void testEq() {
+        assertEquals(true, new In(EMPTY, TWO, Arrays.asList(ONE, TWO, THREE)).makePipe().asProcessor().process(null));
+        assertEquals(false, new In(EMPTY, THREE, Arrays.asList(ONE, TWO)).makePipe().asProcessor().process(null));
+    }
+
+    private static Literal L(Object value) {
+        return Literal.of(EMPTY, value);
+    }
+}
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java
index 608be8ab86f..acd0378ee01 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java
@@ -10,6 +10,7 @@ import org.elasticsearch.xpack.sql.expression.Alias;
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.Expressions;
 import org.elasticsearch.xpack.sql.expression.FieldAttribute;
+import org.elasticsearch.xpack.sql.expression.Foldables;
 import org.elasticsearch.xpack.sql.expression.Literal;
 import org.elasticsearch.xpack.sql.expression.NamedExpression;
 import org.elasticsearch.xpack.sql.expression.Order;
@@ -30,6 +31,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.math.Abs;
 import org.elasticsearch.xpack.sql.expression.function.scalar.math.E;
 import org.elasticsearch.xpack.sql.expression.function.scalar.math.Floor;
 import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator;
+import org.elasticsearch.xpack.sql.expression.predicate.In;
 import org.elasticsearch.xpack.sql.expression.predicate.IsNotNull;
 import org.elasticsearch.xpack.sql.expression.predicate.Range;
 import org.elasticsearch.xpack.sql.expression.predicate.logical.And;
@@ -81,6 +83,7 @@ import static java.util.Collections.emptyList;
 import static java.util.Collections.emptyMap;
 import static java.util.Collections.singletonList;
 import static org.elasticsearch.xpack.sql.tree.Location.EMPTY;
+import static org.hamcrest.Matchers.contains;
 
 public class OptimizerTests extends ESTestCase {
@@ -147,6 +150,11 @@ public class OptimizerTests extends ESTestCase {
         return Literal.of(EMPTY, value);
     }
 
+    private static FieldAttribute getFieldAttribute() {
+        return new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true));
+    }
+
+
     public void testPruneSubqueryAliases() {
         ShowTables s = new ShowTables(EMPTY, null, null);
         SubQueryAlias plan = new SubQueryAlias(EMPTY, s, "show");
@@ -298,6 +306,23 @@ public class OptimizerTests extends ESTestCase {
                 new WeekOfYear(EMPTY, new Literal(EMPTY, null, DataType.NULL), UTC)));
     }
 
+    public void testConstantFoldingIn() {
+        In in = new In(EMPTY, ONE,
+                Arrays.asList(ONE, TWO, ONE, THREE, new Sub(EMPTY, THREE, ONE), ONE, FOUR, new Abs(EMPTY, new Sub(EMPTY, TWO, FIVE))));
+        Literal result = (Literal) new ConstantFolding().rule(in);
+        assertEquals(true, result.value());
+    }
+
+    public void testConstantFoldingIn_LeftValueNotFoldable() {
+        Project p = new Project(EMPTY, FROM(), Collections.singletonList(
+                new In(EMPTY, getFieldAttribute(),
+                        Arrays.asList(ONE, TWO, ONE, THREE, new Sub(EMPTY, THREE, ONE), ONE, FOUR, new Abs(EMPTY, new Sub(EMPTY, TWO, FIVE))))));
+        p = (Project) new ConstantFolding().apply(p);
+        assertEquals(1, p.projections().size());
+        In in = (In) p.projections().get(0);
+        assertThat(Foldables.valuesOf(in.list(), DataType.INTEGER), contains(1, 2, 3, 4));
+    }
+
     public void testArithmeticFolding() {
         assertEquals(10, foldOperator(new Add(EMPTY, L(7), THREE)));
         assertEquals(4, foldOperator(new Sub(EMPTY, L(7), THREE)));
@@ -389,7 +414,7 @@ public class OptimizerTests extends ESTestCase {
 
     // 6 < a <= 5 -> FALSE
     public void testFoldExcludingRangeToFalse() {
-        FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true));
+        FieldAttribute fa = getFieldAttribute();
 
         Range r = new Range(EMPTY, fa, SIX, false, FIVE, true);
         assertTrue(r.foldable());
@@ -398,7 +423,7 @@
 
     // 6 < a <= 5.5 -> FALSE
     public void testFoldExcludingRangeWithDifferentTypesToFalse() {
-        FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true));
+        FieldAttribute fa = getFieldAttribute();
 
         Range r = new Range(EMPTY, fa, SIX, false, L(5.5d), true);
         assertTrue(r.foldable());
@@ -408,7 +433,7 @@
 
     // Conjunction
 
     public void testCombineBinaryComparisonsNotComparable() {
-        FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true));
+        FieldAttribute fa = getFieldAttribute();
 
         LessThanOrEqual lte = new LessThanOrEqual(EMPTY, fa, SIX);
         LessThan lt = new LessThan(EMPTY, fa, Literal.FALSE);
@@ -420,7 +445,7 @@
 
     // a <= 6 AND a < 5 -> a < 5
     public void testCombineBinaryComparisonsUpper() {
-        FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true));
+        FieldAttribute fa = getFieldAttribute();
 
         LessThanOrEqual lte = new LessThanOrEqual(EMPTY, fa, SIX);
         LessThan lt = new LessThan(EMPTY, fa, FIVE);
@@ -434,7 +459,7 @@
 
     // 6 <= a AND 5 < a -> 6 <= a
     public void testCombineBinaryComparisonsLower() {
-        FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true));
+        FieldAttribute fa = getFieldAttribute();
GreaterThanOrEqual gte = new GreaterThanOrEqual(EMPTY, fa, SIX); GreaterThan gt = new GreaterThan(EMPTY, fa, FIVE); @@ -448,7 +473,7 @@ public class OptimizerTests extends ESTestCase { // 5 <= a AND 5 < a -> 5 < a public void testCombineBinaryComparisonsInclude() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); GreaterThanOrEqual gte = new GreaterThanOrEqual(EMPTY, fa, FIVE); GreaterThan gt = new GreaterThan(EMPTY, fa, FIVE); @@ -462,7 +487,7 @@ public class OptimizerTests extends ESTestCase { // 3 <= a AND 4 < a AND a <= 7 AND a < 6 -> 4 < a < 6 public void testCombineMultipleBinaryComparisons() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); GreaterThanOrEqual gte = new GreaterThanOrEqual(EMPTY, fa, THREE); GreaterThan gt = new GreaterThan(EMPTY, fa, FOUR); LessThanOrEqual lte = new LessThanOrEqual(EMPTY, fa, L(7)); @@ -481,7 +506,7 @@ public class OptimizerTests extends ESTestCase { // 3 <= a AND TRUE AND 4 < a AND a != 5 AND a <= 7 -> 4 < a <= 7 AND a != 5 AND TRUE public void testCombineMixedMultipleBinaryComparisons() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); GreaterThanOrEqual gte = new GreaterThanOrEqual(EMPTY, fa, THREE); GreaterThan gt = new GreaterThan(EMPTY, fa, FOUR); LessThanOrEqual lte = new LessThanOrEqual(EMPTY, fa, L(7)); @@ -503,7 +528,7 @@ public class OptimizerTests extends ESTestCase { // 1 <= a AND a < 5 -> 1 <= a < 5 public void testCombineComparisonsIntoRange() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); GreaterThanOrEqual gte = new GreaterThanOrEqual(EMPTY, fa, ONE); LessThan lt = new LessThan(EMPTY, fa, FIVE); @@ -520,7 +545,7 @@ public class OptimizerTests extends ESTestCase { // a != NULL AND a > 1 AND a < 5 AND a == 10 -> (a != NULL AND a == 10) AND 1 <= a < 5 public void testCombineUnbalancedComparisonsMixedWithEqualsIntoRange() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); IsNotNull isn = new IsNotNull(EMPTY, fa); GreaterThanOrEqual gte = new GreaterThanOrEqual(EMPTY, fa, ONE); @@ -544,7 +569,7 @@ public class OptimizerTests extends ESTestCase { // (2 < a < 3) AND (1 < a < 4) -> (2 < a < 3) public void testCombineBinaryComparisonsConjunctionOfIncludedRange() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); Range r1 = new Range(EMPTY, fa, TWO, false, THREE, false); Range r2 = new Range(EMPTY, fa, ONE, false, FOUR, false); @@ -558,7 +583,7 @@ public class OptimizerTests extends ESTestCase { // (2 < a < 3) AND a < 2 -> 2 < a < 2 public void testCombineBinaryComparisonsConjunctionOfNonOverlappingBoundaries() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); Range r1 = new Range(EMPTY, fa, TWO, false, THREE, false); Range r2 = new Range(EMPTY, fa, ONE, false, TWO, false); @@ -578,7 +603,7 @@ public class OptimizerTests extends ESTestCase { // (2 < a < 3) AND (2 < a <= 3) -> 2 < a < 3 public void 
testCombineBinaryComparisonsConjunctionOfUpperEqualsOverlappingBoundaries() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); Range r1 = new Range(EMPTY, fa, TWO, false, THREE, false); Range r2 = new Range(EMPTY, fa, TWO, false, THREE, true); @@ -592,7 +617,7 @@ public class OptimizerTests extends ESTestCase { // (2 < a < 3) AND (1 < a < 3) -> 2 < a < 3 public void testCombineBinaryComparisonsConjunctionOverlappingUpperBoundary() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); Range r2 = new Range(EMPTY, fa, TWO, false, THREE, false); Range r1 = new Range(EMPTY, fa, ONE, false, THREE, false); @@ -606,7 +631,7 @@ public class OptimizerTests extends ESTestCase { // (2 < a <= 3) AND (1 < a < 3) -> 2 < a < 3 public void testCombineBinaryComparisonsConjunctionWithDifferentUpperLimitInclusion() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); Range r1 = new Range(EMPTY, fa, ONE, false, THREE, false); Range r2 = new Range(EMPTY, fa, TWO, false, THREE, true); @@ -625,7 +650,7 @@ public class OptimizerTests extends ESTestCase { // (0 < a <= 1) AND (0 <= a < 2) -> 0 < a <= 1 public void testRangesOverlappingConjunctionNoLowerBoundary() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); Range r1 = new Range(EMPTY, fa, L(0), false, ONE, true); Range r2 = new Range(EMPTY, fa, L(0), true, TWO, false); @@ -640,7 +665,7 @@ public class OptimizerTests extends ESTestCase { // Disjunction public void testCombineBinaryComparisonsDisjunctionNotComparable() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); GreaterThan gt1 = new GreaterThan(EMPTY, fa, ONE); GreaterThan gt2 = new GreaterThan(EMPTY, fa, Literal.FALSE); @@ -655,7 +680,7 @@ public class OptimizerTests extends ESTestCase { // 2 < a OR 1 < a OR 3 < a -> 1 < a public void testCombineBinaryComparisonsDisjunctionLowerBound() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); GreaterThan gt1 = new GreaterThan(EMPTY, fa, ONE); GreaterThan gt2 = new GreaterThan(EMPTY, fa, TWO); @@ -673,7 +698,7 @@ public class OptimizerTests extends ESTestCase { // 2 < a OR 1 < a OR 3 <= a -> 1 < a public void testCombineBinaryComparisonsDisjunctionIncludeLowerBounds() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); GreaterThan gt1 = new GreaterThan(EMPTY, fa, ONE); GreaterThan gt2 = new GreaterThan(EMPTY, fa, TWO); @@ -691,7 +716,7 @@ public class OptimizerTests extends ESTestCase { // a < 1 OR a < 2 OR a < 3 -> a < 3 public void testCombineBinaryComparisonsDisjunctionUpperBound() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); LessThan lt1 = new LessThan(EMPTY, fa, ONE); LessThan lt2 = new LessThan(EMPTY, fa, TWO); @@ -709,7 +734,7 @@ public class OptimizerTests extends ESTestCase { // a < 2 OR a <= 2 OR a < 1 -> a <= 2 public void 
testCombineBinaryComparisonsDisjunctionIncludeUpperBounds() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); LessThan lt1 = new LessThan(EMPTY, fa, ONE); LessThan lt2 = new LessThan(EMPTY, fa, TWO); @@ -727,7 +752,7 @@ public class OptimizerTests extends ESTestCase { // a < 2 OR 3 < a OR a < 1 OR 4 < a -> a < 2 OR 3 < a public void testCombineBinaryComparisonsDisjunctionOfLowerAndUpperBounds() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); LessThan lt1 = new LessThan(EMPTY, fa, ONE); LessThan lt2 = new LessThan(EMPTY, fa, TWO); @@ -753,7 +778,7 @@ public class OptimizerTests extends ESTestCase { // (2 < a < 3) OR (1 < a < 4) -> (1 < a < 4) public void testCombineBinaryComparisonsDisjunctionOfIncludedRangeNotComparable() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); Range r1 = new Range(EMPTY, fa, TWO, false, THREE, false); Range r2 = new Range(EMPTY, fa, ONE, false, Literal.FALSE, false); @@ -765,10 +790,9 @@ public class OptimizerTests extends ESTestCase { assertEquals(or, exp); } - // (2 < a < 3) OR (1 < a < 4) -> (1 < a < 4) public void testCombineBinaryComparisonsDisjunctionOfIncludedRange() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); Range r1 = new Range(EMPTY, fa, TWO, false, THREE, false); @@ -789,7 +813,7 @@ public class OptimizerTests extends ESTestCase { // (2 < a < 3) OR (1 < a < 2) -> same public void testCombineBinaryComparisonsDisjunctionOfNonOverlappingBoundaries() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); Range r1 = new Range(EMPTY, fa, TWO, false, THREE, false); Range r2 = new Range(EMPTY, fa, ONE, false, TWO, false); @@ -803,7 +827,7 @@ public class OptimizerTests extends ESTestCase { // (2 < a < 3) OR (2 < a <= 3) -> 2 < a <= 3 public void testCombineBinaryComparisonsDisjunctionOfUpperEqualsOverlappingBoundaries() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); Range r1 = new Range(EMPTY, fa, TWO, false, THREE, false); Range r2 = new Range(EMPTY, fa, TWO, false, THREE, true); @@ -817,7 +841,7 @@ public class OptimizerTests extends ESTestCase { // (2 < a < 3) OR (1 < a < 3) -> 1 < a < 3 public void testCombineBinaryComparisonsOverlappingUpperBoundary() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); Range r2 = new Range(EMPTY, fa, TWO, false, THREE, false); Range r1 = new Range(EMPTY, fa, ONE, false, THREE, false); @@ -831,7 +855,7 @@ public class OptimizerTests extends ESTestCase { // (2 < a <= 3) OR (1 < a < 3) -> same (the <= prevents the ranges from being combined) public void testCombineBinaryComparisonsWithDifferentUpperLimitInclusion() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); Range r1 = new Range(EMPTY, fa, ONE, false, THREE, false); Range r2 = new Range(EMPTY, fa, TWO, false, THREE, true); @@ -845,7 +869,7 @@ 
public class OptimizerTests extends ESTestCase { // (0 < a <= 1) OR (0 < a < 2) -> 0 < a < 2 public void testRangesOverlappingNoLowerBoundary() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); Range r2 = new Range(EMPTY, fa, L(0), false, TWO, false); Range r1 = new Range(EMPTY, fa, L(0), false, ONE, true); @@ -861,7 +885,7 @@ public class OptimizerTests extends ESTestCase { // a == 1 AND a == 2 -> FALSE public void testDualEqualsConjunction() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); Equals eq1 = new Equals(EMPTY, fa, ONE); Equals eq2 = new Equals(EMPTY, fa, TWO); @@ -872,7 +896,7 @@ public class OptimizerTests extends ESTestCase { // 1 <= a < 10 AND a == 1 -> a == 1 public void testEliminateRangeByEqualsInInterval() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); Equals eq1 = new Equals(EMPTY, fa, ONE); Range r = new Range(EMPTY, fa, ONE, true, L(10), false); @@ -883,7 +907,7 @@ public class OptimizerTests extends ESTestCase { // 1 < a < 10 AND a == 10 -> FALSE public void testEliminateRangeByEqualsOutsideInterval() { - FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.INTEGER, emptyMap(), true)); + FieldAttribute fa = getFieldAttribute(); Equals eq1 = new Equals(EMPTY, fa, L(10)); Range r = new Range(EMPTY, fa, ONE, false, L(10), false); @@ -891,4 +915,4 @@ public class OptimizerTests extends ESTestCase { Expression exp = rule.rule(new And(EMPTY, eq1, r)); assertEquals(Literal.FALSE, rule.rule(exp)); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java index 122da8fa3a6..0ee0c9bcca1 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction; +import org.elasticsearch.xpack.sql.expression.function.scalar.Cast; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mul; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Neg; @@ -156,4 +157,89 @@ public class ExpressionTests extends ESTestCase { assertEquals(Literal.class, sub2.children().get(1).getClass()); assertEquals("2", ((Literal) sub2.children().get(1)).name()); } + + public void testCastWithUnquotedDataType() { + Expression expr = parser.createExpression("CAST(10*2 AS long)"); + assertEquals(Cast.class, expr.getClass()); + Cast cast = (Cast) expr; + assertEquals(DataType.INTEGER, cast.from()); + assertEquals(DataType.LONG, cast.to()); + assertEquals(DataType.LONG, cast.dataType()); + assertEquals(Mul.class, cast.field().getClass()); + Mul mul = (Mul) cast.field(); + assertEquals("10 * 2", mul.name()); + assertEquals(DataType.INTEGER, mul.dataType()); + } + + public void testCastWithQuotedDataType() { + Expression expr = 
parser.createExpression("CAST(10*2 AS \"LonG\")"); + assertEquals(Cast.class, expr.getClass()); + Cast cast = (Cast) expr; + assertEquals(DataType.INTEGER, cast.from()); + assertEquals(DataType.LONG, cast.to()); + assertEquals(DataType.LONG, cast.dataType()); + assertEquals(Mul.class, cast.field().getClass()); + Mul mul = (Mul) cast.field(); + assertEquals("10 * 2", mul.name()); + assertEquals(DataType.INTEGER, mul.dataType()); + } + + public void testCastWithInvalidDataType() { + ParsingException ex = expectThrows(ParsingException.class, () -> parser.createExpression("CAST(1 AS INVALID)")); + assertEquals("line 1:12: Does not recognize type invalid", ex.getMessage()); + } + + public void testConvertWithUnquotedDataType() { + Expression expr = parser.createExpression("CONVERT(10*2, long)"); + assertEquals(Cast.class, expr.getClass()); + Cast cast = (Cast) expr; + assertEquals(DataType.INTEGER, cast.from()); + assertEquals(DataType.LONG, cast.to()); + assertEquals(DataType.LONG, cast.dataType()); + assertEquals(Mul.class, cast.field().getClass()); + Mul mul = (Mul) cast.field(); + assertEquals("10 * 2", mul.name()); + assertEquals(DataType.INTEGER, mul.dataType()); + } + + public void testConvertWithQuotedDataType() { + Expression expr = parser.createExpression("CONVERT(10*2, \"LonG\")"); + assertEquals(Cast.class, expr.getClass()); + Cast cast = (Cast) expr; + assertEquals(DataType.INTEGER, cast.from()); + assertEquals(DataType.LONG, cast.to()); + assertEquals(DataType.LONG, cast.dataType()); + assertEquals(Mul.class, cast.field().getClass()); + Mul mul = (Mul) cast.field(); + assertEquals("10 * 2", mul.name()); + assertEquals(DataType.INTEGER, mul.dataType()); + } + + public void testConvertWithUnquotedODBCDataType() { + Expression expr = parser.createExpression("CONVERT(1, Sql_BigInt)"); + assertEquals(Cast.class, expr.getClass()); + Cast cast = (Cast) expr; + assertEquals(DataType.INTEGER, cast.from()); + assertEquals(DataType.LONG, cast.to()); + assertEquals(DataType.LONG, cast.dataType()); + } + + public void testConvertWithQuotedODBCDataType() { + Expression expr = parser.createExpression("CONVERT(1, \"sql_BIGint\")"); + assertEquals(Cast.class, expr.getClass()); + Cast cast = (Cast) expr; + assertEquals(DataType.INTEGER, cast.from()); + assertEquals(DataType.LONG, cast.to()); + assertEquals(DataType.LONG, cast.dataType()); + } + + public void testConvertWithInvalidODBCDataType() { + ParsingException ex = expectThrows(ParsingException.class, () -> parser.createExpression("CONVERT(1, SQL_INVALID)")); + assertEquals("line 1:13: Invalid data type [SQL_INVALID] provided", ex.getMessage()); + } + + public void testConvertWithInvalidESDataType() { + ParsingException ex = expectThrows(ParsingException.class, () -> parser.createExpression("CONVERT(1, INVALID)")); + assertEquals("line 1:13: Invalid data type [INVALID] provided", ex.getMessage()); + } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index 71f4dab679c..8d5db634ff0 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -5,7 +5,7 @@ */ package org.elasticsearch.xpack.sql.planner; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.AbstractBuilderTestCase; import 
org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer; import org.elasticsearch.xpack.sql.analysis.index.EsIndex; @@ -20,30 +20,40 @@ import org.elasticsearch.xpack.sql.plan.logical.Project; import org.elasticsearch.xpack.sql.planner.QueryTranslator.QueryTranslation; import org.elasticsearch.xpack.sql.querydsl.query.Query; import org.elasticsearch.xpack.sql.querydsl.query.RangeQuery; +import org.elasticsearch.xpack.sql.querydsl.query.ScriptQuery; import org.elasticsearch.xpack.sql.querydsl.query.TermQuery; +import org.elasticsearch.xpack.sql.querydsl.query.TermsQuery; import org.elasticsearch.xpack.sql.type.EsField; import org.elasticsearch.xpack.sql.type.TypesTests; import org.joda.time.DateTime; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import java.io.IOException; import java.util.Map; import java.util.TimeZone; -public class QueryTranslatorTests extends ESTestCase { +import static org.hamcrest.core.StringStartsWith.startsWith; - private SqlParser parser; - private IndexResolution getIndexResult; - private FunctionRegistry functionRegistry; - private Analyzer analyzer; - - public QueryTranslatorTests() { +public class QueryTranslatorTests extends AbstractBuilderTestCase { + + private static SqlParser parser; + private static Analyzer analyzer; + + @BeforeClass + public static void init() { parser = new SqlParser(); - functionRegistry = new FunctionRegistry(); Map mapping = TypesTests.loadMapping("mapping-multi-field-variation.json"); - EsIndex test = new EsIndex("test", mapping); - getIndexResult = IndexResolution.valid(test); - analyzer = new Analyzer(functionRegistry, getIndexResult, TimeZone.getTimeZone("UTC")); + IndexResolution getIndexResult = IndexResolution.valid(test); + analyzer = new Analyzer(new FunctionRegistry(), getIndexResult, TimeZone.getTimeZone("UTC")); + } + + @AfterClass + public static void destroy() { + parser = null; + analyzer = null; } private LogicalPlan plan(String sql) { @@ -149,4 +159,41 @@ public class QueryTranslatorTests extends ESTestCase { SqlIllegalArgumentException ex = expectThrows(SqlIllegalArgumentException.class, () -> QueryTranslator.toQuery(condition, false)); assertEquals("Scalar function (LTRIM(keyword)) not allowed (yet) as arguments for LIKE", ex.getMessage()); } -} \ No newline at end of file + + public void testTranslateInExpression_WhereClause() throws IOException { + LogicalPlan p = plan("SELECT * FROM test WHERE keyword IN ('foo', 'bar', 'lala', 'foo', concat('la', 'la'))"); + assertTrue(p instanceof Project); + assertTrue(p.children().get(0) instanceof Filter); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + QueryTranslation translation = QueryTranslator.toQuery(condition, false); + Query query = translation.query; + assertTrue(query instanceof TermsQuery); + TermsQuery tq = (TermsQuery) query; + assertEquals("keyword:(bar foo lala)", tq.asBuilder().toQuery(createShardContext()).toString()); + } + + public void testTranslateInExpressionInvalidValues_WhereClause() { + LogicalPlan p = plan("SELECT * FROM test WHERE keyword IN ('foo', 'bar', keyword)"); + assertTrue(p instanceof Project); + assertTrue(p.children().get(0) instanceof Filter); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + SqlIllegalArgumentException ex = expectThrows(SqlIllegalArgumentException.class, () -> QueryTranslator.toQuery(condition, false)); + assertEquals("Line 
1:52: Comparisons against variables are not (currently) supported; " + + "offender [keyword] in [keyword IN(foo, bar, keyword)]", ex.getMessage()); + } + + public void testTranslateInExpression_HavingClause_Painless() { + LogicalPlan p = plan("SELECT keyword, max(int) FROM test GROUP BY keyword HAVING max(int) in (10, 20, 30 - 10)"); + assertTrue(p instanceof Project); + assertTrue(p.children().get(0) instanceof Filter); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + QueryTranslation translation = QueryTranslator.toQuery(condition, false); + assertTrue(translation.query instanceof ScriptQuery); + ScriptQuery sq = (ScriptQuery) translation.query; + assertEquals("InternalSqlScriptUtils.nullSafeFilter(params.a0==10 || params.a0==20)", sq.script().toString()); + assertThat(sq.script().params().toString(), startsWith("[{a=MAX(int){a->")); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java index 1db470211f0..f773634fe72 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java @@ -246,4 +246,10 @@ public class DataTypeConversionTests extends ESTestCase { assertEquals(type, DataType.fromEsType(type.esType)); } } + + public void testConversionToUnsupported() { + Exception e = expectThrows(SqlIllegalArgumentException.class, + () -> DataTypeConversion.conversionFor(DataType.INTEGER, DataType.UNSUPPORTED)); + assertEquals("cannot convert from [INTEGER] to [UNSUPPORTED]", e.getMessage()); + } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java index 891b11ba70b..30f9d82ff77 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java @@ -59,10 +59,10 @@ public class TypesTests extends ESTestCase { assertThat(mapping.size(), is(1)); EsField type = mapping.get("full_name"); assertThat(type, instanceOf(TextEsField.class)); - assertThat(type.hasDocValues(), is(false)); + assertThat(type.isAggregatable(), is(false)); TextEsField ttype = (TextEsField) type; assertThat(type.getPrecision(), is(Integer.MAX_VALUE)); - assertThat(ttype.hasDocValues(), is(false)); + assertThat(ttype.isAggregatable(), is(false)); } public void testKeywordField() { @@ -71,7 +71,7 @@ public class TypesTests extends ESTestCase { assertThat(mapping.size(), is(1)); EsField field = mapping.get("full_name"); assertThat(field, instanceOf(KeywordEsField.class)); - assertThat(field.hasDocValues(), is(true)); + assertThat(field.isAggregatable(), is(true)); assertThat(field.getPrecision(), is(256)); } @@ -81,7 +81,7 @@ public class TypesTests extends ESTestCase { assertThat(mapping.size(), is(1)); EsField field = mapping.get("date"); assertThat(field.getDataType(), is(DATE)); - assertThat(field.hasDocValues(), is(true)); + assertThat(field.isAggregatable(), is(true)); assertThat(field.getPrecision(), is(24)); DateEsField dfield = (DateEsField) field; @@ -95,7 +95,7 @@ public class TypesTests extends ESTestCase { assertThat(mapping.size(), is(1)); EsField field = mapping.get("date"); assertThat(field.getDataType(), is(DATE)); - 
assertThat(field.hasDocValues(), is(true)); + assertThat(field.isAggregatable(), is(true)); DateEsField dfield = (DateEsField) field; // default types assertThat(dfield.getFormats(), hasSize(2)); @@ -107,7 +107,7 @@ public class TypesTests extends ESTestCase { assertThat(mapping.size(), is(1)); EsField field = mapping.get("date"); assertThat(field.getDataType(), is(DATE)); - assertThat(field.hasDocValues(), is(true)); + assertThat(field.isAggregatable(), is(true)); DateEsField dfield = (DateEsField) field; // default types assertThat(dfield.getFormats(), hasSize(1)); @@ -120,7 +120,7 @@ public class TypesTests extends ESTestCase { EsField field = mapping.get("session_id"); assertThat(field, instanceOf(KeywordEsField.class)); assertThat(field.getPrecision(), is(15)); - assertThat(field.hasDocValues(), is(false)); + assertThat(field.isAggregatable(), is(false)); } public void testDottedField() { diff --git a/x-pack/plugin/sql/src/test/resources/mapping-basic-incompatible.json b/x-pack/plugin/sql/src/test/resources/mapping-basic-incompatible.json new file mode 100644 index 00000000000..9042415a515 --- /dev/null +++ b/x-pack/plugin/sql/src/test/resources/mapping-basic-incompatible.json @@ -0,0 +1,22 @@ +{ + "properties" : { + "emp_no" : { + "type" : "long" + }, + "first_name" : { + "type" : "text" + }, + "gender" : { + "type" : "text" + }, + "languages" : { + "type" : "byte" + }, + "last_name" : { + "type" : "text" + }, + "salary" : { + "type" : "integer" + } + } +} diff --git a/x-pack/plugin/sql/src/test/resources/mapping-basic-nodocvalues.json b/x-pack/plugin/sql/src/test/resources/mapping-basic-nodocvalues.json new file mode 100644 index 00000000000..bb9cd60dc02 --- /dev/null +++ b/x-pack/plugin/sql/src/test/resources/mapping-basic-nodocvalues.json @@ -0,0 +1,23 @@ +{ + "properties" : { + "emp_no" : { + "type" : "integer", + "doc_values" : false + }, + "first_name" : { + "type" : "text" + }, + "gender" : { + "type" : "keyword" + }, + "languages" : { + "type" : "byte" + }, + "last_name" : { + "type" : "text" + }, + "salary" : { + "type" : "integer" + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json index 9554175af6c..a5c8de74836 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json @@ -3,13 +3,13 @@ "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current", "methods": [ "DELETE" ], "url": { - "path": "/_ccr/auto_follow/{leader_cluster}", - "paths": [ "/_ccr/auto_follow/{leader_cluster}" ], + "path": "/_ccr/auto_follow/{name}", + "paths": [ "/_ccr/auto_follow/{name}" ], "parts": { - "leader_cluster": { + "name": { "type": "string", "required": true, - "description": "The name of the leader cluster alias." + "description": "The name of the auto follow pattern." 
} } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json index 456c42a160f..65d0b9c8dea 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json @@ -3,12 +3,12 @@ "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current", "methods": [ "GET" ], "url": { - "path": "/_ccr/auto_follow/{leader_cluster}", - "paths": [ "/_ccr/auto_follow", "/_ccr/auto_follow/{leader_cluster}" ], + "path": "/_ccr/auto_follow/{name}", + "paths": [ "/_ccr/auto_follow", "/_ccr/auto_follow/{name}" ], "parts": { - "leader_cluster": { + "name": { "type": "string", - "description": "The name of the leader cluster alias." + "description": "The name of the auto follow pattern." } } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json index ae6508b4725..5f7aac790a0 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json @@ -3,13 +3,13 @@ "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current", "methods": [ "PUT" ], "url": { - "path": "/_ccr/auto_follow/{leader_cluster}", - "paths": [ "/_ccr/auto_follow/{leader_cluster}" ], + "path": "/_ccr/auto_follow/{name}", + "paths": [ "/_ccr/auto_follow/{name}" ], "parts": { - "leader_cluster": { + "name": { "type": "string", "required": true, - "description": "The name of the leader cluster alias." + "description": "The name of the auto follow pattern." } } }, diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml index 8eb4a358d15..ebf953c9352 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml @@ -184,15 +184,18 @@ setup: - is_true: started - do: + catch: request xpack.rollup.delete_job: id: foo - - is_true: acknowledged + - is_false: acknowledged + - match: { task_failures.0.reason.type: "illegal_state_exception" } + - match: { task_failures.0.reason.reason: "Could not delete job [foo] because indexer state is [STARTED]. Job must be [STOPPED] before deletion." } --- "Test delete non-existent job": - do: - catch: /the task with id does_not_exist doesn't exist/ + catch: /the task with id \[does_not_exist\] doesn't exist/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser xpack.rollup.delete_job: diff --git a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java index fb9c665b2bf..3ea1b8e6747 100644 --- a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java +++ b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.multi_node; -import org.apache.http.HttpStatus; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; @@ -20,16 +19,10 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.watcher.support.xcontent.ObjectPath; -import org.junit.After; -import java.io.BufferedReader; import java.io.IOException; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; import java.time.Instant; import java.time.ZoneId; import java.time.ZonedDateTime; @@ -68,13 +61,6 @@ public class RollupIT extends ESRestTestCase { return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false); } - @After - public void clearRollupMetadata() throws Exception { - deleteAllJobs(); - waitForPendingTasks(); - // indices will be deleted by the ESRestTestCase class - } - public void testBigRollup() throws Exception { final int numDocs = 200; String dateFormat = "strict_date_optional_time"; @@ -293,60 +279,4 @@ public class RollupIT extends ESRestTestCase { } return null; } - - private void waitForPendingTasks() throws Exception { - ESTestCase.assertBusy(() -> { - try { - Request request = new Request("GET", "/_cat/tasks"); - request.addParameter("detailed", "true"); - Response response = adminClient().performRequest(request); - if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) { - try (BufferedReader responseReader = new BufferedReader( - new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) { - int activeTasks = 0; - String line; - StringBuilder tasksListString = new StringBuilder(); - while ((line = responseReader.readLine()) != null) { - - // We only care about Rollup jobs, otherwise this fails too easily due to unrelated tasks - if (line.startsWith(RollupJob.NAME) == true) { - activeTasks++; - tasksListString.append(line); - tasksListString.append('\n'); - } - } - assertEquals(activeTasks + " active tasks found:\n" + tasksListString, 0, activeTasks); - } - } - } catch (IOException e) { - throw new AssertionError("Error getting active tasks list", e); - } - }); - } - - @SuppressWarnings("unchecked") - private void deleteAllJobs() throws Exception { - Request request = new Request("GET", "/_xpack/rollup/job/_all"); - Response response = adminClient().performRequest(request); - Map jobs = ESRestTestCase.entityAsMap(response); - @SuppressWarnings("unchecked") - List> jobConfigs = - (List>) XContentMapValues.extractValue("jobs", jobs); - - if (jobConfigs == null) { - return; - } - - for (Map jobConfig : jobConfigs) { - logger.debug(jobConfig); - String jobId = (String) ((Map) jobConfig.get("config")).get("id"); - logger.debug("Deleting job " + jobId); - try { 
- request = new Request("DELETE", "/_xpack/rollup/job/" + jobId); - adminClient().performRequest(request); - } catch (Exception e) { - // ok - } - } - } } diff --git a/x-pack/qa/sql/security/roles.yml b/x-pack/qa/sql/security/roles.yml index 1759c972d34..337d7c7f9c7 100644 --- a/x-pack/qa/sql/security/roles.yml +++ b/x-pack/qa/sql/security/roles.yml @@ -7,8 +7,8 @@ rest_minimal: privileges: [read, "indices:admin/get"] # end::rest -# tag::cli_jdbc -cli_or_jdbc_minimal: +# tag::cli_drivers +cli_or_drivers_minimal: cluster: - "cluster:monitor/main" indices: @@ -16,7 +16,7 @@ cli_or_jdbc_minimal: privileges: [read, "indices:admin/get"] - names: bort privileges: [read, "indices:admin/get"] -# end::cli_jdbc +# end::cli_drivers read_something_else: cluster: @@ -82,6 +82,6 @@ no_get_index: - "cluster:monitor/main" indices: - names: test - privileges: [read] + privileges: [monitor] - names: bort - privileges: [read] + privileges: [monitor] diff --git a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/CliSecurityIT.java b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/CliSecurityIT.java index 5e8aa4ec6ad..3e0b5789138 100644 --- a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/CliSecurityIT.java +++ b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/CliSecurityIT.java @@ -7,9 +7,10 @@ package org.elasticsearch.xpack.qa.sql.security; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.xpack.qa.sql.cli.ErrorsTestCase; import org.elasticsearch.xpack.qa.sql.cli.EmbeddedCli; import org.elasticsearch.xpack.qa.sql.cli.EmbeddedCli.SecurityConfig; +import org.elasticsearch.xpack.qa.sql.cli.ErrorsTestCase; + import java.io.IOException; import java.net.URISyntaxException; import java.nio.file.Files; @@ -20,7 +21,6 @@ import java.util.List; import java.util.Map; import static org.elasticsearch.xpack.qa.sql.cli.CliIntegrationTestCase.elasticsearchAddress; -import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.startsWith; @@ -53,7 +53,7 @@ public class CliSecurityIT extends SqlSecurityTestCase { private static class CliActions implements Actions { @Override public String minimalPermissionsForAllActions() { - return "cli_or_jdbc_minimal"; + return "cli_or_drivers_minimal"; } private SecurityConfig userSecurity(String user) { @@ -121,12 +121,19 @@ public class CliSecurityIT extends SqlSecurityTestCase { } @Override - public void expectDescribe(Map columns, String user) throws Exception { + public void expectDescribe(Map> columns, String user) throws Exception { try (EmbeddedCli cli = new EmbeddedCli(elasticsearchAddress(), true, userSecurity(user))) { - assertThat(cli.command("DESCRIBE test"), containsString("column | type")); - assertEquals("---------------+---------------", cli.readLine()); - for (Map.Entry column : columns.entrySet()) { - assertThat(cli.readLine(), both(startsWith(column.getKey())).and(containsString("|" + column.getValue()))); + String output = cli.command("DESCRIBE test"); + assertThat(output, containsString("column")); + assertThat(output, containsString("type")); + assertThat(output, containsString("mapping")); + assertThat(cli.readLine(), containsString("-+---------------+---------------")); + for (Map.Entry> column : columns.entrySet()) { + String line = cli.readLine(); + assertThat(line, startsWith(column.getKey())); + for (String 
value : column.getValue()) { + assertThat(line, containsString(value)); + } } assertEquals("", cli.readLine()); } diff --git a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/JdbcSecurityIT.java b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/JdbcSecurityIT.java index 48b850d0acf..848b98eeb7b 100644 --- a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/JdbcSecurityIT.java +++ b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/JdbcSecurityIT.java @@ -118,7 +118,7 @@ public class JdbcSecurityIT extends SqlSecurityTestCase { private static class JdbcActions implements Actions { @Override public String minimalPermissionsForAllActions() { - return "cli_or_jdbc_minimal"; + return "cli_or_drivers_minimal"; } @Override @@ -158,22 +158,26 @@ public class JdbcSecurityIT extends SqlSecurityTestCase { } @Override - public void expectDescribe(Map columns, String user) throws Exception { + public void expectDescribe(Map> columns, String user) throws Exception { try (Connection h2 = LocalH2.anonymousDb(); Connection es = es(userProperties(user))) { // h2 doesn't have the same sort of DESCRIBE that we have so we emulate it - h2.createStatement().executeUpdate("CREATE TABLE mock (column VARCHAR, type VARCHAR)"); + h2.createStatement().executeUpdate("CREATE TABLE mock (column VARCHAR, type VARCHAR, mapping VARCHAR)"); if (columns.size() > 0) { StringBuilder insert = new StringBuilder(); - insert.append("INSERT INTO mock (column, type) VALUES "); + insert.append("INSERT INTO mock (column, type, mapping) VALUES "); boolean first = true; - for (Map.Entry column : columns.entrySet()) { + for (Map.Entry> column : columns.entrySet()) { if (first) { first = false; } else { insert.append(", "); } - insert.append("('").append(column.getKey()).append("', '").append(column.getValue()).append("')"); + insert.append("('").append(column.getKey()).append("'"); + for (String value : column.getValue()) { + insert.append(", '").append(value).append("'"); + } + insert.append(")"); } h2.createStatement().executeUpdate(insert.toString()); } @@ -250,7 +254,7 @@ public class JdbcSecurityIT extends SqlSecurityTestCase { // Metadata methods only available to JDBC public void testMetaDataGetTablesWithFullAccess() throws Exception { - createUser("full_access", "cli_or_jdbc_minimal"); + createUser("full_access", "cli_or_drivers_minimal"); expectActionMatchesAdmin( con -> con.getMetaData().getTables("%", "%", "%t", null), @@ -283,7 +287,7 @@ public class JdbcSecurityIT extends SqlSecurityTestCase { } public void testMetaDataGetColumnsWorksAsFullAccess() throws Exception { - createUser("full_access", "cli_or_jdbc_minimal"); + createUser("full_access", "cli_or_drivers_minimal"); expectActionMatchesAdmin( con -> con.getMetaData().getColumns(null, "%", "%t", "%"), diff --git a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/RestSqlSecurityIT.java b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/RestSqlSecurityIT.java index 51440cc68dd..607b37e39d1 100644 --- a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/RestSqlSecurityIT.java +++ b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/RestSqlSecurityIT.java @@ -93,15 +93,19 @@ public class RestSqlSecurityIT extends SqlSecurityTestCase { } @Override - public void expectDescribe(Map columns, String user) throws Exception { + public void expectDescribe(Map> 
columns, String user) throws Exception {
         String mode = randomMode();
         Map<String, Object> expected = new HashMap<>(3);
         expected.put("columns", Arrays.asList(
                 columnInfo(mode, "column", "keyword", JDBCType.VARCHAR, 0),
-                columnInfo(mode, "type", "keyword", JDBCType.VARCHAR, 0)));
+                columnInfo(mode, "type", "keyword", JDBCType.VARCHAR, 0),
+                columnInfo(mode, "mapping", "keyword", JDBCType.VARCHAR, 0)));
         List<List<String>> rows = new ArrayList<>(columns.size());
-        for (Map.Entry<String, String> column : columns.entrySet()) {
-            rows.add(Arrays.asList(column.getKey(), column.getValue()));
+        for (Map.Entry<String, List<String>> column : columns.entrySet()) {
+            List<String> cols = new ArrayList<>();
+            cols.add(column.getKey());
+            cols.addAll(column.getValue());
+            rows.add(cols);
         }
         expected.put("rows", rows);
@@ -232,7 +236,7 @@ public class RestSqlSecurityIT extends SqlSecurityTestCase {
         assertEquals(404, e.getResponse().getStatusLine().getStatusCode());

         createAuditLogAsserter()
-            .expectSqlCompositeAction("test_admin", "test")
+            .expectSqlCompositeActionFieldCaps("test_admin", "test")
             .expect(true, SQL_ACTION_NAME, "full_access", empty())
             // one scroll access denied per shard
             .expect("access_denied", SQL_ACTION_NAME, "full_access", "default_native", empty(), "InternalScrollSearchRequest")
diff --git a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/SqlSecurityTestCase.java b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/SqlSecurityTestCase.java
index c9076e38a0d..0e3d2cab2ff 100644
--- a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/SqlSecurityTestCase.java
+++ b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/SqlSecurityTestCase.java
@@ -10,6 +10,8 @@ import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.SpecialPermission;
 import org.elasticsearch.action.admin.indices.get.GetIndexAction;
 import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
+import org.elasticsearch.action.fieldcaps.FieldCapabilitiesAction;
+import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.common.Strings;
@@ -40,11 +42,12 @@ import java.util.Map;
 import java.util.TreeMap;
 import java.util.function.Function;

+import static java.util.Arrays.asList;
 import static java.util.Collections.singletonMap;
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.empty;
-import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.hasItems;
+import static org.hamcrest.Matchers.is;

 public abstract class SqlSecurityTestCase extends ESRestTestCase {
     /**
@@ -65,7 +68,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
          * to 1 and completely scrolls the results.
          */
         void expectScrollMatchesAdmin(String adminSql, String user, String userSql) throws Exception;
-        void expectDescribe(Map<String, String> columns, String user) throws Exception;
+        void expectDescribe(Map<String, List<String>> columns, String user) throws Exception;
         void expectShowTables(List<String> tables, String user) throws Exception;
         void expectForbidden(String user, String sql) throws Exception;
         void expectUnknownIndex(String user, String sql) throws Exception;
@@ -196,7 +199,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
     public void testQueryWorksAsAdmin() throws Exception {
         actions.queryWorksAsAdmin();
         createAuditLogAsserter()
-            .expectSqlCompositeAction("test_admin", "test")
+            .expectSqlCompositeActionFieldCaps("test_admin", "test")
             .assertLogs();
     }

@@ -205,8 +208,8 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
         actions.expectMatchesAdmin("SELECT * FROM test ORDER BY a", "full_access", "SELECT * FROM test ORDER BY a");
         createAuditLogAsserter()
-            .expectSqlCompositeAction("test_admin", "test")
-            .expectSqlCompositeAction("full_access", "test")
+            .expectSqlCompositeActionFieldCaps("test_admin", "test")
+            .expectSqlCompositeActionFieldCaps("full_access", "test")
             .assertLogs();
     }

@@ -215,12 +218,12 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
         actions.expectScrollMatchesAdmin("SELECT * FROM test ORDER BY a", "full_access", "SELECT * FROM test ORDER BY a");
         createAuditLogAsserter()
-            .expectSqlCompositeAction("test_admin", "test")
+            .expectSqlCompositeActionFieldCaps("test_admin", "test")
             /* Scrolling doesn't have to access the index again, at least not through sql.
              * If we asserted query and scroll logs then we would see the scroll.
              */
             .expect(true, SQL_ACTION_NAME, "test_admin", empty())
             .expect(true, SQL_ACTION_NAME, "test_admin", empty())
-            .expectSqlCompositeAction("full_access", "test")
+            .expectSqlCompositeActionFieldCaps("full_access", "test")
             .expect(true, SQL_ACTION_NAME, "full_access", empty())
             .expect(true, SQL_ACTION_NAME, "full_access", empty())
             .assertLogs();
@@ -243,7 +246,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
             //This user has permission to run sql queries so they are given preliminary authorization
             .expect(true, SQL_ACTION_NAME, "wrong_access", empty())
             //the following get index is granted too but against the no indices placeholder, as ignore_unavailable=true
-            .expect(true, GetIndexAction.NAME, "wrong_access", hasItems("*", "-*"))
+            .expect(true, FieldCapabilitiesAction.NAME, "wrong_access", hasItems("*", "-*"))
             .assertLogs();
     }

@@ -252,8 +255,8 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
         actions.expectMatchesAdmin("SELECT a FROM test ORDER BY a", "only_a", "SELECT * FROM test ORDER BY a");
         createAuditLogAsserter()
-            .expectSqlCompositeAction("test_admin", "test")
-            .expectSqlCompositeAction("only_a", "test")
+            .expectSqlCompositeActionFieldCaps("test_admin", "test")
+            .expectSqlCompositeActionFieldCaps("only_a", "test")
             .assertLogs();
     }

@@ -262,18 +265,18 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
         actions.expectScrollMatchesAdmin("SELECT a FROM test ORDER BY a", "only_a", "SELECT * FROM test ORDER BY a");
         createAuditLogAsserter()
-            .expectSqlCompositeAction("test_admin", "test")
+            .expectSqlCompositeActionFieldCaps("test_admin", "test")
             /* Scrolling doesn't have to access the index again, at least not through sql.
              * If we asserted query and scroll logs then we would see the scroll.
              */
             .expect(true, SQL_ACTION_NAME, "test_admin", empty())
             .expect(true, SQL_ACTION_NAME, "test_admin", empty())
-            .expectSqlCompositeAction("only_a", "test")
+            .expectSqlCompositeActionFieldCaps("only_a", "test")
             .expect(true, SQL_ACTION_NAME, "only_a", empty())
             .expect(true, SQL_ACTION_NAME, "only_a", empty())
             .assertLogs();
     }

-    public void testQueryStringSingeFieldGrantedWrongRequested() throws Exception {
+    public void testQueryStringSingleFieldGrantedWrongRequested() throws Exception {
         createUser("only_a", "read_test_a");

         actions.expectUnknownColumn("only_a", "SELECT c FROM test", "c");
@@ -284,7 +287,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
          * out but it failed in SQL because it couldn't compile the
          * query without the metadata for the missing field.
          */
         createAuditLogAsserter()
-            .expectSqlCompositeAction("only_a", "test")
+            .expectSqlCompositeActionFieldCaps("only_a", "test")
             .assertLogs();
     }

@@ -293,8 +296,8 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
         actions.expectMatchesAdmin("SELECT a, b FROM test ORDER BY a", "not_c", "SELECT * FROM test ORDER BY a");
         createAuditLogAsserter()
-            .expectSqlCompositeAction("test_admin", "test")
-            .expectSqlCompositeAction("not_c", "test")
+            .expectSqlCompositeActionFieldCaps("test_admin", "test")
+            .expectSqlCompositeActionFieldCaps("not_c", "test")
             .assertLogs();
     }

@@ -303,12 +306,12 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
         actions.expectScrollMatchesAdmin("SELECT a, b FROM test ORDER BY a", "not_c", "SELECT * FROM test ORDER BY a");
         createAuditLogAsserter()
-            .expectSqlCompositeAction("test_admin", "test")
+            .expectSqlCompositeActionFieldCaps("test_admin", "test")
             /* Scrolling doesn't have to access the index again, at least not through sql.
              * If we asserted query and scroll logs then we would see the scroll.
              */
             .expect(true, SQL_ACTION_NAME, "test_admin", empty())
             .expect(true, SQL_ACTION_NAME, "test_admin", empty())
-            .expectSqlCompositeAction("not_c", "test")
+            .expectSqlCompositeActionFieldCaps("not_c", "test")
             .expect(true, SQL_ACTION_NAME, "not_c", empty())
             .expect(true, SQL_ACTION_NAME, "not_c", empty())
             .assertLogs();
@@ -325,7 +328,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
          * out but it failed in SQL because it couldn't compile the
          * query without the metadata for the missing field.
          */
         createAuditLogAsserter()
-            .expectSqlCompositeAction("not_c", "test")
+            .expectSqlCompositeActionFieldCaps("not_c", "test")
             .assertLogs();
     }

@@ -334,15 +337,15 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
         actions.expectMatchesAdmin("SELECT * FROM test WHERE c != 3 ORDER BY a", "no_3s", "SELECT * FROM test ORDER BY a");
         createAuditLogAsserter()
-            .expectSqlCompositeAction("test_admin", "test")
-            .expectSqlCompositeAction("no_3s", "test")
+            .expectSqlCompositeActionFieldCaps("test_admin", "test")
+            .expectSqlCompositeActionFieldCaps("no_3s", "test")
             .assertLogs();
     }

     public void testShowTablesWorksAsAdmin() throws Exception {
         actions.expectShowTables(Arrays.asList("bort", "test"), null);
         createAuditLogAsserter()
-            .expectSqlCompositeAction("test_admin", "bort", "test")
+            .expectSqlCompositeActionGetIndex("test_admin", "bort", "test")
             .assertLogs();
     }

@@ -351,8 +354,8 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
         actions.expectMatchesAdmin("SHOW TABLES LIKE '%t'", "full_access", "SHOW TABLES");
         createAuditLogAsserter()
-            .expectSqlCompositeAction("test_admin", "bort", "test")
-            .expectSqlCompositeAction("full_access", "bort", "test")
+            .expectSqlCompositeActionGetIndex("test_admin", "bort", "test")
+            .expectSqlCompositeActionGetIndex("full_access", "bort", "test")
             .assertLogs();
     }

@@ -370,8 +373,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
         actions.expectMatchesAdmin("SHOW TABLES LIKE 'bort'", "read_bort", "SHOW TABLES");
         createAuditLogAsserter()
-            .expectSqlCompositeAction("test_admin", "bort")
-            .expectSqlCompositeAction("read_bort", "bort")
+            .expectSqlCompositeActionGetIndex("test_admin", "bort").expectSqlCompositeActionGetIndex("read_bort", "bort")
             .assertLogs();
     }

@@ -388,13 +390,13 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
     }

     public void testDescribeWorksAsAdmin() throws Exception {
-        Map<String, String> expected = new TreeMap<>();
-        expected.put("a", "BIGINT");
-        expected.put("b", "BIGINT");
-        expected.put("c", "BIGINT");
+        Map<String, List<String>> expected = new TreeMap<>();
+        expected.put("a", asList("BIGINT", "LONG"));
+        expected.put("b", asList("BIGINT", "LONG"));
+        expected.put("c", asList("BIGINT", "LONG"));
         actions.expectDescribe(expected, null);
         createAuditLogAsserter()
-            .expectSqlCompositeAction("test_admin", "test")
+            .expectSqlCompositeActionFieldCaps("test_admin", "test")
             .assertLogs();
     }

@@ -403,8 +405,8 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
         actions.expectMatchesAdmin("DESCRIBE test", "full_access", "DESCRIBE test");
         createAuditLogAsserter()
-            .expectSqlCompositeAction("test_admin", "test")
-            .expectSqlCompositeAction("full_access", "test")
+            .expectSqlCompositeActionFieldCaps("test_admin", "test")
+            .expectSqlCompositeActionFieldCaps("full_access", "test")
             .assertLogs();
     }

@@ -425,28 +427,28 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
             //This user has permission to run sql queries so they are given preliminary authorization
             .expect(true, SQL_ACTION_NAME, "wrong_access", empty())
             //the following get index is granted too but against the no indices placeholder, as ignore_unavailable=true
-            .expect(true, GetIndexAction.NAME, "wrong_access", hasItems("*", "-*"))
+            .expect(true, FieldCapabilitiesAction.NAME, "wrong_access", hasItems("*", "-*"))
             .assertLogs();
     }

     public void testDescribeSingleFieldGranted() throws Exception {
         createUser("only_a", "read_test_a");

-        actions.expectDescribe(singletonMap("a", "BIGINT"), "only_a");
+        actions.expectDescribe(singletonMap("a", asList("BIGINT", "LONG")), "only_a");
         createAuditLogAsserter()
-            .expectSqlCompositeAction("only_a", "test")
+            .expectSqlCompositeActionFieldCaps("only_a", "test")
             .assertLogs();
     }

     public void testDescribeSingleFieldExcepted() throws Exception {
         createUser("not_c", "read_test_a_and_b");

-        Map<String, String> expected = new TreeMap<>();
-        expected.put("a", "BIGINT");
-        expected.put("b", "BIGINT");
+        Map<String, List<String>> expected = new TreeMap<>();
+        expected.put("a", asList("BIGINT", "LONG"));
+        expected.put("b", asList("BIGINT", "LONG"));
         actions.expectDescribe(expected, "not_c");
         createAuditLogAsserter()
-            .expectSqlCompositeAction("not_c", "test")
+            .expectSqlCompositeActionFieldCaps("not_c", "test")
             .assertLogs();
     }

@@ -455,8 +457,8 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
         actions.expectMatchesAdmin("DESCRIBE test", "no_3s", "DESCRIBE test");
         createAuditLogAsserter()
-            .expectSqlCompositeAction("test_admin", "test")
-            .expectSqlCompositeAction("no_3s", "test")
+            .expectSqlCompositeActionFieldCaps("test_admin", "test")
+            .expectSqlCompositeActionFieldCaps("no_3s", "test")
             .assertLogs();
     }

@@ -497,12 +499,18 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
     protected class AuditLogAsserter {
         protected final List<Function<Map<String, Object>, Boolean>> logCheckers = new ArrayList<>();

-        public AuditLogAsserter expectSqlCompositeAction(String user, String... indices) {
+        public AuditLogAsserter expectSqlCompositeActionGetIndex(String user, String... indices) {
             expect(true, SQL_ACTION_NAME, user, empty());
             expect(true, GetIndexAction.NAME, user, hasItems(indices));
             return this;
         }

+        public AuditLogAsserter expectSqlCompositeActionFieldCaps(String user, String... indices) {
+            expect(true, SQL_ACTION_NAME, user, empty());
+            expect(true, FieldCapabilitiesAction.NAME, user, hasItems(indices));
+            return this;
+        }
+
         public AuditLogAsserter expect(boolean granted, String action, String principal,
                 Matcher<? extends Iterable<? extends String>> indicesMatcher) {
             String request;
@@ -513,6 +521,9 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
             case GetIndexAction.NAME:
                 request = GetIndexRequest.class.getSimpleName();
                 break;
+            case FieldCapabilitiesAction.NAME:
+                request = FieldCapabilitiesRequest.class.getSimpleName();
+                break;
             default:
                 throw new IllegalArgumentException("Unknown action [" + action + "]");
             }
@@ -523,7 +534,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {

         public AuditLogAsserter expect(String eventAction, String action, String principal, String realm,
                 Matcher<? extends Iterable<? extends String>> indicesMatcher, String request) {
-            logCheckers.add(m -> 
+            logCheckers.add(m ->
                 eventAction.equals(m.get("event.action"))
                 && action.equals(m.get("action"))
                 && principal.equals(m.get("user.name"))
@@ -564,7 +575,9 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
                     continue;
                 }
                 assertThat(log.containsKey("action"), is(true));
-                if (false == (SQL_ACTION_NAME.equals(log.get("action")) || GetIndexAction.NAME.equals(log.get("action")))) {
+                if (false == (SQL_ACTION_NAME.equals(log.get("action"))
+                        || GetIndexAction.NAME.equals(log.get("action"))
+                        || FieldCapabilitiesAction.NAME.equals(log.get("action")))) {
                     // TODO we may want to extend this and the assertions to SearchAction.NAME as well
                     continue;
                 }
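The split of the old expectSqlCompositeAction helper reflects how SQL resolves metadata: queries and DESCRIBE go through the field capabilities API, while SHOW TABLES still lists tables via get-index. A minimal usage sketch of the two helpers defined above (user and index names are the fixtures from this test case, not new API):

    // SELECT/DESCRIBE surface in the audit log as a field_caps request...
    createAuditLogAsserter()
        .expectSqlCompositeActionFieldCaps("test_admin", "test")
        .assertLogs();

    // ...while SHOW TABLES still surfaces as a get_index request.
    createAuditLogAsserter()
        .expectSqlCompositeActionGetIndex("test_admin", "bort", "test")
        .assertLogs();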
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/EmbeddedCli.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/EmbeddedCli.java
index 89184edec0a..234d229f324 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/EmbeddedCli.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/EmbeddedCli.java
@@ -5,13 +5,13 @@
  */
 package org.elasticsearch.xpack.qa.sql.cli;

+import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.cli.MockTerminal;
 import org.elasticsearch.cli.Terminal;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.xpack.sql.cli.Cli;
 import org.elasticsearch.xpack.sql.cli.CliTerminal;
 import org.elasticsearch.xpack.sql.cli.JLineTerminal;
@@ -49,7 +49,7 @@ import static org.junit.Assert.fail;
  * and doesn't run super frequently.
  */
 public class EmbeddedCli implements Closeable {
-    private static final Logger logger = Loggers.getLogger(EmbeddedCli.class);
+    private static final Logger logger = LogManager.getLogger(EmbeddedCli.class);

     private final Thread exec;
     private final Cli cli;
@@ -151,7 +151,9 @@ public class EmbeddedCli implements Closeable {
             }

             // Throw out the logo
-            while (false == readLine().contains("SQL"));
+            while (false == readLine().contains("SQL")) {
+                ;
+            }
             assertConnectionTest();
         } catch (IOException e) {
             try {
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ErrorsTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ErrorsTestCase.java
index 18895fbabbc..dee99a7be1c 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ErrorsTestCase.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ErrorsTestCase.java
@@ -5,9 +5,10 @@
  */
 package org.elasticsearch.xpack.qa.sql.cli;

-import java.io.IOException;
 import org.elasticsearch.client.Request;

+import java.io.IOException;
+
 import static org.hamcrest.Matchers.startsWith;

 /**
@@ -43,7 +44,8 @@ public abstract class ErrorsTestCase extends CliIntegrationTestCase implements o
         client().performRequest(request);

         assertFoundOneProblem(command("SELECT * FROM test"));
-        assertEquals("line 1:15: [test] doesn't have any types so it is incompatible with sql" + END, readLine());
+        //assertEquals("line 1:15: [test] doesn't have any types so it is incompatible with sql" + END, readLine());
+        assertEquals("line 1:15: Unknown index [test]" + END, readLine());
     }

     @Override
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java
index 75204d9ff12..354c44a60ee 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java
@@ -43,7 +43,7 @@ public class DataLoader {

     protected static void loadEmpDatasetIntoEs(RestClient client) throws Exception {
         loadEmpDatasetIntoEs(client, "test_emp", "employees");
-        loadEmpDatasetIntoEs(client, "test_emp_copy", "employees");
+        loadEmpDatasetWithExtraIntoEs(client, "test_emp_copy", "employees");
         makeAlias(client, "test_alias", "test_emp", "test_emp_copy");
         makeAlias(client, "test_alias_emp", "test_emp", "test_emp_copy");
     }
@@ -63,6 +63,14 @@ public class DataLoader {
     }

     protected static void loadEmpDatasetIntoEs(RestClient client, String index, String fileName) throws Exception {
+        loadEmpDatasetIntoEs(client, index, fileName, false);
+    }
+
+    protected static void loadEmpDatasetWithExtraIntoEs(RestClient client, String index, String fileName) throws Exception {
+        loadEmpDatasetIntoEs(client, index, fileName, true);
+    }
+
+    private static void loadEmpDatasetIntoEs(RestClient client, String index, String fileName, boolean extraFields) throws Exception {
         Request request = new Request("PUT", "/" + index);
         XContentBuilder createIndex = JsonXContent.contentBuilder().startObject();
         createIndex.startObject("settings");
@@ -76,10 +84,26 @@ public class DataLoader {
         {
             createIndex.startObject("properties");
             {
-                createIndex.startObject("emp_no").field("type", "integer").endObject();
+                createIndex.startObject("emp_no").field("type", "integer");
+                if (extraFields) {
+                    createIndex.field("copy_to", "extra_no");
+                }
+                createIndex.endObject();
+                if (extraFields) {
+                    createIndex.startObject("extra_no").field("type", "integer").endObject();
+                }
                 createString("first_name", createIndex);
                 createString("last_name", createIndex);
-                createIndex.startObject("gender").field("type", "keyword").endObject();
+                createIndex.startObject("gender").field("type", "keyword");
+                if (extraFields) {
+                    createIndex.field("copy_to", "extra_gender");
+                }
+                createIndex.endObject();
+
+                if (extraFields) {
+                    createIndex.startObject("extra_gender").field("type", "keyword").endObject();
+                }
+
                 createIndex.startObject("birth_date").field("type", "date").endObject();
                 createIndex.startObject("hire_date").field("type", "date").endObject();
                 createIndex.startObject("salary").field("type", "integer").endObject();
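The extra fields are wired up through copy_to, so test_emp_copy exposes a superset of test_emp's columns (extra_no, extra_gender) without changing the bulk-loaded documents. A minimal sketch of the builder calls involved, using the same XContentBuilder API as DataLoader above (field names taken from the code, wrapped objects elided):

    // "gender" is the source field; every indexed value is also copied
    // into "extra_gender", which shows up as an extra DESCRIBE column.
    XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
    mapping.startObject("properties");
    mapping.startObject("gender").field("type", "keyword").field("copy_to", "extra_gender").endObject();
    mapping.startObject("extra_gender").field("type", "keyword").endObject(); // copy target
    mapping.endObject();
    mapping.endObject();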
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ErrorsTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ErrorsTestCase.java
index 8574dd3ece1..65fd0778b57 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ErrorsTestCase.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ErrorsTestCase.java
@@ -5,9 +5,10 @@
  */
 package org.elasticsearch.xpack.qa.sql.jdbc;

+import org.elasticsearch.client.Request;
+
 import java.sql.Connection;
 import java.sql.SQLException;
-import org.elasticsearch.client.Request;

 import static org.hamcrest.Matchers.startsWith;

@@ -40,7 +41,9 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elast
         try (Connection c = esJdbc()) {
             SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT * FROM test").executeQuery());
-            assertEquals("Found 1 problem(s)\nline 1:15: [test] doesn't have any types so it is incompatible with sql", e.getMessage());
+            // see https://github.com/elastic/elasticsearch/issues/34719
+            //assertEquals("Found 1 problem(s)\nline 1:15: [test] doesn't have any types so it is incompatible with sql", e.getMessage());
+            assertEquals("Found 1 problem(s)\nline 1:15: Unknown index [test]", e.getMessage());
         }
     }

diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/rest/RestSqlTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/rest/RestSqlTestCase.java
index dd79bd08514..4df82119e36 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/rest/RestSqlTestCase.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/rest/RestSqlTestCase.java
@@ -219,7 +219,9 @@ public abstract class RestSqlTestCase extends ESRestTestCase implements ErrorsTe
         client().performRequest(request);
         String mode = randomFrom("jdbc", "plain");
         expectBadRequest(() -> runSql(mode, "SELECT * FROM test"),
-            containsString("1:15: [test] doesn't have any types so it is incompatible with sql"));
+            // see https://github.com/elastic/elasticsearch/issues/34719
+            //containsString("1:15: [test] doesn't have any types so it is incompatible with sql"));
+            containsString("1:15: Unknown index [test]"));
     }

     @Override
diff --git a/x-pack/qa/sql/src/main/resources/agg.sql-spec b/x-pack/qa/sql/src/main/resources/agg.sql-spec
index c97f4da0fbc..2c6248059f5 100644
--- a/x-pack/qa/sql/src/main/resources/agg.sql-spec
+++ b/x-pack/qa/sql/src/main/resources/agg.sql-spec
@@ -216,6 +216,8 @@ aggMinWithCastAndFilter
 SELECT gender g, CAST(MIN(emp_no) AS SMALLINT) m, COUNT(1) c FROM "test_emp" WHERE emp_no < 10020 GROUP BY gender ORDER BY gender;
 aggMinWithAlias
 SELECT gender g, MIN(emp_no) m FROM "test_emp" GROUP BY g ORDER BY gender;
+aggMinOnDate
+SELECT gender, MIN(birth_date) m FROM "test_emp" GROUP BY gender ORDER BY gender;

 // Conditional MIN
 aggMinWithHaving
@@ -270,6 +272,8 @@ aggMaxAndCountWithFilterAndLimit
 SELECT gender g, MAX(emp_no) m, COUNT(1) c FROM "test_emp" WHERE emp_no > 10000 GROUP BY gender ORDER BY gender LIMIT 1;
 aggMaxWithAlias
 SELECT gender g, MAX(emp_no) m FROM "test_emp" GROUP BY g ORDER BY gender;
+aggMaxOnDate
+SELECT gender, MAX(birth_date) m FROM "test_emp" GROUP BY gender ORDER BY gender;

 // Conditional MAX
 aggMaxWithHaving
@@ -422,6 +426,11 @@ SELECT MIN(emp_no) AS a, 1 + MIN(emp_no) AS b, ABS(MIN(emp_no)) AS c FROM test_e
 aggRepeatFunctionBetweenSelectAndHaving
 SELECT gender, COUNT(DISTINCT languages) AS c FROM test_emp GROUP BY gender HAVING count(DISTINCT languages) > 0 ORDER BY gender;

+// filter with IN
+aggMultiWithHavingUsingIn
+SELECT MIN(salary) min, MAX(salary) max, gender g, COUNT(*) c FROM "test_emp" WHERE languages > 0 GROUP BY g HAVING max IN(74999, 74600) ORDER BY gender;
+aggMultiGroupByMultiWithHavingUsingIn
+SELECT MIN(salary) min, MAX(salary) max, gender g, languages l, COUNT(*) c FROM "test_emp" WHERE languages > 0 GROUP BY g, languages HAVING max IN (74500, 74600) ORDER BY gender, languages;

 //
@@ -440,4 +449,4 @@ SELECT hire_date HD, COUNT(*) c FROM test_emp GROUP BY hire_date ORDER BY hire_d
 selectHireDateGroupByHireDate
 SELECT hire_date HD, COUNT(*) c FROM test_emp GROUP BY hire_date ORDER BY hire_date DESC;
 selectSalaryGroupBySalary
-SELECT salary, COUNT(*) c FROM test_emp GROUP BY salary ORDER BY salary DESC;
\ No newline at end of file
+SELECT salary, COUNT(*) c FROM test_emp GROUP BY salary ORDER BY salary DESC;
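The new aggMinOnDate/aggMaxOnDate specs pin down MIN/MAX over date fields. A hedged JDBC fragment running the same query (esJdbc() is this suite's connection helper; java.sql.Connection/Statement/ResultSet/Timestamp imports assumed):

    try (Connection c = esJdbc();
         Statement s = c.createStatement();
         ResultSet rs = s.executeQuery(
             "SELECT gender, MAX(birth_date) m FROM test_emp GROUP BY gender ORDER BY gender")) {
        while (rs.next()) {
            // MAX over a date field comes back as a timestamp per group
            Timestamp latest = rs.getTimestamp("m");
        }
    }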
diff --git a/x-pack/qa/sql/src/main/resources/alias.csv-spec b/x-pack/qa/sql/src/main/resources/alias.csv-spec
index a5f928d73e5..2a64bfc34de 100644
--- a/x-pack/qa/sql/src/main/resources/alias.csv-spec
+++ b/x-pack/qa/sql/src/main/resources/alias.csv-spec
@@ -26,47 +26,51 @@ emp_no:i | first_name:s

 describeAlias
 DESCRIBE test_alias;

-column:s | type:s
-
-birth_date | TIMESTAMP
-dep | STRUCT
-dep.dep_id | VARCHAR
-dep.dep_name | VARCHAR
-dep.dep_name.keyword | VARCHAR
-dep.from_date | TIMESTAMP
-dep.to_date | TIMESTAMP
-emp_no | INTEGER
-first_name | VARCHAR
-first_name.keyword | VARCHAR
-gender | VARCHAR
-hire_date | TIMESTAMP
-languages | TINYINT
-last_name | VARCHAR
-last_name.keyword | VARCHAR
-salary | INTEGER
+       column       |     type      |    mapping
+--------------------+---------------+---------------
+birth_date          |TIMESTAMP      |DATE
+dep                 |STRUCT         |NESTED
+dep.dep_id          |VARCHAR        |KEYWORD
+dep.dep_name        |VARCHAR        |TEXT
+dep.dep_name.keyword|VARCHAR        |KEYWORD
+dep.from_date       |TIMESTAMP      |DATE
+dep.to_date         |TIMESTAMP      |DATE
+emp_no              |INTEGER        |INTEGER
+extra_gender        |VARCHAR        |KEYWORD
+extra_no            |INTEGER        |INTEGER
+first_name          |VARCHAR        |TEXT
+first_name.keyword  |VARCHAR        |KEYWORD
+gender              |VARCHAR        |KEYWORD
+hire_date           |TIMESTAMP      |DATE
+languages           |TINYINT        |BYTE
+last_name           |VARCHAR        |TEXT
+last_name.keyword   |VARCHAR        |KEYWORD
+salary              |INTEGER        |INTEGER
 ;

 describePattern
 DESCRIBE "test_*";

-column:s | type:s
-
-birth_date | TIMESTAMP
-dep | STRUCT
-dep.dep_id | VARCHAR
-dep.dep_name | VARCHAR
-dep.dep_name.keyword | VARCHAR
-dep.from_date | TIMESTAMP
-dep.to_date | TIMESTAMP
-emp_no | INTEGER
-first_name | VARCHAR
-first_name.keyword | VARCHAR
-gender | VARCHAR
-hire_date | TIMESTAMP
-languages | TINYINT
-last_name | VARCHAR
-last_name.keyword | VARCHAR
-salary | INTEGER
+       column       |     type      |    mapping
+--------------------+---------------+---------------
+birth_date          |TIMESTAMP      |DATE
+dep                 |STRUCT         |NESTED
+dep.dep_id          |VARCHAR        |KEYWORD
+dep.dep_name        |VARCHAR        |TEXT
+dep.dep_name.keyword|VARCHAR        |KEYWORD
+dep.from_date       |TIMESTAMP      |DATE
+dep.to_date         |TIMESTAMP      |DATE
+emp_no              |INTEGER        |INTEGER
+extra_gender        |VARCHAR        |KEYWORD
+extra_no            |INTEGER        |INTEGER
+first_name          |VARCHAR        |TEXT
+first_name.keyword  |VARCHAR        |KEYWORD
+gender              |VARCHAR        |KEYWORD
+hire_date           |TIMESTAMP      |DATE
+languages           |TINYINT        |BYTE
+last_name           |VARCHAR        |TEXT
+last_name.keyword   |VARCHAR        |KEYWORD
+salary              |INTEGER        |INTEGER
 ;

 showAlias
diff --git a/x-pack/qa/sql/src/main/resources/command.csv-spec b/x-pack/qa/sql/src/main/resources/command.csv-spec
index 879fa486d07..06f38f0a07e 100644
--- a/x-pack/qa/sql/src/main/resources/command.csv-spec
+++ b/x-pack/qa/sql/src/main/resources/command.csv-spec
@@ -200,89 +200,98 @@ test_alias_emp  |ALIAS

 describeSimpleLike
 DESCRIBE LIKE 'test_emp';

- column:s | type:s
-birth_date | TIMESTAMP
-dep | STRUCT
-dep.dep_id | VARCHAR
-dep.dep_name | VARCHAR
-dep.dep_name.keyword | VARCHAR
-dep.from_date | TIMESTAMP
-dep.to_date | TIMESTAMP
-emp_no | INTEGER
-first_name | VARCHAR
-first_name.keyword | VARCHAR
-gender | VARCHAR
-hire_date | TIMESTAMP
-languages | TINYINT
-last_name | VARCHAR
-last_name.keyword | VARCHAR
-salary | INTEGER
+       column       |     type      |    mapping
+--------------------+---------------+---------------
+birth_date          |TIMESTAMP      |DATE
+dep                 |STRUCT         |NESTED
+dep.dep_id          |VARCHAR        |KEYWORD
+dep.dep_name        |VARCHAR        |TEXT
+dep.dep_name.keyword|VARCHAR        |KEYWORD
+dep.from_date       |TIMESTAMP      |DATE
+dep.to_date         |TIMESTAMP      |DATE
+emp_no              |INTEGER        |INTEGER
+extra_gender        |VARCHAR        |KEYWORD
+extra_no            |INTEGER        |INTEGER
+first_name          |VARCHAR        |TEXT
+first_name.keyword  |VARCHAR        |KEYWORD
+gender              |VARCHAR        |KEYWORD
+hire_date           |TIMESTAMP      |DATE
+languages           |TINYINT        |BYTE
+last_name           |VARCHAR        |TEXT
+last_name.keyword   |VARCHAR        |KEYWORD
+salary              |INTEGER        |INTEGER
 ;

 describeMultiLike
 DESCRIBE LIKE 'test_emp%';

- column:s | type:s
-birth_date | TIMESTAMP
-dep | STRUCT
-dep.dep_id | VARCHAR
-dep.dep_name | VARCHAR
-dep.dep_name.keyword | VARCHAR
-dep.from_date | TIMESTAMP
-dep.to_date | TIMESTAMP
-emp_no | INTEGER
-first_name | VARCHAR
-first_name.keyword | VARCHAR
-gender | VARCHAR
-hire_date | TIMESTAMP
-languages | TINYINT
-last_name | VARCHAR
-last_name.keyword | VARCHAR
-salary | INTEGER
+       column       |     type      |    mapping
+--------------------+---------------+---------------
+birth_date          |TIMESTAMP      |DATE
+dep                 |STRUCT         |NESTED
+dep.dep_id          |VARCHAR        |KEYWORD
+dep.dep_name        |VARCHAR        |TEXT
+dep.dep_name.keyword|VARCHAR        |KEYWORD
+dep.from_date       |TIMESTAMP      |DATE
+dep.to_date         |TIMESTAMP      |DATE
+emp_no              |INTEGER        |INTEGER
+extra_gender        |VARCHAR        |KEYWORD
+extra_no            |INTEGER        |INTEGER
+first_name          |VARCHAR        |TEXT
+first_name.keyword  |VARCHAR        |KEYWORD
+gender              |VARCHAR        |KEYWORD
+hire_date           |TIMESTAMP      |DATE
+languages           |TINYINT        |BYTE
+last_name           |VARCHAR        |TEXT
+last_name.keyword   |VARCHAR        |KEYWORD
+salary              |INTEGER        |INTEGER
 ;

 describeSimpleIdentifier
 DESCRIBE "test_emp";

- column:s | type:s
-birth_date | TIMESTAMP
-dep | STRUCT
-dep.dep_id | VARCHAR
-dep.dep_name | VARCHAR
-dep.dep_name.keyword | VARCHAR
-dep.from_date | TIMESTAMP
-dep.to_date | TIMESTAMP
-emp_no | INTEGER
-first_name | VARCHAR
-first_name.keyword | VARCHAR
-gender | VARCHAR
-hire_date | TIMESTAMP
-languages | TINYINT
-last_name | VARCHAR
-last_name.keyword | VARCHAR
-salary | INTEGER
+       column       |     type      |    mapping
+--------------------+---------------+---------------
+birth_date          |TIMESTAMP      |DATE
+dep                 |STRUCT         |NESTED
+dep.dep_id          |VARCHAR        |KEYWORD
+dep.dep_name        |VARCHAR        |TEXT
+dep.dep_name.keyword|VARCHAR        |KEYWORD
+dep.from_date       |TIMESTAMP      |DATE
+dep.to_date         |TIMESTAMP      |DATE
+emp_no              |INTEGER        |INTEGER
+first_name          |VARCHAR        |TEXT
+first_name.keyword  |VARCHAR        |KEYWORD
+gender              |VARCHAR        |KEYWORD
+hire_date           |TIMESTAMP      |DATE
+languages           |TINYINT        |BYTE
+last_name           |VARCHAR        |TEXT
+last_name.keyword   |VARCHAR        |KEYWORD
+salary              |INTEGER        |INTEGER
 ;

-describeIncludeExcludeIdentifier
-DESCRIBE "test_emp*,-test_emp_*";
+// NB: need to pursue how the resolution is done
+// should aliases be included or excluded?
+describeIncludeExcludeIdentifier-Ignore
+DESCRIBE "test_*,-test_alias*";

-column:s | type:s
-
-birth_date | TIMESTAMP
-dep | STRUCT
-dep.dep_id | VARCHAR
-dep.dep_name | VARCHAR
-dep.dep_name.keyword | VARCHAR
-dep.from_date | TIMESTAMP
-dep.to_date | TIMESTAMP
-emp_no | INTEGER
-first_name | VARCHAR
-first_name.keyword | VARCHAR
-gender | VARCHAR
-hire_date | TIMESTAMP
-languages | TINYINT
-last_name | VARCHAR
-last_name.keyword | VARCHAR
-salary | INTEGER
+       column       |     type      |    mapping
+--------------------+---------------+---------------
+birth_date          |TIMESTAMP      |DATE
+dep                 |STRUCT         |NESTED
+dep.dep_id          |VARCHAR        |KEYWORD
+dep.dep_name        |VARCHAR        |TEXT
+dep.dep_name.keyword|VARCHAR        |KEYWORD
+dep.from_date       |TIMESTAMP      |DATE
+dep.to_date         |TIMESTAMP      |DATE
+emp_no              |INTEGER        |INTEGER
+first_name          |VARCHAR        |TEXT
+first_name.keyword  |VARCHAR        |KEYWORD
+gender              |VARCHAR        |KEYWORD
+hire_date           |TIMESTAMP      |DATE
+languages           |TINYINT        |BYTE
+last_name           |VARCHAR        |TEXT
+last_name.keyword   |VARCHAR        |KEYWORD
+salary              |INTEGER        |INTEGER
 ;
-
diff --git a/x-pack/qa/sql/src/main/resources/docs.csv-spec b/x-pack/qa/sql/src/main/resources/docs.csv-spec
index d225c3d35da..4d5c8c26b8c 100644
--- a/x-pack/qa/sql/src/main/resources/docs.csv-spec
+++ b/x-pack/qa/sql/src/main/resources/docs.csv-spec
@@ -12,24 +12,24 @@ describeTable
 // tag::describeTable
 DESCRIBE emp;

-       column       |     type      
---------------------+---------------
-birth_date          |TIMESTAMP
-dep                 |STRUCT
-dep.dep_id          |VARCHAR
-dep.dep_name        |VARCHAR
-dep.dep_name.keyword|VARCHAR
-dep.from_date       |TIMESTAMP
-dep.to_date         |TIMESTAMP
-emp_no              |INTEGER
-first_name          |VARCHAR
-first_name.keyword  |VARCHAR
-gender              |VARCHAR
-hire_date           |TIMESTAMP
-languages           |TINYINT
-last_name           |VARCHAR
-last_name.keyword   |VARCHAR
-salary              |INTEGER
+       column       |     type      |    mapping
+--------------------+---------------+---------------
+birth_date          |TIMESTAMP      |DATE
+dep                 |STRUCT         |NESTED
+dep.dep_id          |VARCHAR        |KEYWORD
+dep.dep_name        |VARCHAR        |TEXT
+dep.dep_name.keyword|VARCHAR        |KEYWORD
+dep.from_date       |TIMESTAMP      |DATE
+dep.to_date         |TIMESTAMP      |DATE
+emp_no              |INTEGER        |INTEGER
+first_name          |VARCHAR        |TEXT
+first_name.keyword  |VARCHAR        |KEYWORD
+gender              |VARCHAR        |KEYWORD
+hire_date           |TIMESTAMP      |DATE
+languages           |TINYINT        |BYTE
+last_name           |VARCHAR        |TEXT
+last_name.keyword   |VARCHAR        |KEYWORD
+salary              |INTEGER        |INTEGER
 // end::describeTable
 ;

@@ -51,24 +51,24 @@ showColumns
 // tag::showColumns
 SHOW COLUMNS IN emp;

-       column       |     type      
---------------------+---------------
-birth_date          |TIMESTAMP
-dep                 |STRUCT
-dep.dep_id          |VARCHAR
-dep.dep_name        |VARCHAR
-dep.dep_name.keyword|VARCHAR
-dep.from_date       |TIMESTAMP
-dep.to_date         |TIMESTAMP
-emp_no              |INTEGER
-first_name          |VARCHAR
-first_name.keyword  |VARCHAR
-gender              |VARCHAR
-hire_date           |TIMESTAMP
-languages           |TINYINT
-last_name           |VARCHAR
-last_name.keyword   |VARCHAR
-salary              |INTEGER
+       column       |     type      |    mapping
+--------------------+---------------+---------------
+birth_date          |TIMESTAMP      |DATE
+dep                 |STRUCT         |NESTED
+dep.dep_id          |VARCHAR        |KEYWORD
+dep.dep_name        |VARCHAR        |TEXT
+dep.dep_name.keyword|VARCHAR        |KEYWORD
+dep.from_date       |TIMESTAMP      |DATE
+dep.to_date         |TIMESTAMP      |DATE
+emp_no              |INTEGER        |INTEGER
+first_name          |VARCHAR        |TEXT
+first_name.keyword  |VARCHAR        |KEYWORD
+gender              |VARCHAR        |KEYWORD
+hire_date           |TIMESTAMP      |DATE
+languages           |TINYINT        |BYTE
+last_name           |VARCHAR        |TEXT
+last_name.keyword   |VARCHAR        |KEYWORD
+salary              |INTEGER        |INTEGER
 // end::showColumns
 ;

@@ -1164,6 +1164,33 @@ SELECT YEAR(CAST('2018-05-19T11:23:45Z' AS TIMESTAMP)) AS year;
 // end::conversionStringToDateCast
 ;

+///////////////////////////////
+//
+// Convert
+//
+///////////////////////////////
+
+conversionStringToIntConvertESDataType
+// tag::conversionStringToIntConvertESDataType
+SELECT CONVERT('123', INTEGER) AS int;
+
+      int      
+---------------
+123
+// end::conversionStringToIntConvertESDataType
+;
+
+conversionStringToIntConvertODBCDataType
+// tag::conversionStringToIntConvertODBCDataType
+SELECT CONVERT('123', SQL_INTEGER) AS int;
+
+      int      
+---------------
+123
+// end::conversionStringToIntConvertODBCDataType
+;
+
+
 ///////////////////////////////
 //
 // Math
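The tagged snippets above document CONVERT as the ODBC-flavored spelling of CAST, accepting either the ES type name (INTEGER) or the ODBC one (SQL_INTEGER). A sketch of the same call through JDBC (hypothetical fragment reusing this suite's esJdbc() helper and JUnit's assertEquals):

    try (Connection c = esJdbc();
         Statement s = c.createStatement();
         ResultSet rs = s.executeQuery("SELECT CONVERT('123', SQL_INTEGER) AS i")) {
        rs.next();
        assertEquals(123, rs.getInt("i")); // same result as CAST('123' AS INTEGER)
    }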
diff --git a/x-pack/qa/sql/src/main/resources/filter.sql-spec b/x-pack/qa/sql/src/main/resources/filter.sql-spec
index 5112fbc1551..1a564ecb9ad 100644
--- a/x-pack/qa/sql/src/main/resources/filter.sql-spec
+++ b/x-pack/qa/sql/src/main/resources/filter.sql-spec
@@ -78,3 +78,21 @@ SELECT last_name l FROM "test_emp" WHERE emp_no BETWEEN 9990 AND 10003 ORDER BY
 // end::whereBetween
 whereNotBetween
 SELECT last_name l FROM "test_emp" WHERE emp_no NOT BETWEEN 10010 AND 10020 ORDER BY emp_no LIMIT 5;
+
+//
+// IN expression
+//
+whereWithInAndOneValue
+SELECT last_name l FROM "test_emp" WHERE emp_no IN (10001);
+whereWithInAndMultipleValues
+// tag::whereWithInAndMultipleValues
+SELECT last_name l FROM "test_emp" WHERE emp_no IN (10000, 10001, 10002, 999) ORDER BY emp_no LIMIT 5;
+// end::whereWithInAndMultipleValues
+
+whereWithInAndOneValueWithNegation
+SELECT last_name l FROM "test_emp" WHERE emp_no NOT IN (10001) ORDER BY emp_no LIMIT 5;
+whereWithInAndMultipleValuesAndNegation
+SELECT last_name l FROM "test_emp" WHERE emp_no NOT IN (10000, 10001, 10002, 999) ORDER BY emp_no LIMIT 5;
+
+whereWithInAndComplexFunctions
+SELECT last_name l FROM "test_emp" WHERE emp_no NOT IN (10000, abs(2 - 10003), 10002, 999) AND lcase(first_name) IN ('sumant', 'mary', 'patricio', 'No''Match') ORDER BY emp_no LIMIT 5;
diff --git a/x-pack/qa/sql/src/main/resources/nested.csv-spec b/x-pack/qa/sql/src/main/resources/nested.csv-spec
index 0a188bd7faf..428ed781204 100644
--- a/x-pack/qa/sql/src/main/resources/nested.csv-spec
+++ b/x-pack/qa/sql/src/main/resources/nested.csv-spec
@@ -6,24 +6,24 @@ describeParent
 DESCRIBE test_emp;

-column | type
-
-birth_date | TIMESTAMP
-dep | STRUCT
-dep.dep_id | VARCHAR
-dep.dep_name | VARCHAR
-dep.dep_name.keyword | VARCHAR
-dep.from_date | TIMESTAMP
-dep.to_date | TIMESTAMP
-emp_no | INTEGER
-first_name | VARCHAR
-first_name.keyword | VARCHAR
-gender | VARCHAR
-hire_date | TIMESTAMP
-languages | TINYINT
-last_name | VARCHAR
-last_name.keyword | VARCHAR
-salary | INTEGER
+       column       |     type      |    mapping
+--------------------+---------------+---------------
+birth_date          |TIMESTAMP      |DATE
+dep                 |STRUCT         |NESTED
+dep.dep_id          |VARCHAR        |KEYWORD
+dep.dep_name        |VARCHAR        |TEXT
+dep.dep_name.keyword|VARCHAR        |KEYWORD
+dep.from_date       |TIMESTAMP      |DATE
+dep.to_date         |TIMESTAMP      |DATE
+emp_no              |INTEGER        |INTEGER
+first_name          |VARCHAR        |TEXT
+first_name.keyword  |VARCHAR        |KEYWORD
+gender              |VARCHAR        |KEYWORD
+hire_date           |TIMESTAMP      |DATE
+languages           |TINYINT        |BYTE
+last_name           |VARCHAR        |TEXT
+last_name.keyword   |VARCHAR        |KEYWORD
+salary              |INTEGER        |INTEGER
 ;

 // disable until we figure out how to use field names with . in their name
diff --git a/x-pack/qa/sql/src/main/resources/select.csv-spec b/x-pack/qa/sql/src/main/resources/select.csv-spec
new file mode 100644
index 00000000000..b3888abd47b
--- /dev/null
+++ b/x-pack/qa/sql/src/main/resources/select.csv-spec
@@ -0,0 +1,67 @@
+// SELECT with IN
+inWithLiterals
+SELECT 1 IN (1, 2, 3), 1 IN (2, 3);
+
+ 1 IN (1, 2, 3)  |  1 IN (2, 3)
+-----------------+-------------
+true             |false
+;
+
+inWithLiteralsAndFunctions
+SELECT 1 IN (2 - 1, 2, 3), abs(-1) IN (2, 3, abs(4 - 5));
+
+ 1 IN (1, 2, 3)  |  1 IN (2, 3)
+-----------------+-------------
+true             |false
+;
+
+
+inWithLiteralsAndNegation
+SELECT NOT 1 IN (1, 1 + 1, 3), NOT 1 IN (2, 3);
+
+ 1 IN (1, 2, 3)  |  1 IN (2, 3)
+-----------------+-------------
+false            |true
+;
+
+
+//
+// SELECT with IN and table columns
+//
+inWithTableColumn
+SELECT emp_no IN (10000, 10001, 10002) FROM test_emp ORDER BY 1;
+
+ emp_no
+-------
+10001
+10002
+;
+
+inWithTableColumnAndFunction
+SELECT emp_no IN (10000, 10000 + 1, abs(-10000 - 2)) FROM test_emp;
+
+ emp_no
+-------
+10001
+10002
+;
+
+inWithTableColumnAndNegation
+SELECT emp_no NOT IN (10000, 10000 + 1, 10002) FROM test_emp ORDER BY 1 LIMIT 3;
+
+ emp_no
+-------
+10003
+10004
+10005
+;
+
+inWithTableColumnAndComplexFunctions
+SELECT 1 IN (1, abs(2 - 4), 3) OR emp_no NOT IN (10000, 10000 + 1, 10002) FROM test_emp ORDER BY 1 LIMIT 3;
+
+ emp_no
+-------
+10003
+10004
+10005
+;
\ No newline at end of file
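Together with the filter specs earlier, the new select.csv-spec exercises IN both as a WHERE predicate and as a boolean expression in the projection list. A hedged JDBC fragment touching both positions (fixture data and esJdbc() helper from this suite):

    try (Connection c = esJdbc();
         PreparedStatement ps = c.prepareStatement(
             "SELECT last_name, emp_no IN (10001, 10002) AS hit "
                 + "FROM test_emp WHERE emp_no NOT IN (10003, 10004) ORDER BY emp_no LIMIT 5");
         ResultSet rs = ps.executeQuery()) {
        while (rs.next()) {
            // IN in the SELECT list evaluates to a boolean per row
            boolean hit = rs.getBoolean("hit");
        }
    }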