diff --git a/Vagrantfile b/Vagrantfile index de344e18183..7322399fed5 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -115,11 +115,6 @@ Vagrant.configure(2) do |config| 'opensuse-42'.tap do |box| config.vm.define box, define_opts do |config| config.vm.box = 'elastic/opensuse-42-x86_64' - - # https://github.com/elastic/elasticsearch/issues/30295 - config.vm.provider 'virtualbox' do |vbox| - vbox.customize ['storagectl', :id, '--name', 'SATA Controller', '--hostiocache', 'on'] - end suse_common config, box end end diff --git a/build.gradle b/build.gradle index c413c897ff6..341fbc04c0f 100644 --- a/build.gradle +++ b/build.gradle @@ -87,8 +87,15 @@ subprojects { } } } + repositories { + maven { + name = 'localTest' + url = "${rootProject.buildDir}/local-test-repo" + } + } } } + plugins.withType(BuildPlugin).whenPluginAdded { project.licenseFile = project.rootProject.file('licenses/APACHE-LICENSE-2.0.txt') project.noticeFile = project.rootProject.file('NOTICE.txt') @@ -228,6 +235,7 @@ subprojects { "org.elasticsearch.client:elasticsearch-rest-high-level-client:${version}": ':client:rest-high-level', "org.elasticsearch.client:test:${version}": ':client:test', "org.elasticsearch.client:transport:${version}": ':client:transport', + "org.elasticsearch.plugin:elasticsearch-scripting-painless-spi:${version}": ':modules:lang-painless:spi', "org.elasticsearch.test:framework:${version}": ':test:framework', "org.elasticsearch.distribution.integ-test-zip:elasticsearch:${version}": ':distribution:archives:integ-test-zip', "org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:archives:zip', diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 5775b2b6323..967c2e27ee8 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -162,11 +162,24 @@ if (project != rootProject) { // it's fine as we run them as part of :buildSrc test.enabled = false task integTest(type: Test) { + // integration test requires the local testing repo for example plugin builds + dependsOn project.rootProject.allprojects.collect { + it.tasks.matching { it.name == 'publishNebulaPublicationToLocalTestRepository'} + } exclude "**/*Tests.class" include "**/*IT.class" testClassesDirs = sourceSets.test.output.classesDirs classpath = sourceSets.test.runtimeClasspath inputs.dir(file("src/testKit")) + // tell BuildExamplePluginsIT where to find the example plugins + systemProperty ( + 'test.build-tools.plugin.examples', + files( + project(':example-plugins').subprojects.collect { it.projectDir } + ).asPath, + ) + systemProperty 'test.local-test-repo-path', "${rootProject.buildDir}/local-test-repo" + systemProperty 'test.lucene-snapshot-revision', (versions.lucene =~ /\w+-snapshot-([a-z0-9]+)/)[0][1] } check.dependsOn(integTest) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 5a962a5138b..bf3ffcabe2f 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -87,6 +87,8 @@ class BuildPlugin implements Plugin { project.pluginManager.apply('nebula.info-scm') project.pluginManager.apply('nebula.info-jar') + project.getTasks().create("buildResources", ExportElasticsearchBuildResourcesTask) + globalBuildInfo(project) configureRepositories(project) configureConfigurations(project) @@ -101,6 +103,7 @@ class BuildPlugin implements Plugin { configureDependenciesInfo(project) } + /** Performs checks on the 
build environment and prints information about the build environment. */ static void globalBuildInfo(Project project) { if (project.rootProject.ext.has('buildChecksDone') == false) { @@ -116,12 +119,14 @@ class BuildPlugin implements Plugin { final Map javaVersions = [:] for (int version = 7; version <= Integer.parseInt(minimumCompilerVersion.majorVersion); version++) { - javaVersions.put(version, findJavaHome(version)); + if(System.getenv(getJavaHomeEnvVarName(version.toString())) != null) { + javaVersions.put(version, findJavaHome(version.toString())); + } } String javaVendor = System.getProperty('java.vendor') - String javaVersion = System.getProperty('java.version') - String gradleJavaVersionDetails = "${javaVendor} ${javaVersion}" + + String gradleJavaVersion = System.getProperty('java.version') + String gradleJavaVersionDetails = "${javaVendor} ${gradleJavaVersion}" + " [${System.getProperty('java.vm.name')} ${System.getProperty('java.vm.version')}]" String compilerJavaVersionDetails = gradleJavaVersionDetails @@ -144,33 +149,33 @@ class BuildPlugin implements Plugin { // Build debugging info println '=======================================' println 'Elasticsearch Build Hamster says Hello!' - println '=======================================' println " Gradle Version : ${project.gradle.gradleVersion}" println " OS Info : ${System.getProperty('os.name')} ${System.getProperty('os.version')} (${System.getProperty('os.arch')})" if (gradleJavaVersionDetails != compilerJavaVersionDetails || gradleJavaVersionDetails != runtimeJavaVersionDetails) { - println " JDK Version (gradle) : ${gradleJavaVersionDetails}" - println " JAVA_HOME (gradle) : ${gradleJavaHome}" - println " JDK Version (compile) : ${compilerJavaVersionDetails}" - println " JAVA_HOME (compile) : ${compilerJavaHome}" - println " JDK Version (runtime) : ${runtimeJavaVersionDetails}" - println " JAVA_HOME (runtime) : ${runtimeJavaHome}" + println " Compiler JDK Version : ${getPaddedMajorVersion(compilerJavaVersionEnum)} (${compilerJavaVersionDetails})" + println " Compiler java.home : ${compilerJavaHome}" + println " Runtime JDK Version : ${getPaddedMajorVersion(runtimeJavaVersionEnum)} (${runtimeJavaVersionDetails})" + println " Runtime java.home : ${runtimeJavaHome}" + println " Gradle JDK Version : ${getPaddedMajorVersion(JavaVersion.toVersion(gradleJavaVersion))} (${gradleJavaVersionDetails})" + println " Gradle java.home : ${gradleJavaHome}" } else { - println " JDK Version : ${gradleJavaVersionDetails}" + println " JDK Version : ${getPaddedMajorVersion(JavaVersion.toVersion(gradleJavaVersion))} (${gradleJavaVersionDetails})" println " JAVA_HOME : ${gradleJavaHome}" } println " Random Testing Seed : ${project.testSeed}" + println '=======================================' // enforce Java version if (compilerJavaVersionEnum < minimumCompilerVersion) { final String message = - "the environment variable JAVA_HOME must be set to a JDK installation directory for Java ${minimumCompilerVersion}" + + "the compiler java.home must be set to a JDK installation directory for Java ${minimumCompilerVersion}" + " but is [${compilerJavaHome}] corresponding to [${compilerJavaVersionEnum}]" throw new GradleException(message) } if (runtimeJavaVersionEnum < minimumRuntimeVersion) { final String message = - "the environment variable RUNTIME_JAVA_HOME must be set to a JDK installation directory for Java ${minimumRuntimeVersion}" + + "the runtime java.home must be set to a JDK installation directory for Java ${minimumRuntimeVersion}" + " but is 
[${runtimeJavaHome}] corresponding to [${runtimeJavaVersionEnum}]"
             throw new GradleException(message)
         }
@@ -205,6 +210,7 @@ class BuildPlugin implements Plugin {
             project.rootProject.ext.minimumCompilerVersion = minimumCompilerVersion
             project.rootProject.ext.minimumRuntimeVersion = minimumRuntimeVersion
             project.rootProject.ext.inFipsJvm = inFipsJvm
+            project.rootProject.ext.gradleJavaVersion = JavaVersion.toVersion(gradleJavaVersion)
         }
 
         project.targetCompatibility = project.rootProject.ext.minimumRuntimeVersion
@@ -217,11 +223,20 @@
         project.ext.runtimeJavaVersion = project.rootProject.ext.runtimeJavaVersion
         project.ext.javaVersions = project.rootProject.ext.javaVersions
         project.ext.inFipsJvm = project.rootProject.ext.inFipsJvm
+        project.ext.gradleJavaVersion = project.rootProject.ext.gradleJavaVersion
+    }
+
+    private static String getPaddedMajorVersion(JavaVersion compilerJavaVersionEnum) {
+        compilerJavaVersionEnum.getMajorVersion().toString().padLeft(2)
     }
 
     private static String findCompilerJavaHome() {
-        final String javaHome = System.getenv('JAVA_HOME')
-        if (javaHome == null) {
+        String compilerJavaHome = System.getenv('JAVA_HOME')
+        final String compilerJavaProperty = System.getProperty('compiler.java')
+        if (compilerJavaProperty != null) {
+            compilerJavaHome = findJavaHome(compilerJavaProperty)
+        }
+        if (compilerJavaHome == null) {
             if (System.getProperty("idea.active") != null || System.getProperty("eclipse.launcher") != null) {
                 // IntelliJ does not set JAVA_HOME, so we use the JDK that Gradle was run with
                 return Jvm.current().javaHome
@@ -233,11 +248,24 @@ class BuildPlugin implements Plugin {
                 )
             }
         }
-        return javaHome
+        return compilerJavaHome
     }
 
-    private static String findJavaHome(int version) {
-        return System.getenv('JAVA' + version + '_HOME')
+    private static String findJavaHome(String version) {
+        String versionedVarName = getJavaHomeEnvVarName(version)
+        String versionedJavaHome = System.getenv(versionedVarName);
+        if (versionedJavaHome == null) {
+            throw new GradleException(
+                "$versionedVarName must be set to build Elasticsearch. " +
+                    "Note that if the variable was just set you might have to run `./gradlew --stop` for " +
+                    "it to be picked up. See https://github.com/elastic/elasticsearch/issues/31399 for details."
+            )
+        }
+        return versionedJavaHome
+    }
+
+    private static String getJavaHomeEnvVarName(String version) {
+        return 'JAVA' + version + '_HOME'
     }
 
     /** Adds a check before the Gradle execution phase which ensures the java home for the given java version is set.
*/ @@ -271,7 +299,10 @@ class BuildPlugin implements Plugin { } private static String findRuntimeJavaHome(final String compilerJavaHome) { - assert compilerJavaHome != null + String runtimeJavaProperty = System.getProperty("runtime.java") + if (runtimeJavaProperty != null) { + return findJavaHome(runtimeJavaProperty) + } return System.getenv('RUNTIME_JAVA_HOME') ?: compilerJavaHome } @@ -405,6 +436,10 @@ class BuildPlugin implements Plugin { repos.mavenLocal() } repos.mavenCentral() + repos.maven { + name "elastic" + url "https://artifacts.elastic.co/maven" + } String luceneVersion = VersionProperties.lucene if (luceneVersion.contains('-snapshot')) { // extract the revision number from the version with a regex matcher @@ -519,7 +554,7 @@ class BuildPlugin implements Plugin { project.publishing { publications { nebula(MavenPublication) { - artifact project.tasks.shadowJar + artifacts = [ project.tasks.shadowJar ] artifactId = project.archivesBaseName /* * Configure the pom to include the "shadow" as compile dependencies @@ -549,7 +584,6 @@ class BuildPlugin implements Plugin { } } } - } /** Adds compiler settings to the project */ @@ -764,6 +798,14 @@ class BuildPlugin implements Plugin { systemProperty 'tests.task', path systemProperty 'tests.security.manager', 'true' systemProperty 'jna.nosys', 'true' + // TODO: remove this deprecation compatibility setting for 7.0 + systemProperty 'es.aggregations.enable_scripted_metric_agg_param', 'false' + systemProperty 'compiler.java', project.ext.compilerJavaVersion.getMajorVersion() + if (project.ext.inFipsJvm) { + systemProperty 'runtime.java', project.ext.runtimeJavaVersion.getMajorVersion() + "FIPS" + } else { + systemProperty 'runtime.java', project.ext.runtimeJavaVersion.getMajorVersion() + } // TODO: remove setting logging level via system property systemProperty 'tests.logger.level', 'WARN' for (Map.Entry property : System.properties.entrySet()) { @@ -777,11 +819,19 @@ class BuildPlugin implements Plugin { systemProperty property.getKey(), property.getValue() } } + + // TODO: remove this once joda time is removed from scripting in 7.0 + systemProperty 'es.scripting.use_java_time', 'true' + + // TODO: remove this once ctx isn't added to update script params in 7.0 + systemProperty 'es.scripting.update.ctx_in_params', 'false' + // Set the system keystore/truststore password if we're running tests in a FIPS-140 JVM if (project.inFipsJvm) { systemProperty 'javax.net.ssl.trustStorePassword', 'password' systemProperty 'javax.net.ssl.keyStorePassword', 'password' } + boolean assertionsEnabled = Boolean.parseBoolean(System.getProperty('tests.asserts', 'true')) enableSystemAssertions assertionsEnabled enableAssertions assertionsEnabled diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy index ec012633f08..8c0eedeb6f5 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy @@ -284,6 +284,10 @@ public class SnippetsTask extends DefaultTask { contents.append(line).append('\n') return } + // Allow line continuations for console snippets within lists + if (snippet != null && line.trim() == '+') { + return + } // Just finished emit() } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index 
00f178fda9c..6f42e41beaa 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -25,7 +25,6 @@ import org.elasticsearch.gradle.NoticeTask import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RunTask import org.gradle.api.InvalidUserDataException -import org.gradle.api.JavaVersion import org.gradle.api.Project import org.gradle.api.Task import org.gradle.api.XmlProvider @@ -39,7 +38,6 @@ import java.nio.file.Path import java.nio.file.StandardCopyOption import java.util.regex.Matcher import java.util.regex.Pattern - /** * Encapsulates build configuration for an Elasticsearch plugin. */ diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy index 6cfe44c8068..c250d7695a8 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy @@ -20,6 +20,7 @@ package org.elasticsearch.gradle.plugin import org.gradle.api.Project import org.gradle.api.tasks.Input +import org.gradle.api.tasks.InputFile /** * A container for plugin properties that will be written to the plugin descriptor, for easy @@ -55,18 +56,39 @@ class PluginPropertiesExtension { boolean requiresKeystore = false /** A license file that should be included in the built plugin zip. */ - @Input - File licenseFile = null + private File licenseFile = null /** * A notice file that should be included in the built plugin zip. This will be * extended with notices from the {@code licenses/} directory. */ - @Input - File noticeFile = null + private File noticeFile = null + + Project project = null PluginPropertiesExtension(Project project) { name = project.name version = project.version + this.project = project + } + + @InputFile + File getLicenseFile() { + return licenseFile + } + + void setLicenseFile(File licenseFile) { + project.ext.licenseFile = licenseFile + this.licenseFile = licenseFile + } + + @InputFile + File getNoticeFile() { + return noticeFile + } + + void setNoticeFile(File noticeFile) { + project.ext.noticeFile = noticeFile + this.noticeFile = noticeFile } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy index 8e913153f05..9588f77a71d 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy @@ -23,7 +23,6 @@ import org.gradle.api.InvalidUserDataException import org.gradle.api.Task import org.gradle.api.tasks.Copy import org.gradle.api.tasks.OutputFile - /** * Creates a plugin descriptor. 
*/ diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index 3709805680d..42dc29df058 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -20,10 +20,15 @@ package org.elasticsearch.gradle.precommit import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin +import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask +import org.gradle.api.JavaVersion import org.gradle.api.Project import org.gradle.api.Task +import org.gradle.api.file.FileCollection import org.gradle.api.plugins.JavaBasePlugin import org.gradle.api.plugins.quality.Checkstyle +import org.gradle.api.tasks.JavaExec +import org.gradle.api.tasks.StopExecutionException /** * Validation tasks which should be run before committing. These run before tests. @@ -40,7 +45,11 @@ class PrecommitTasks { project.tasks.create('licenseHeaders', LicenseHeadersTask.class), project.tasks.create('filepermissions', FilePermissionsTask.class), project.tasks.create('jarHell', JarHellTask.class), - project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class)] + project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class) + ] + + // Configure it but don't add it as a dependency yet + configureForbiddenApisCli(project) // tasks with just tests don't need dependency licenses, so this flag makes adding // the task optional @@ -93,12 +102,66 @@ class PrecommitTasks { signaturesURLs = project.forbiddenApis.signaturesURLs + [ getClass().getResource('/forbidden/es-server-signatures.txt') ] } + // forbidden apis doesn't support Java 11, so stop at 10 + String targetMajorVersion = (project.compilerJavaVersion.compareTo(JavaVersion.VERSION_1_10) > 0 ? 
+ JavaVersion.VERSION_1_10 : + project.compilerJavaVersion).getMajorVersion() + targetCompatibility = Integer.parseInt(targetMajorVersion) >= 9 ?targetMajorVersion : "1.${targetMajorVersion}" } Task forbiddenApis = project.tasks.findByName('forbiddenApis') forbiddenApis.group = "" // clear group, so this does not show up under verification tasks + return forbiddenApis } + private static Task configureForbiddenApisCli(Project project) { + project.configurations.create("forbiddenApisCliJar") + project.dependencies { + forbiddenApisCliJar 'de.thetaphi:forbiddenapis:2.5' + } + Task forbiddenApisCli = project.tasks.create('forbiddenApisCli') + + project.sourceSets.forEach { sourceSet -> + forbiddenApisCli.dependsOn( + project.tasks.create(sourceSet.getTaskName('forbiddenApisCli', null), JavaExec) { + ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources') + dependsOn(buildResources) + classpath = project.files( + project.configurations.forbiddenApisCliJar, + sourceSet.compileClasspath, + sourceSet.runtimeClasspath + ) + main = 'de.thetaphi.forbiddenapis.cli.CliMain' + executable = "${project.runtimeJavaHome}/bin/java" + args "-b", 'jdk-unsafe-1.8' + args "-b", 'jdk-deprecated-1.8' + args "-b", 'jdk-non-portable' + args "-b", 'jdk-system-out' + args "-f", buildResources.copy("forbidden/jdk-signatures.txt") + args "-f", buildResources.copy("forbidden/es-all-signatures.txt") + args "--suppressannotation", '**.SuppressForbidden' + if (sourceSet.name == 'test') { + args "-f", buildResources.copy("forbidden/es-test-signatures.txt") + args "-f", buildResources.copy("forbidden/http-signatures.txt") + } else { + args "-f", buildResources.copy("forbidden/es-server-signatures.txt") + } + dependsOn sourceSet.classesTaskName + doFirst { + // Forbidden APIs expects only existing dirs, and requires at least one + FileCollection existingOutputs = sourceSet.output.classesDirs + .filter { it.exists() } + if (existingOutputs.isEmpty()) { + throw new StopExecutionException("${sourceSet.name} has no outputs") + } + existingOutputs.forEach { args "-d", it } + } + } + ) + } + return forbiddenApisCli + } + private static Task configureCheckstyle(Project project) { // Always copy the checkstyle configuration files to 'buildDir/checkstyle' since the resources could be located in a jar // file. If the resources are located in a jar, Gradle will fail when it tries to turn the URL into a file diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index 0dd56b86332..aaf4e468182 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -177,6 +177,12 @@ class NodeInfo { javaVersion = 8 } else if (nodeVersion.onOrAfter("6.2.0") && nodeVersion.before("6.3.0")) { javaVersion = 9 + } else if (project.inFipsJvm && nodeVersion.onOrAfter("6.3.0") && nodeVersion.before("6.4.0")) { + /* + * Elasticsearch versions before 6.4.0 cannot be run in a FIPS-140 JVM. If we're running + * bwc tests in a FIPS-140 JVM, ensure that the pre v6.4.0 nodes use a Java 10 JVM instead. 
+ */ + javaVersion = 10 } args.addAll("-E", "node.portsfile=true") diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index d2101c48aab..2838849981a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -31,6 +31,7 @@ import org.gradle.api.provider.Provider import org.gradle.api.tasks.Copy import org.gradle.api.tasks.Input import org.gradle.api.tasks.TaskState +import org.gradle.plugins.ide.idea.IdeaPlugin import java.nio.charset.StandardCharsets import java.nio.file.Files @@ -243,10 +244,12 @@ public class RestIntegTestTask extends DefaultTask { } } } - project.idea { - module { - if (scopes.TEST != null) { - scopes.TEST.plus.add(project.configurations.restSpec) + if (project.plugins.hasPlugin(IdeaPlugin)) { + project.idea { + module { + if (scopes.TEST != null) { + scopes.TEST.plus.add(project.configurations.restSpec) + } } } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy index 390821c80ff..a2484e9c5fc 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy @@ -22,15 +22,14 @@ package org.elasticsearch.gradle.test import com.carrotsearch.gradle.junit4.RandomizedTestingPlugin import org.elasticsearch.gradle.BuildPlugin +import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.precommit.PrecommitTasks import org.gradle.api.InvalidUserDataException import org.gradle.api.Plugin import org.gradle.api.Project -import org.gradle.api.Task import org.gradle.api.plugins.JavaBasePlugin import org.gradle.api.tasks.compile.JavaCompile - /** * Configures the build to compile tests against Elasticsearch's test framework * and run REST tests. Use BuildPlugin if you want to build main code as well @@ -48,6 +47,7 @@ public class StandaloneRestTestPlugin implements Plugin { project.pluginManager.apply(JavaBasePlugin) project.pluginManager.apply(RandomizedTestingPlugin) + project.getTasks().create("buildResources", ExportElasticsearchBuildResourcesTask) BuildPlugin.globalBuildInfo(project) BuildPlugin.configureRepositories(project) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTask.java new file mode 100644 index 00000000000..03c18f54e67 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTask.java @@ -0,0 +1,115 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle; + +import org.gradle.api.DefaultTask; +import org.gradle.api.GradleException; +import org.gradle.api.file.DirectoryProperty; +import org.gradle.api.logging.Logger; +import org.gradle.api.logging.Logging; +import org.gradle.api.tasks.Classpath; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.OutputDirectory; +import org.gradle.api.tasks.SkipWhenEmpty; +import org.gradle.api.tasks.StopExecutionException; +import org.gradle.api.tasks.TaskAction; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +/** + * Export Elasticsearch build resources to configurable paths + *

+ * Will overwrite existing files and create missing directories.
+ * Useful for resources that need to be passed to other processes through the filesystem
+ * or otherwise can't be consumed from the classpath.
+ */
+public class ExportElasticsearchBuildResourcesTask extends DefaultTask {
+
+    private final Logger logger = Logging.getLogger(ExportElasticsearchBuildResourcesTask.class);
+
+    private final Set<String> resources = new HashSet<>();
+
+    private DirectoryProperty outputDir;
+
+    public ExportElasticsearchBuildResourcesTask() {
+        outputDir = getProject().getLayout().directoryProperty(
+            getProject().getLayout().getBuildDirectory().dir("build-tools-exported")
+        );
+    }
+
+    @OutputDirectory
+    public DirectoryProperty getOutputDir() {
+        return outputDir;
+    }
+
+    @Input
+    @SkipWhenEmpty
+    public Set<String> getResources() {
+        return Collections.unmodifiableSet(resources);
+    }
+
+    @Classpath
+    public String getResourcesClasspath() {
+        // This will make sure the task is not considered up to date if the resources are changed.
+        logger.info("Classpath: {}", System.getProperty("java.class.path"));
+        return System.getProperty("java.class.path");
+    }
+
+    public void setOutputDir(DirectoryProperty outputDir) {
+        this.outputDir = outputDir;
+    }
+
+    public File copy(String resource) {
+        if (getState().getExecuted() || getState().getExecuting()) {
+            throw new GradleException("buildResources can't be configured after the task ran. " +
+                "Make sure the task is not used after configuration time"
+            );
+        }
+        resources.add(resource);
+        return outputDir.file(resource).get().getAsFile();
+    }
+
+    @TaskAction
+    public void doExport() {
+        if (resources.isEmpty()) {
+            throw new StopExecutionException();
+        }
+        resources.stream().parallel()
+            .forEach(resourcePath -> {
+                Path destination = outputDir.get().file(resourcePath).getAsFile().toPath();
+                try (InputStream is = getClass().getClassLoader().getResourceAsStream(resourcePath)) {
+                    Files.createDirectories(destination.getParent());
+                    if (is == null) {
+                        throw new GradleException("Can't export `" + resourcePath + "` from build-tools: not found");
+                    }
+                    Files.copy(is, destination);
+                } catch (IOException e) {
+                    throw new GradleException("Can't write resource `" + resourcePath + "` to " + destination, e);
+                }
+            });
+    }
+
+}
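For illustration, a minimal sketch of how a build script can consume the task above, assuming the `buildResources` task name registered by BuildPlugin and StandaloneRestTestPlugin; the consuming task name here is hypothetical, and the same pattern appears in the testKit project later in this diff:

    buildResources {
        copy 'checkstyle.xml'
    }

    // hypothetical consumer; copy() must be called at configuration time and
    // returns the File the resource will be exported to under build/build-tools-exported/
    task useBuildResource {
        dependsOn buildResources
        ext.checkstyleConfig = buildResources.copy('checkstyle.xml')
        doLast {
            println "Checkstyle config exported to ${checkstyleConfig}"
        }
    }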
diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml
index 8ab68b40c0a..420ed3b10b4 100644
--- a/buildSrc/src/main/resources/checkstyle_suppressions.xml
+++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -686,6 +686,7 @@
+
diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/BuildExamplePluginsIT.java b/buildSrc/src/test/java/org/elasticsearch/gradle/BuildExamplePluginsIT.java
new file mode 100644
index 00000000000..9b63d6f45e0
--- /dev/null
+++ b/buildSrc/src/test/java/org/elasticsearch/gradle/BuildExamplePluginsIT.java
@@ -0,0 +1,164 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.gradle;
+
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+import org.apache.commons.io.FileUtils;
+import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
+import org.gradle.testkit.runner.GradleRunner;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.rules.TemporaryFolder;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardOpenOption;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+public class BuildExamplePluginsIT extends GradleIntegrationTestCase {
+
+    private static List<File> EXAMPLE_PLUGINS = Collections.unmodifiableList(
+        Arrays.stream(
+            Objects.requireNonNull(System.getProperty("test.build-tools.plugin.examples"))
+                .split(File.pathSeparator)
+        ).map(File::new).collect(Collectors.toList())
+    );
+
+    @Rule
+    public TemporaryFolder tmpDir = new TemporaryFolder();
+
+    public final File examplePlugin;
+
+    public BuildExamplePluginsIT(File examplePlugin) {
+        this.examplePlugin = examplePlugin;
+    }
+
+    @BeforeClass
+    public static void assertProjectsExist() {
+        assertEquals(
+            EXAMPLE_PLUGINS,
+            EXAMPLE_PLUGINS.stream().filter(File::exists).collect(Collectors.toList())
+        );
+    }
+
+    @ParametersFactory
+    public static Iterable<Object[]> parameters() {
+        return EXAMPLE_PLUGINS
+            .stream()
+            .map(each -> new Object[] {each})
+            .collect(Collectors.toList());
+    }
+
+    public void testCurrentExamplePlugin() throws IOException {
+        FileUtils.copyDirectory(examplePlugin, tmpDir.getRoot());
+        // just get rid of deprecation warnings
+        Files.write(
+            getTempPath("settings.gradle"),
+            "enableFeaturePreview('STABLE_PUBLISHING')\n".getBytes(StandardCharsets.UTF_8)
+        );
+
+        adaptBuildScriptForTest();
+
+        Files.write(
+            tmpDir.newFile("NOTICE.txt").toPath(),
+            "dummy test notice".getBytes(StandardCharsets.UTF_8)
+        );
+
+        GradleRunner.create()
+            .withProjectDir(tmpDir.getRoot())
+            .withArguments("clean", "check", "-s", "-i", "--warning-mode=all", "--scan")
+            .withPluginClasspath()
+            .build();
+    }
+
+    private void adaptBuildScriptForTest() throws IOException {
+        // Add the local repo as a build script URL so we can pull in build-tools and apply the plugin under test
+        // + is ok because we have no other repo and just want to pick up the latest
+        writeBuildScript(
+            "buildscript {\n" +
+                "    repositories {\n" +
+                "        maven {\n" +
+                "            url = '" + getLocalTestRepoPath() + "'\n" +
+                "        }\n" +
+                "    }\n" +
+                "    dependencies {\n" +
+                "        classpath \"org.elasticsearch.gradle:build-tools:+\"\n" +
+                "    }\n" +
+                "}\n"
+        );
+        // get the original file
+        Files.readAllLines(getTempPath("build.gradle"), StandardCharsets.UTF_8)
+            .stream()
+            .map(line -> line + "\n")
+            .forEach(this::writeBuildScript);
+        // Add a repositories section to be able to resolve dependencies
+        String luceneSnapshotRepo = "";
+        String luceneSnapshotRevision = System.getProperty("test.lucene-snapshot-revision");
+        if (luceneSnapshotRevision != null) {
+            luceneSnapshotRepo = "  maven {\n" +
+                "    url \"http://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/" + luceneSnapshotRevision + "\"\n" +
+                "  }\n";
+        }
+        writeBuildScript("\n" +
+            "repositories {\n" +
+            "  maven {\n" +
+            "    url \"" + getLocalTestRepoPath() + "\"\n" +
+            "  }\n" +
+            luceneSnapshotRepo +
+            "}\n"
+        );
+        Files.delete(getTempPath("build.gradle"));
+        Files.move(getTempPath("build.gradle.new"), getTempPath("build.gradle"));
+        System.err.println("Generated build script is:");
+        Files.readAllLines(getTempPath("build.gradle")).forEach(System.err::println);
+    }
+
+    private Path getTempPath(String fileName) {
+        return new File(tmpDir.getRoot(), fileName).toPath();
+    }
+
+    private Path writeBuildScript(String script) {
+        try {
+            Path path = getTempPath("build.gradle.new");
+            return Files.write(
+                path,
+                script.getBytes(StandardCharsets.UTF_8),
+                Files.exists(path) ? StandardOpenOption.APPEND : StandardOpenOption.CREATE_NEW
+            );
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private String getLocalTestRepoPath() {
+        String property = System.getProperty("test.local-test-repo-path");
+        Objects.requireNonNull(property, "test.local-test-repo-path not passed to tests");
+        File file = new File(property);
+        assertTrue("Expected " + property + " to exist, but it did not!", file.exists());
+        return file.getAbsolutePath();
+    }
+
+}
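BuildExamplePluginsIT only works when the build passes it the example-plugin locations and the local test repository. For reference, a condensed sketch of that wiring as done in buildSrc/build.gradle earlier in this diff:

    task integTest(type: Test) {
        // these system properties are read by BuildExamplePluginsIT above
        systemProperty 'test.build-tools.plugin.examples',
            files(project(':example-plugins').subprojects.collect { it.projectDir }).asPath
        systemProperty 'test.local-test-repo-path', "${rootProject.buildDir}/local-test-repo"
        systemProperty 'test.lucene-snapshot-revision',
            (versions.lucene =~ /\w+-snapshot-([a-z0-9]+)/)[0][1]
    }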
+ */ + +import org.elasticsearch.gradle.test.GradleIntegrationTestCase; +import org.gradle.testkit.runner.BuildResult; +import org.gradle.testkit.runner.GradleRunner; + + +public class ExportElasticsearchBuildResourcesTaskIT extends GradleIntegrationTestCase { + + public static final String PROJECT_NAME = "elasticsearch-build-resources"; + + public void testUpToDateWithSourcesConfigured() { + GradleRunner.create() + .withProjectDir(getProjectDir(PROJECT_NAME)) + .withArguments("clean", "-s") + .withPluginClasspath() + .build(); + + BuildResult result = GradleRunner.create() + .withProjectDir(getProjectDir(PROJECT_NAME)) + .withArguments("buildResources", "-s", "-i") + .withPluginClasspath() + .build(); + assertTaskSuccessfull(result, ":buildResources"); + assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml"); + assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle_suppressions.xml"); + + result = GradleRunner.create() + .withProjectDir(getProjectDir(PROJECT_NAME)) + .withArguments("buildResources", "-s", "-i") + .withPluginClasspath() + .build(); + assertTaskUpToDate(result, ":buildResources"); + assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml"); + assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle_suppressions.xml"); + } + + public void testImplicitTaskDependencyCopy() { + BuildResult result = GradleRunner.create() + .withProjectDir(getProjectDir(PROJECT_NAME)) + .withArguments("clean", "sampleCopyAll", "-s", "-i") + .withPluginClasspath() + .build(); + + assertTaskSuccessfull(result, ":buildResources"); + assertTaskSuccessfull(result, ":sampleCopyAll"); + assertBuildFileExists(result, PROJECT_NAME, "sampleCopyAll/checkstyle.xml"); + // This is a side effect of compile time reference + assertBuildFileExists(result, PROJECT_NAME, "sampleCopyAll/checkstyle_suppressions.xml"); + } + + public void testImplicitTaskDependencyInputFileOfOther() { + BuildResult result = GradleRunner.create() + .withProjectDir(getProjectDir(PROJECT_NAME)) + .withArguments("clean", "sample", "-s", "-i") + .withPluginClasspath() + .build(); + + assertTaskSuccessfull(result, ":sample"); + assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml"); + assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle_suppressions.xml"); + } + + public void testIncorrectUsage() { + BuildResult result = GradleRunner.create() + .withProjectDir(getProjectDir(PROJECT_NAME)) + .withArguments("noConfigAfterExecution", "-s", "-i") + .withPluginClasspath() + .buildAndFail(); + assertOutputContains("buildResources can't be configured after the task ran"); + } +} diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java b/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java index 5c36fa61550..f00ab406a6c 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java @@ -1,8 +1,13 @@ package org.elasticsearch.gradle.test; +import org.gradle.testkit.runner.BuildResult; +import org.gradle.testkit.runner.BuildTask; import org.gradle.testkit.runner.GradleRunner; +import org.gradle.testkit.runner.TaskOutcome; import java.io.File; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -15,7 +20,7 @@ public 
diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java b/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java
index 5c36fa61550..f00ab406a6c 100644
--- a/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java
+++ b/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java
@@ -1,8 +1,13 @@
 package org.elasticsearch.gradle.test;
 
+import org.gradle.testkit.runner.BuildResult;
+import org.gradle.testkit.runner.BuildTask;
 import org.gradle.testkit.runner.GradleRunner;
+import org.gradle.testkit.runner.TaskOutcome;
 
 import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Path;
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
@@ -15,7 +20,7 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
             throw new RuntimeException("Could not find resources dir for integration tests. " +
                 "Note that these tests can only be ran by Gradle and are not currently supported by the IDE");
         }
-        return new File(root, name);
+        return new File(root, name).getAbsoluteFile();
     }
 
     protected GradleRunner getGradleRunner(String sampleProject) {
@@ -61,4 +66,47 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
         }
     }
 
+    protected void assertTaskSuccessful(BuildResult result, String taskName) {
+        BuildTask task = result.task(taskName);
+        if (task == null) {
+            fail("Expected task `" + taskName + "` to be successful, but it did not run");
+        }
+        assertEquals(
+            "Expected task to be successful but it was: " + task.getOutcome() +
+                "\n\nOutput is:\n" + result.getOutput(),
+            TaskOutcome.SUCCESS,
+            task.getOutcome()
+        );
+    }
+
+    protected void assertTaskUpToDate(BuildResult result, String taskName) {
+        BuildTask task = result.task(taskName);
+        if (task == null) {
+            fail("Expected task `" + taskName + "` to be up-to-date, but it did not run");
+        }
+        assertEquals(
+            "Expected task to be up to date but it was: " + task.getOutcome() +
+                "\n\nOutput is:\n" + result.getOutput(),
+            TaskOutcome.UP_TO_DATE,
+            task.getOutcome()
+        );
+    }
+
+    protected void assertBuildFileExists(BuildResult result, String projectName, String path) {
+        Path absPath = getBuildDir(projectName).toPath().resolve(path);
+        assertTrue(
+            result.getOutput() + "\n\nExpected `" + absPath + "` to exist but it did not" +
+                "\n\nOutput is:\n" + result.getOutput(),
+            Files.exists(absPath)
+        );
+    }
+
+    protected void assertBuildFileDoesNotExist(BuildResult result, String projectName, String path) {
+        Path absPath = getBuildDir(projectName).toPath().resolve(path);
+        assertFalse(
+            result.getOutput() + "\n\nExpected `" + absPath + "` not to exist but it did" +
+                "\n\nOutput is:\n" + result.getOutput(),
+            Files.exists(absPath)
+        );
+    }
 }
diff --git a/buildSrc/src/testKit/elasticsearch-build-resources/build.gradle b/buildSrc/src/testKit/elasticsearch-build-resources/build.gradle
new file mode 100644
index 00000000000..95d1453025e
--- /dev/null
+++ b/buildSrc/src/testKit/elasticsearch-build-resources/build.gradle
@@ -0,0 +1,38 @@
+plugins {
+    id 'elasticsearch.build'
+}
+
+ext.licenseFile = file("$buildDir/dummy/license")
+ext.noticeFile = file("$buildDir/dummy/notice")
+
+buildResources {
+    copy 'checkstyle.xml'
+}
+
+task sampleCopyAll(type: Sync) {
+    /** Note: no explicit dependency. This works with tasks that use the Provider API a.k.a "Lazy Configuration" **/
+    from buildResources
+    into "$buildDir/sampleCopyAll"
+}
+
+task sample {
+    // This does not work, task dependencies can't be providers
+    // dependsOn buildResources.resource('minimumRuntimeVersion')
+    // Nor does this, despite https://github.com/gradle/gradle/issues/3811
+    // dependsOn buildResources.outputDir
+    // for now it's just
+    dependsOn buildResources
+    // we have to reference it at configuration time in order for it to be picked up
+    ext.checkstyle_suppressions = buildResources.copy('checkstyle_suppressions.xml')
+    doLast {
+        println "This task is using ${file(checkstyle_suppressions)}"
+    }
+}
+
+task noConfigAfterExecution {
+    dependsOn buildResources
+    doLast {
+        println "This should cause an error because we are referencing " +
+            "${buildResources.copy('checkstyle_suppressions.xml')} after the `buildResources` task has run."
+ } +} \ No newline at end of file diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 023d5d5b8dc..34c266913d0 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,13 +1,15 @@ elasticsearch = 7.0.0-alpha1 -lucene = 7.5.0-snapshot-608f0277b0 +lucene = 7.5.0-snapshot-13b9e28f9d # optional dependencies spatial4j = 0.7 jts = 1.15.0 -jackson = 2.8.10 +jackson = 2.8.11 snakeyaml = 1.17 +icu4j = 62.1 +supercsv = 2.4.0 # when updating log4j, please update also docs/java-api/index.asciidoc -log4j = 2.9.1 +log4j = 2.11.1 slf4j = 1.6.2 # when updating the JNA version, also update the version in buildSrc/build.gradle diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 65c5d094c71..6f5eab6e1db 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -30,6 +30,14 @@ apply plugin: 'com.github.johnrengelman.shadow' group = 'org.elasticsearch.client' archivesBaseName = 'elasticsearch-rest-high-level-client' +publishing { + publications { + nebula { + artifactId = archivesBaseName + } + } +} + //we need to copy the yaml spec so we can check naming (see RestHighlevelClientTests#testApiNamingConventions) Task copyRestSpec = RestIntegTestTask.createCopyRestSpecTask(project, Providers.FALSE) test.dependsOn(copyRestSpec) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java index 250bbd520da..c4567e22e0b 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java @@ -21,18 +21,15 @@ package org.elasticsearch.client; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; -import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; -import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; @@ -45,7 +42,6 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRespon import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import 
org.elasticsearch.action.admin.indices.open.OpenIndexResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; @@ -55,15 +51,14 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; -import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse; import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; import org.elasticsearch.action.admin.indices.shrink.ResizeResponse; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -93,9 +88,9 @@ public final class IndicesClient { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public DeleteIndexResponse delete(DeleteIndexRequest deleteIndexRequest, RequestOptions options) throws IOException { + public AcknowledgedResponse delete(DeleteIndexRequest deleteIndexRequest, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(deleteIndexRequest, RequestConverters::deleteIndex, options, - DeleteIndexResponse::fromXContent, emptySet()); + AcknowledgedResponse::fromXContent, emptySet()); } /** @@ -106,9 +101,9 @@ public final class IndicesClient { * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ - public void deleteAsync(DeleteIndexRequest deleteIndexRequest, RequestOptions options, ActionListener listener) { + public void deleteAsync(DeleteIndexRequest deleteIndexRequest, RequestOptions options, ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(deleteIndexRequest, RequestConverters::deleteIndex, options, - DeleteIndexResponse::fromXContent, listener, emptySet()); + AcknowledgedResponse::fromXContent, listener, emptySet()); } /** @@ -147,9 +142,9 @@ public final class IndicesClient { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public PutMappingResponse putMapping(PutMappingRequest putMappingRequest, RequestOptions options) throws IOException { + public AcknowledgedResponse putMapping(PutMappingRequest putMappingRequest, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(putMappingRequest, RequestConverters::putMapping, options, - PutMappingResponse::fromXContent, emptySet()); + AcknowledgedResponse::fromXContent, emptySet()); } /** @@ -160,9 +155,10 @@ public final class IndicesClient { * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ - public void putMappingAsync(PutMappingRequest putMappingRequest, RequestOptions options, ActionListener listener) { + public void putMappingAsync(PutMappingRequest putMappingRequest, RequestOptions options, + ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(putMappingRequest, RequestConverters::putMapping, options, - PutMappingResponse::fromXContent, listener, emptySet()); + AcknowledgedResponse::fromXContent, listener, emptySet()); } /** @@ -231,9 +227,9 @@ public final class IndicesClient { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public IndicesAliasesResponse updateAliases(IndicesAliasesRequest indicesAliasesRequest, RequestOptions options) throws IOException { + public AcknowledgedResponse updateAliases(IndicesAliasesRequest indicesAliasesRequest, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(indicesAliasesRequest, RequestConverters::updateAliases, options, - IndicesAliasesResponse::fromXContent, emptySet()); + AcknowledgedResponse::fromXContent, emptySet()); } /** @@ -245,9 +241,9 @@ public final class IndicesClient { * @param listener the listener to be notified upon request completion */ public void updateAliasesAsync(IndicesAliasesRequest indicesAliasesRequest, RequestOptions options, - ActionListener listener) { + ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(indicesAliasesRequest, RequestConverters::updateAliases, options, - IndicesAliasesResponse::fromXContent, listener, emptySet()); + AcknowledgedResponse::fromXContent, listener, emptySet()); } /** @@ -286,9 +282,9 @@ public final class IndicesClient { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public CloseIndexResponse close(CloseIndexRequest closeIndexRequest, RequestOptions options) throws IOException { + public AcknowledgedResponse close(CloseIndexRequest closeIndexRequest, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(closeIndexRequest, RequestConverters::closeIndex, options, - CloseIndexResponse::fromXContent, emptySet()); + AcknowledgedResponse::fromXContent, emptySet()); } /** @@ -299,9 +295,9 @@ public final class IndicesClient { * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ - public void closeAsync(CloseIndexRequest closeIndexRequest, RequestOptions options, ActionListener listener) { + public void closeAsync(CloseIndexRequest closeIndexRequest, RequestOptions options, ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(closeIndexRequest, RequestConverters::closeIndex, options, - CloseIndexResponse::fromXContent, listener, emptySet()); + AcknowledgedResponse::fromXContent, listener, emptySet()); } @@ -706,9 +702,9 @@ public final class IndicesClient { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public UpdateSettingsResponse putSettings(UpdateSettingsRequest updateSettingsRequest, RequestOptions options) throws IOException { + public AcknowledgedResponse putSettings(UpdateSettingsRequest updateSettingsRequest, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(updateSettingsRequest, RequestConverters::indexPutSettings, options, - UpdateSettingsResponse::fromXContent, emptySet()); + AcknowledgedResponse::fromXContent, emptySet()); } /** @@ -720,9 +716,9 @@ public final class IndicesClient { * @param listener the listener to be notified upon request completion */ public void putSettingsAsync(UpdateSettingsRequest updateSettingsRequest, RequestOptions options, - ActionListener listener) { + ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(updateSettingsRequest, RequestConverters::indexPutSettings, options, - UpdateSettingsResponse::fromXContent, listener, emptySet()); + AcknowledgedResponse::fromXContent, listener, emptySet()); } /** @@ -734,10 +730,10 @@ public final class IndicesClient { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public PutIndexTemplateResponse putTemplate(PutIndexTemplateRequest putIndexTemplateRequest, - RequestOptions options) throws IOException { + public AcknowledgedResponse putTemplate(PutIndexTemplateRequest putIndexTemplateRequest, + RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(putIndexTemplateRequest, RequestConverters::putTemplate, options, - PutIndexTemplateResponse::fromXContent, emptySet()); + AcknowledgedResponse::fromXContent, emptySet()); } /** @@ -749,9 +745,9 @@ public final class IndicesClient { * @param listener the listener to be notified upon request completion */ public void putTemplateAsync(PutIndexTemplateRequest putIndexTemplateRequest, RequestOptions options, - ActionListener listener) { + ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(putIndexTemplateRequest, RequestConverters::putTemplate, options, - PutIndexTemplateResponse::fromXContent, listener, emptySet()); + AcknowledgedResponse::fromXContent, listener, emptySet()); } /** diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java index e889ec5beba..99d50f6b46b 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java @@ -26,7 +26,7 @@ import org.elasticsearch.action.ingest.GetPipelineResponse; import 
org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineResponse; -import org.elasticsearch.action.ingest.WritePipelineResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import java.io.IOException; @@ -54,9 +54,9 @@ public final class IngestClient { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public WritePipelineResponse putPipeline(PutPipelineRequest request, RequestOptions options) throws IOException { + public AcknowledgedResponse putPipeline(PutPipelineRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::putPipeline, options, - WritePipelineResponse::fromXContent, emptySet()); + AcknowledgedResponse::fromXContent, emptySet()); } /** @@ -67,9 +67,9 @@ public final class IngestClient { * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ - public void putPipelineAsync(PutPipelineRequest request, RequestOptions options, ActionListener listener) { + public void putPipelineAsync(PutPipelineRequest request, RequestOptions options, ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::putPipeline, options, - WritePipelineResponse::fromXContent, listener, emptySet()); + AcknowledgedResponse::fromXContent, listener, emptySet()); } /** @@ -109,9 +109,9 @@ public final class IngestClient { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public WritePipelineResponse deletePipeline(DeletePipelineRequest request, RequestOptions options) throws IOException { + public AcknowledgedResponse deletePipeline(DeletePipelineRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::deletePipeline, options, - WritePipelineResponse::fromXContent, emptySet()); + AcknowledgedResponse::fromXContent, emptySet()); } /** @@ -123,9 +123,9 @@ public final class IngestClient { * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ - public void deletePipelineAsync(DeletePipelineRequest request, RequestOptions options, ActionListener listener) { + public void deletePipelineAsync(DeletePipelineRequest request, RequestOptions options, ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::deletePipeline, options, - WritePipelineResponse::fromXContent, listener, emptySet()); + AcknowledgedResponse::fromXContent, listener, emptySet()); } /** diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseClient.java index 587578f3b35..ca6539daa04 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseClient.java @@ -19,11 +19,27 @@ package org.elasticsearch.client; +import org.apache.http.HttpEntity; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; +import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; +import org.elasticsearch.protocol.xpack.license.GetLicenseResponse; import org.elasticsearch.protocol.xpack.license.PutLicenseRequest; import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; import static java.util.Collections.emptySet; @@ -34,7 +50,7 @@ import static java.util.Collections.emptySet; * See the * X-Pack Licensing APIs on elastic.co for more information. */ -public class LicenseClient { +public final class LicenseClient { private final RestHighLevelClient restHighLevelClient; @@ -54,7 +70,7 @@ public class LicenseClient { } /** - * Asynchronously updates license for the cluster cluster. + * Asynchronously updates license for the cluster. * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ @@ -63,4 +79,79 @@ public class LicenseClient { PutLicenseResponse::fromXContent, listener, emptySet()); } + /** + * Returns the current license for the cluster. + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public GetLicenseResponse getLicense(GetLicenseRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequest(request, RequestConverters::getLicense, options, + response -> new GetLicenseResponse(convertResponseToJson(response)), emptySet()); + } + + /** + * Asynchronously returns the current license for the cluster. + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void getLicenseAsync(GetLicenseRequest request, RequestOptions options, ActionListener<GetLicenseResponse> listener) { + restHighLevelClient.performRequestAsync(request, RequestConverters::getLicense, options, + response -> new GetLicenseResponse(convertResponseToJson(response)), listener, emptySet()); + } + + /** + * Deletes the license from the cluster. + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public AcknowledgedResponse deleteLicense(DeleteLicenseRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::deleteLicense, options, + AcknowledgedResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously deletes the license from the cluster. + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void deleteLicenseAsync(DeleteLicenseRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::deleteLicense, options, + AcknowledgedResponse::fromXContent, listener, emptySet()); + } + + /** + * Converts an entire response into a JSON string. + * + * This is useful for responses that we don't parse on the client side, but instead treat as a string, + * such as the license JSON. + */ + static String convertResponseToJson(Response response) throws IOException { + HttpEntity entity = response.getEntity(); + if (entity == null) { + throw new IllegalStateException("Response body expected but not returned"); + } + if (entity.getContentType() == null) { + throw new IllegalStateException("Elasticsearch didn't return the [Content-Type] header, unable to parse response body"); + } + XContentType xContentType = XContentType.fromMediaTypeOrFormat(entity.getContentType().getValue()); + if (xContentType == null) { + throw new IllegalStateException("Unsupported Content-Type: " + entity.getContentType().getValue()); + } + if (xContentType == XContentType.JSON) { + // No conversion is required + return Streams.copyToString(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8)); + } else { + // Need to convert into JSON + try (InputStream stream = response.getEntity().getContent(); + XContentParser parser = XContentFactory.xContent(xContentType).createParser(NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream)) { + parser.nextToken(); + XContentBuilder
builder = XContentFactory.jsonBuilder(); + builder.copyCurrentStructure(parser); + return Strings.toString(builder); + } + } + } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java new file mode 100644 index 00000000000..e26a4c629a0 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.RequestConverters.EndpointBuilder; +import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; +import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; +import org.elasticsearch.protocol.xpack.ml.PutJobRequest; + +import java.io.IOException; + +import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE; +import static org.elasticsearch.client.RequestConverters.createEntity; + +final class MLRequestConverters { + + private MLRequestConverters() {} + + static Request putJob(PutJobRequest putJobRequest) throws IOException { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("anomaly_detectors") + .addPathPart(putJobRequest.getJob().getId()) + .build(); + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + request.setEntity(createEntity(putJobRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request openJob(OpenJobRequest openJobRequest) throws IOException { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("anomaly_detectors") + .addPathPart(openJobRequest.getJobId()) + .addPathPartAsIs("_open") + .build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + request.setJsonEntity(openJobRequest.toString()); + return request; + } + + static Request deleteJob(DeleteJobRequest deleteJobRequest) { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("anomaly_detectors") + .addPathPart(deleteJobRequest.getJobId()) + .build(); + Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + + RequestConverters.Params params = new RequestConverters.Params(request); + params.putParam("force", Boolean.toString(deleteJobRequest.isForce())); + + return request; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java 
b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java new file mode 100644 index 00000000000..32b6cd6cf2c --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java @@ -0,0 +1,169 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; +import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse; +import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; +import org.elasticsearch.protocol.xpack.ml.OpenJobResponse; +import org.elasticsearch.protocol.xpack.ml.PutJobRequest; +import org.elasticsearch.protocol.xpack.ml.PutJobResponse; + +import java.io.IOException; +import java.util.Collections; + +/** + * Machine Learning API client wrapper for the {@link RestHighLevelClient} + * + *
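+ * A minimal usage sketch, illustrative only: {@code client} is assumed to be an
+ * already-constructed {@link RestHighLevelClient} and {@code job} a valid
+ * {@link org.elasticsearch.protocol.xpack.ml.job.config.Job}.
+ * <pre>
+ * MachineLearningClient ml = client.machineLearning();
+ * PutJobResponse created = ml.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+ * OpenJobResponse opened = ml.openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
+ * </pre>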

+ * See the + * X-Pack Machine Learning APIs for additional information. + */ +public final class MachineLearningClient { + + private final RestHighLevelClient restHighLevelClient; + + MachineLearningClient(RestHighLevelClient restHighLevelClient) { + this.restHighLevelClient = restHighLevelClient; + } + + /** + * Creates a new Machine Learning Job + *

+ * For additional info + * see ML PUT job documentation + * + * @param request the PutJobRequest containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return PutJobResponse with enclosed {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} object + * @throws IOException when there is a serialization issue sending the request or receiving the response + */ + public PutJobResponse putJob(PutJobRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::putJob, + options, + PutJobResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Creates a new Machine Learning Job asynchronously and notifies listener on completion + *

+ * For additional info + * see ML PUT job documentation + * + * @param request the request containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified upon request completion + */ + public void putJobAsync(PutJobRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::putJob, + options, + PutJobResponse::fromXContent, + listener, + Collections.emptySet()); + } + + /** + * Deletes the given Machine Learning Job + *
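+ * A hedged sketch of a delete call; {@code ml} is assumed to be
+ * {@code client.machineLearning()} and the job id is hypothetical.
+ * <pre>
+ * DeleteJobRequest deleteRequest = new DeleteJobRequest("my-job-id");
+ * deleteRequest.setForce(true); // force flag, as exercised in MLRequestConvertersTests
+ * DeleteJobResponse deleted = ml.deleteJob(deleteRequest, RequestOptions.DEFAULT);
+ * </pre>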

+ * For additional info + * see ML Delete Job documentation + *

+ * @param request the request to delete the job + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return action acknowledgement + * @throws IOException when there is a serialization issue sending the request or receiving the response + */ + public DeleteJobResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::deleteJob, + options, + DeleteJobResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Deletes the given Machine Learning Job asynchronously and notifies the listener on completion + *
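+ * A sketch of wiring up the listener; the {@code handle*} callbacks are
+ * hypothetical application code, not part of this PR.
+ * <pre>
+ * ml.deleteJobAsync(new DeleteJobRequest("my-job-id"), RequestOptions.DEFAULT,
+ *     new ActionListener<DeleteJobResponse>() {
+ *         @Override
+ *         public void onResponse(DeleteJobResponse response) { handleAcknowledged(response); }
+ *         @Override
+ *         public void onFailure(Exception e) { handleFailure(e); }
+ *     });
+ * </pre>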

+ * For additional info + * see ML Delete Job documentation + *

+ * @param request the request to delete the job + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified upon request completion + */ + public void deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::deleteJob, + options, + DeleteJobResponse::fromXContent, + listener, + Collections.emptySet()); + } + + /** + * Opens a Machine Learning Job. + * When you open a new job, it starts with an empty model. + * + * When you open an existing job, the most recent model state is automatically loaded. + * The job is ready to resume its analysis from where it left off, once new data is received. + * + *
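+ * A short sketch, with {@code ml} as above; the timeout mirrors the one used in
+ * MLRequestConvertersTests.
+ * <pre>
+ * OpenJobRequest openRequest = new OpenJobRequest("my-job-id");
+ * openRequest.setTimeout(TimeValue.timeValueMinutes(10)); // how long to wait for the job to open
+ * OpenJobResponse openResponse = ml.openJob(openRequest, RequestOptions.DEFAULT);
+ * boolean opened = openResponse.isOpened();
+ * </pre>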

+ * For additional info + * see the ML Open Job documentation + *

+ * @param request request containing the job_id and additional optional options + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return response indicating whether the job was successfully opened + * @throws IOException when there is a serialization issue sending the request or receiving the response + */ + public OpenJobResponse openJob(OpenJobRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::openJob, + options, + OpenJobResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Opens a Machine Learning Job asynchronously and notifies the listener on completion. + * When you open a new job, it starts with an empty model. + * + * When you open an existing job, the most recent model state is automatically loaded. + * The job is ready to resume its analysis from where it left off, once new data is received. + *

+ * For additional info + * see the ML Open Job documentation + *

+ * @param request request containing the job_id and additional optional options + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified upon request completion + */ + public void openJobAsync(OpenJobRequest request, RequestOptions options, ActionListener<OpenJobResponse> listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::openJob, + options, + OpenJobResponse::fromXContent, + listener, + Collections.emptySet()); + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationClient.java new file mode 100644 index 00000000000..7da38329947 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationClient.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse; + +import java.io.IOException; +import java.util.Collections; + +/** + * A wrapper for the {@link RestHighLevelClient} that provides methods for + * accessing the Elastic Licensed Migration APIs + *
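+ * A minimal sketch, assuming {@code IndexUpgradeInfoRequest} takes the target
+ * indices (its {@code indices()} accessor is used by the request converter):
+ * <pre>
+ * IndexUpgradeInfoRequest upgradeRequest = new IndexUpgradeInfoRequest("index1", "index2");
+ * IndexUpgradeInfoResponse upgradeResponse =
+ *     client.migration().getAssistance(upgradeRequest, RequestOptions.DEFAULT);
+ * </pre>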

+ * See the + * X-Pack Migration APIs on elastic.co for more information. + */ +public final class MigrationClient { + + private final RestHighLevelClient restHighLevelClient; + + MigrationClient(RestHighLevelClient restHighLevelClient) { + this.restHighLevelClient = restHighLevelClient; + } + + /** + * Get Migration Assistance for one or more indices + * + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public IndexUpgradeInfoResponse getAssistance(IndexUpgradeInfoRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::getMigrationAssistance, options, + IndexUpgradeInfoResponse::fromXContent, Collections.emptySet()); + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index ce6fd1c8c94..0e5fce5b227 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -39,12 +39,12 @@ import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyReposito import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; -import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; -import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; @@ -78,8 +78,8 @@ import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.GetPipelineRequest; -import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.SearchRequest; @@ -96,7 +96,7 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import 
org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContent; @@ -107,10 +107,13 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.rankeval.RankEvalRequest; import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; +import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; +import org.elasticsearch.protocol.xpack.license.PutLicenseRequest; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; -import org.elasticsearch.protocol.xpack.XPackUsageRequest; -import org.elasticsearch.protocol.xpack.license.PutLicenseRequest; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; import org.elasticsearch.script.mustache.SearchTemplateRequest; @@ -422,8 +425,14 @@ final class RequestConverters { BytesReference indexSource = indexRequest.source(); XContentType indexXContentType = indexRequest.getContentType(); - try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, indexSource, indexXContentType)) { + try (XContentParser parser = XContentHelper.createParser( + /* + * EMPTY and THROW are fine here because we just call + * copyCurrentStructure which doesn't touch the + * registry or deprecation. 
+ */ + NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + indexSource, indexXContentType)) { try (XContentBuilder builder = XContentBuilder.builder(bulkContentType.xContent())) { builder.copyCurrentStructure(parser); source = BytesReference.bytes(builder).toBytesRef(); @@ -1154,7 +1163,11 @@ final class RequestConverters { } static Request putLicense(PutLicenseRequest putLicenseRequest) { - Request request = new Request(HttpPut.METHOD_NAME, "/_xpack/license"); + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("license") + .build(); + Request request = new Request(HttpPut.METHOD_NAME, endpoint); Params parameters = new Params(request); parameters.withTimeout(putLicenseRequest.timeout()); parameters.withMasterTimeout(putLicenseRequest.masterNodeTimeout()); @@ -1165,7 +1178,37 @@ final class RequestConverters { return request; } - private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { + static Request getLicense(GetLicenseRequest getLicenseRequest) { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("license") + .build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + Params parameters = new Params(request); + parameters.withLocal(getLicenseRequest.local()); + return request; + } + + static Request deleteLicense(DeleteLicenseRequest deleteLicenseRequest) { + Request request = new Request(HttpDelete.METHOD_NAME, "/_xpack/license"); + Params parameters = new Params(request); + parameters.withTimeout(deleteLicenseRequest.timeout()); + parameters.withMasterTimeout(deleteLicenseRequest.masterNodeTimeout()); + return request; + } + + static Request getMigrationAssistance(IndexUpgradeInfoRequest indexUpgradeInfoRequest) { + EndpointBuilder endpointBuilder = new EndpointBuilder() + .addPathPartAsIs("_xpack/migration/assistance") + .addCommaSeparatedPathParts(indexUpgradeInfoRequest.indices()); + String endpoint = endpointBuilder.build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + Params parameters = new Params(request); + parameters.withIndicesOptions(indexUpgradeInfoRequest.indicesOptions()); + return request; + } + + static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef(); return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index c71bebf6903..e705ca12806 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; -import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptResponse; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; import org.elasticsearch.action.bulk.BulkRequest; @@ -53,13 +52,14 
@@ import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ContextParser; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; @@ -163,8 +163,11 @@ import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipel import org.elasticsearch.search.aggregations.pipeline.derivative.ParsedDerivative; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; +import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestion; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; import org.elasticsearch.search.suggest.term.TermSuggestion; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import java.io.Closeable; import java.io.IOException; @@ -205,6 +208,10 @@ public class RestHighLevelClient implements Closeable { private final SnapshotClient snapshotClient = new SnapshotClient(this); private final TasksClient tasksClient = new TasksClient(this); private final XPackClient xPackClient = new XPackClient(this); + private final WatcherClient watcherClient = new WatcherClient(this); + private final LicenseClient licenseClient = new LicenseClient(this); + private final MigrationClient migrationClient = new MigrationClient(this); + private final MachineLearningClient machineLearningClient = new MachineLearningClient(this); /** * Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the @@ -296,18 +303,64 @@ public class RestHighLevelClient implements Closeable { } /** - * A wrapper for the {@link RestHighLevelClient} that provides methods for - * accessing the Elastic Licensed X-Pack APIs that are shipped with the - * default distribution of Elasticsearch. All of these APIs will 404 if run - * against the OSS distribution of Elasticsearch. + * Provides methods for accessing the Elastic Licensed X-Pack Info + * and Usage APIs that are shipped with the default distribution of + * Elasticsearch. All of these APIs will 404 if run against the OSS + * distribution of Elasticsearch. *

- * See the - * X-Pack APIs on elastic.co for more information. + * See the + * Info APIs on elastic.co for more information. */ public final XPackClient xpack() { return xPackClient; } + /** + * Provides methods for accessing the Elastic Licensed Watcher APIs that + * are shipped with the default distribution of Elasticsearch. All of + * these APIs will 404 if run against the OSS distribution of Elasticsearch. + *

+ * See the + * Watcher APIs on elastic.co for more information. + */ + public WatcherClient watcher() { return watcherClient; } + + /** + * Provides methods for accessing the Elastic Licensed Licensing APIs that + * are shipped with the default distribution of Elasticsearch. All of + * these APIs will 404 if run against the OSS distribution of Elasticsearch. + *

+ * See the + * Licensing APIs on elastic.co for more information. + */ + public LicenseClient license() { return licenseClient; } + + /** + * Provides methods for accessing the Elastic Licensed Migration APIs that + * are shipped with the default distribution of Elasticsearch. All of + * these APIs will 404 if run against the OSS distribution of Elasticsearch. + *

+ * See the + * Migration APIs on elastic.co for more information. + */ + public MigrationClient migration() { + return migrationClient; + } + + /** + * Provides methods for accessing the Elastic Licensed Machine Learning APIs that + * are shipped with the Elastic Stack distribution of Elasticsearch. All of + * these APIs will 404 if run against the OSS distribution of Elasticsearch. + *

+ * See the + * Machine Learning APIs on elastic.co for more information. + * + * @return the client wrapper for making Machine Learning API calls + */ + public MachineLearningClient machineLearning() { + return machineLearningClient; + } + /** * Executes a bulk request using the Bulk API. * See Bulk API on elastic.co @@ -863,9 +916,9 @@ public class RestHighLevelClient implements Closeable { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public DeleteStoredScriptResponse deleteScript(DeleteStoredScriptRequest request, RequestOptions options) throws IOException { + public AcknowledgedResponse deleteScript(DeleteStoredScriptRequest request, RequestOptions options) throws IOException { return performRequestAndParseEntity(request, RequestConverters::deleteScript, options, - DeleteStoredScriptResponse::fromXContent, emptySet()); + AcknowledgedResponse::fromXContent, emptySet()); } /** @@ -877,9 +930,9 @@ public class RestHighLevelClient implements Closeable { * @param listener the listener to be notified upon request completion */ public void deleteScriptAsync(DeleteStoredScriptRequest request, RequestOptions options, - ActionListener listener) { + ActionListener listener) { performRequestAsyncAndParseEntity(request, RequestConverters::deleteScript, options, - DeleteStoredScriptResponse::fromXContent, listener, emptySet()); + AcknowledgedResponse::fromXContent, listener, emptySet()); } /** @@ -1050,8 +1103,7 @@ public class RestHighLevelClient implements Closeable { if (xContentType == null) { throw new IllegalStateException("Unsupported Content-Type: " + entity.getContentType().getValue()); } - try (XContentParser parser = xContentType.xContent().createParser(registry, - LoggingDeprecationHandler.INSTANCE, entity.getContent())) { + try (XContentParser parser = xContentType.xContent().createParser(registry, DEPRECATION_HANDLER, entity.getContent())) { return entityParser.apply(parser); } } @@ -1069,6 +1121,19 @@ public class RestHighLevelClient implements Closeable { return response.getStatusLine().getStatusCode() == 200; } + /** + * Ignores deprecation warnings. This is appropriate because it is only + * used to parse responses from Elasticsearch. Any deprecation warnings + * emitted there just mean that you are talking to an old version of + * Elasticsearch. There isn't anything you can do about the deprecation. 
+ */ + private static final DeprecationHandler DEPRECATION_HANDLER = new DeprecationHandler() { + @Override + public void usedDeprecatedName(String usedName, String modernName) {} + @Override + public void usedDeprecatedField(String usedName, String replacedWith) {} + }; + static List getDefaultNamedXContents() { Map> map = new HashMap<>(); map.put(CardinalityAggregationBuilder.NAME, (p, c) -> ParsedCardinality.fromXContent(p, (String) c)); @@ -1119,11 +1184,11 @@ public class RestHighLevelClient implements Closeable { List entries = map.entrySet().stream() .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue())) .collect(Collectors.toList()); - entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(TermSuggestion.NAME), + entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(TermSuggestionBuilder.SUGGESTION_NAME), (parser, context) -> TermSuggestion.fromXContent(parser, (String)context))); - entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(PhraseSuggestion.NAME), + entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(PhraseSuggestionBuilder.SUGGESTION_NAME), (parser, context) -> PhraseSuggestion.fromXContent(parser, (String)context))); - entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(CompletionSuggestion.NAME), + entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(CompletionSuggestionBuilder.SUGGESTION_NAME), (parser, context) -> CompletionSuggestion.fromXContent(parser, (String)context))); return entries; } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java index 319eb96a9f8..7df0df4836d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java @@ -21,23 +21,21 @@ package org.elasticsearch.client; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; -import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; -import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import 
org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; -import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; -import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; -import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; -import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import java.io.IOException; @@ -95,9 +93,9 @@ public final class SnapshotClient { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public PutRepositoryResponse createRepository(PutRepositoryRequest putRepositoryRequest, RequestOptions options) throws IOException { + public AcknowledgedResponse createRepository(PutRepositoryRequest putRepositoryRequest, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(putRepositoryRequest, RequestConverters::createRepository, options, - PutRepositoryResponse::fromXContent, emptySet()); + AcknowledgedResponse::fromXContent, emptySet()); } /** @@ -109,9 +107,9 @@ public final class SnapshotClient { * @param listener the listener to be notified upon request completion */ public void createRepositoryAsync(PutRepositoryRequest putRepositoryRequest, RequestOptions options, - ActionListener listener) { + ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(putRepositoryRequest, RequestConverters::createRepository, options, - PutRepositoryResponse::fromXContent, listener, emptySet()); + AcknowledgedResponse::fromXContent, listener, emptySet()); } /** @@ -123,10 +121,10 @@ public final class SnapshotClient { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public DeleteRepositoryResponse deleteRepository(DeleteRepositoryRequest deleteRepositoryRequest, RequestOptions options) + public AcknowledgedResponse deleteRepository(DeleteRepositoryRequest deleteRepositoryRequest, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(deleteRepositoryRequest, RequestConverters::deleteRepository, options, - DeleteRepositoryResponse::fromXContent, emptySet()); + AcknowledgedResponse::fromXContent, emptySet()); } /** @@ -138,9 +136,9 @@ public final class SnapshotClient { * @param listener the listener to be notified upon request completion */ public void deleteRepositoryAsync(DeleteRepositoryRequest deleteRepositoryRequest, RequestOptions options, - ActionListener listener) { + ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(deleteRepositoryRequest, RequestConverters::deleteRepository, options, - DeleteRepositoryResponse::fromXContent, listener, emptySet()); + AcknowledgedResponse::fromXContent, listener, emptySet()); } /** @@ -294,9 +292,9 @@ public final class SnapshotClient { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public DeleteSnapshotResponse delete(DeleteSnapshotRequest deleteSnapshotRequest, RequestOptions options) throws IOException { + public AcknowledgedResponse delete(DeleteSnapshotRequest deleteSnapshotRequest, RequestOptions options) throws IOException { return 
restHighLevelClient.performRequestAndParseEntity(deleteSnapshotRequest, RequestConverters::deleteSnapshot, options, - DeleteSnapshotResponse::fromXContent, emptySet()); + AcknowledgedResponse::fromXContent, emptySet()); } /** @@ -309,8 +307,8 @@ public final class SnapshotClient { * @param listener the listener to be notified upon request completion */ public void deleteAsync(DeleteSnapshotRequest deleteSnapshotRequest, RequestOptions options, - ActionListener listener) { + ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(deleteSnapshotRequest, RequestConverters::deleteSnapshot, options, - DeleteSnapshotResponse::fromXContent, listener, emptySet()); + AcknowledgedResponse::fromXContent, listener, emptySet()); } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java index 1401376527d..2af49ba1a1b 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java @@ -41,17 +41,9 @@ import static java.util.Collections.emptySet; public final class XPackClient { private final RestHighLevelClient restHighLevelClient; - private final WatcherClient watcherClient; - private final LicenseClient licenseClient; XPackClient(RestHighLevelClient restHighLevelClient) { this.restHighLevelClient = restHighLevelClient; - this.watcherClient = new WatcherClient(restHighLevelClient); - this.licenseClient = new LicenseClient(restHighLevelClient); - } - - public WatcherClient watcher() { - return watcherClient; } /** @@ -102,15 +94,4 @@ public final class XPackClient { restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xpackUsage, options, XPackUsageResponse::fromXContent, listener, emptySet()); } - - /** - * A wrapper for the {@link RestHighLevelClient} that provides methods for - * accessing the Elastic Licensing APIs. - *

- * See the - * X-Pack APIs on elastic.co for more information. - */ - public LicenseClient license() { - return licenseClient; - } } diff --git a/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt b/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt index fb2330f3f08..33e136a66f4 100644 --- a/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt +++ b/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt @@ -19,3 +19,6 @@ org.apache.http.entity.ContentType#create(java.lang.String) org.apache.http.entity.ContentType#create(java.lang.String,java.lang.String) org.apache.http.entity.ContentType#create(java.lang.String,java.nio.charset.Charset) org.apache.http.entity.ContentType#create(java.lang.String,org.apache.http.NameValuePair[]) + +@defaultMessage We can't rely on log4j2 being on the classpath so don't log deprecations! +org.elasticsearch.common.xcontent.LoggingDeprecationHandler diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index 36a45999b51..533f6bcb22e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -27,18 +27,15 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; -import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; -import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; @@ -51,7 +48,6 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRespon import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; @@ -61,20 +57,19 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; import 
org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; -import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse; import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; import org.elasticsearch.action.admin.indices.shrink.ResizeResponse; import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.broadcast.BroadcastResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; @@ -411,7 +406,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { mappingBuilder.endObject().endObject().endObject(); putMappingRequest.source(mappingBuilder); - PutMappingResponse putMappingResponse = + AcknowledgedResponse putMappingResponse = execute(putMappingRequest, highLevelClient().indices()::putMapping, highLevelClient().indices()::putMappingAsync); assertTrue(putMappingResponse.isAcknowledged()); @@ -431,7 +426,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { mappingBuilder.endObject().endObject().endObject(); putMappingRequest.source(mappingBuilder); - PutMappingResponse putMappingResponse = + AcknowledgedResponse putMappingResponse = execute(putMappingRequest, highLevelClient().indices()::putMapping, highLevelClient().indices()::putMappingAsync); assertTrue(putMappingResponse.isAcknowledged()); @@ -467,7 +462,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { mappingBuilder.endObject().endObject().endObject(); putMappingRequest.source(mappingBuilder); - PutMappingResponse putMappingResponse = + AcknowledgedResponse putMappingResponse = execute(putMappingRequest, highLevelClient().indices()::putMapping, highLevelClient().indices()::putMappingAsync); assertTrue(putMappingResponse.isAcknowledged()); @@ -497,7 +492,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { createIndex(indexName, Settings.EMPTY); DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indexName); - DeleteIndexResponse deleteIndexResponse = + AcknowledgedResponse deleteIndexResponse = execute(deleteIndexRequest, highLevelClient().indices()::delete, highLevelClient().indices()::deleteAsync); assertTrue(deleteIndexResponse.isAcknowledged()); @@ -529,7 +524,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { AliasActions addAction = new AliasActions(AliasActions.Type.ADD).index(index).aliases(alias); addAction.routing("routing").searchRouting("search_routing").filter("{\"term\":{\"year\":2016}}"); 
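// the add action attaches routing, search routing and a term filter to the alias being created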
aliasesAddRequest.addAliasAction(addAction); - IndicesAliasesResponse aliasesAddResponse = execute(aliasesAddRequest, highLevelClient().indices()::updateAliases, + AcknowledgedResponse aliasesAddResponse = execute(aliasesAddRequest, highLevelClient().indices()::updateAliases, highLevelClient().indices()::updateAliasesAsync); assertTrue(aliasesAddResponse.isAcknowledged()); assertThat(aliasExists(alias), equalTo(true)); @@ -547,7 +542,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { aliasesAddRemoveRequest.addAliasAction(addAction); AliasActions removeAction = new AliasActions(AliasActions.Type.REMOVE).index(index).alias(alias); aliasesAddRemoveRequest.addAliasAction(removeAction); - IndicesAliasesResponse aliasesAddRemoveResponse = execute(aliasesAddRemoveRequest, highLevelClient().indices()::updateAliases, + AcknowledgedResponse aliasesAddRemoveResponse = execute(aliasesAddRemoveRequest, highLevelClient().indices()::updateAliases, highLevelClient().indices()::updateAliasesAsync); assertTrue(aliasesAddRemoveResponse.isAcknowledged()); assertThat(aliasExists(alias), equalTo(false)); @@ -558,7 +553,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { IndicesAliasesRequest aliasesRemoveIndexRequest = new IndicesAliasesRequest(); AliasActions removeIndexAction = new AliasActions(AliasActions.Type.REMOVE_INDEX).index(index); aliasesRemoveIndexRequest.addAliasAction(removeIndexAction); - IndicesAliasesResponse aliasesRemoveIndexResponse = execute(aliasesRemoveIndexRequest, highLevelClient().indices()::updateAliases, + AcknowledgedResponse aliasesRemoveIndexResponse = execute(aliasesRemoveIndexRequest, highLevelClient().indices()::updateAliases, highLevelClient().indices()::updateAliasesAsync); assertTrue(aliasesRemoveIndexResponse.isAcknowledged()); assertThat(aliasExists(alias), equalTo(false)); @@ -654,7 +649,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); CloseIndexRequest closeIndexRequest = new CloseIndexRequest(index); - CloseIndexResponse closeIndexResponse = execute(closeIndexRequest, highLevelClient().indices()::close, + AcknowledgedResponse closeIndexResponse = execute(closeIndexRequest, highLevelClient().indices()::close, highLevelClient().indices()::closeAsync); assertTrue(closeIndexResponse.isAcknowledged()); @@ -1144,7 +1139,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { assertThat(dynamicSetting.getDefault(Settings.EMPTY), not(dynamicSettingValue)); UpdateSettingsRequest dynamicSettingRequest = new UpdateSettingsRequest(); dynamicSettingRequest.settings(Settings.builder().put(dynamicSettingKey, dynamicSettingValue).build()); - UpdateSettingsResponse response = execute(dynamicSettingRequest, highLevelClient().indices()::putSettings, + AcknowledgedResponse response = execute(dynamicSettingRequest, highLevelClient().indices()::putSettings, highLevelClient().indices()::putSettingsAsync); assertTrue(response.isAcknowledged()); @@ -1227,7 +1222,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { .mapping("doc", "host_name", "type=keyword", "description", "type=text") .alias(new Alias("alias-1").indexRouting("abc")).alias(new Alias("{index}-write").searchRouting("xyz")); - PutIndexTemplateResponse putTemplateResponse = execute(putTemplateRequest, + AcknowledgedResponse putTemplateResponse = execute(putTemplateRequest, highLevelClient().indices()::putTemplate, 
highLevelClient().indices()::putTemplateAsync); assertThat(putTemplateResponse.isAcknowledged(), equalTo(true)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java index 1f5914f392c..70685296192 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java @@ -28,7 +28,7 @@ import org.elasticsearch.action.ingest.SimulateDocumentResult; import org.elasticsearch.action.ingest.SimulateDocumentVerboseResult; import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineResponse; -import org.elasticsearch.action.ingest.WritePipelineResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; @@ -50,7 +50,7 @@ public class IngestClientIT extends ESRestHighLevelClientTestCase { BytesReference.bytes(pipelineBuilder), pipelineBuilder.contentType()); - WritePipelineResponse putPipelineResponse = + AcknowledgedResponse putPipelineResponse = execute(request, highLevelClient().ingest()::putPipeline, highLevelClient().ingest()::putPipelineAsync); assertTrue(putPipelineResponse.isAcknowledged()); } @@ -86,7 +86,7 @@ public class IngestClientIT extends ESRestHighLevelClientTestCase { DeletePipelineRequest request = new DeletePipelineRequest(id); - WritePipelineResponse response = + AcknowledgedResponse response = execute(request, highLevelClient().ingest()::deletePipeline, highLevelClient().ingest()::deletePipelineAsync); assertTrue(response.isAcknowledged()); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java new file mode 100644 index 00000000000..43a41960e00 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java @@ -0,0 +1,90 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; +import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; +import org.elasticsearch.protocol.xpack.ml.PutJobRequest; +import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig; +import org.elasticsearch.protocol.xpack.ml.job.config.Detector; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.test.ESTestCase; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; + +public class MLRequestConvertersTests extends ESTestCase { + + public void testPutJob() throws IOException { + Job job = createValidJob("foo"); + PutJobRequest putJobRequest = new PutJobRequest(job); + + Request request = MLRequestConverters.putJob(putJobRequest); + + assertThat(request.getEndpoint(), equalTo("/_xpack/ml/anomaly_detectors/foo")); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) { + Job parsedJob = Job.PARSER.apply(parser, null).build(); + assertThat(parsedJob, equalTo(job)); + } + } + + public void testOpenJob() throws Exception { + String jobId = "some-job-id"; + OpenJobRequest openJobRequest = new OpenJobRequest(jobId); + openJobRequest.setTimeout(TimeValue.timeValueMinutes(10)); + + Request request = MLRequestConverters.openJob(openJobRequest); + assertEquals(HttpPost.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_open", request.getEndpoint()); + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + request.getEntity().writeTo(bos); + assertEquals(bos.toString("UTF-8"), "{\"job_id\":\""+ jobId +"\",\"timeout\":\"10m\"}"); + } + + public void testDeleteJob() { + String jobId = randomAlphaOfLength(10); + DeleteJobRequest deleteJobRequest = new DeleteJobRequest(jobId); + + Request request = MLRequestConverters.deleteJob(deleteJobRequest); + assertEquals(HttpDelete.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/ml/anomaly_detectors/" + jobId, request.getEndpoint()); + assertEquals(Boolean.toString(false), request.getParameters().get("force")); + + deleteJobRequest.setForce(true); + request = MLRequestConverters.deleteJob(deleteJobRequest); + assertEquals(Boolean.toString(true), request.getParameters().get("force")); + } + + private static Job createValidJob(String jobId) { + AnalysisConfig.Builder analysisConfig = AnalysisConfig.builder(Collections.singletonList( + Detector.builder().setFunction("count").build())); + Job.Builder jobBuilder = Job.builder(jobId); + jobBuilder.setAnalysisConfig(analysisConfig); + return jobBuilder.build(); + } +} \ No newline at end of file diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java new file mode 100644 index 00000000000..0037460150f --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -0,0 +1,103 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
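MLRequestConvertersTests pins down the expected endpoints and parameters, but the converters themselves are not shown in these hunks. A hedged reconstruction of MLRequestConverters.deleteJob, inferred only from the assertions above; the getJobId() and isForce() accessors are assumptions about DeleteJobRequest:

    static Request deleteJob(DeleteJobRequest deleteJobRequest) {
        String endpoint = "/_xpack/ml/anomaly_detectors/" + deleteJobRequest.getJobId();
        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
        // The test asserts that "force" is present as a parameter even when false.
        request.addParameter("force", Boolean.toString(deleteJobRequest.isForce()));
        return request;
    }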
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client; + +import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; +import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse; +import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; +import org.elasticsearch.protocol.xpack.ml.OpenJobResponse; +import org.elasticsearch.protocol.xpack.ml.PutJobRequest; +import org.elasticsearch.protocol.xpack.ml.PutJobResponse; +import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig; +import org.elasticsearch.protocol.xpack.ml.job.config.DataDescription; +import org.elasticsearch.protocol.xpack.ml.job.config.Detector; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; + +import java.util.Arrays; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.is; + +public class MachineLearningIT extends ESRestHighLevelClientTestCase { + + public void testPutJob() throws Exception { + String jobId = randomValidJobId(); + Job job = buildJob(jobId); + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + + PutJobResponse putJobResponse = execute(new PutJobRequest(job), machineLearningClient::putJob, machineLearningClient::putJobAsync); + Job createdJob = putJobResponse.getResponse(); + + assertThat(createdJob.getId(), is(jobId)); + assertThat(createdJob.getJobType(), is(Job.ANOMALY_DETECTOR_JOB_TYPE)); + } + + public void testDeleteJob() throws Exception { + String jobId = randomValidJobId(); + Job job = buildJob(jobId); + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + + DeleteJobResponse response = execute(new DeleteJobRequest(jobId), + machineLearningClient::deleteJob, + machineLearningClient::deleteJobAsync); + + assertTrue(response.isAcknowledged()); + } + + public void testOpenJob() throws Exception { + String jobId = randomValidJobId(); + Job job = buildJob(jobId); + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + + machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + + OpenJobResponse response = execute(new OpenJobRequest(jobId), machineLearningClient::openJob, machineLearningClient::openJobAsync); + + assertTrue(response.isOpened()); + } + + public static String randomValidJobId() { + CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz0123456789".toCharArray()); + return generator.ofCodePointsLength(random(), 10, 10); + } + + public static Job buildJob(String jobId) { + Job.Builder builder = new Job.Builder(jobId); + builder.setDescription(randomAlphaOfLength(10)); + + Detector detector = new 
Detector.Builder() + .setFieldName("total") + .setFunction("sum") + .setDetectorDescription(randomAlphaOfLength(10)) + .build(); + AnalysisConfig.Builder configBuilder = new AnalysisConfig.Builder(Arrays.asList(detector)); + configBuilder.setBucketSpan(new TimeValue(randomIntBetween(1, 10), TimeUnit.SECONDS)); + builder.setAnalysisConfig(configBuilder); + + DataDescription.Builder dataDescription = new DataDescription.Builder(); + dataDescription.setTimeFormat(randomFrom(DataDescription.EPOCH_MS, DataDescription.EPOCH)); + dataDescription.setTimeField(randomAlphaOfLength(10)); + builder.setDataDescription(dataDescription); + + return builder.build(); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MigrationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MigrationIT.java new file mode 100644 index 00000000000..03614537bfe --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MigrationIT.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
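MachineLearningIT funnels every call through the execute(request, sync, async) helper inherited from ESRestHighLevelClientTestCase, which is not part of this diff. A simplified sketch of its contract, assuming the real helper exercises both the synchronous and asynchronous variants; the functional interfaces here are illustrative:

    @FunctionalInterface
    interface SyncMethod<Req, Resp> {
        Resp execute(Req request, RequestOptions options) throws IOException;
    }

    @FunctionalInterface
    interface AsyncMethod<Req, Resp> {
        void execute(Req request, RequestOptions options, ActionListener<Resp> listener);
    }

    static <Req, Resp> Resp execute(Req request,
                                    SyncMethod<Req, Resp> sync,
                                    AsyncMethod<Req, Resp> async) throws IOException {
        // Sketch: call the synchronous variant; the real helper also drives the
        // async variant so both client code paths stay covered by one test.
        return sync.execute(request, RequestOptions.DEFAULT);
    }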
+ */
+
+package org.elasticsearch.client;
+
+import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
+import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
+import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;
+
+import java.io.IOException;
+
+public class MigrationIT extends ESRestHighLevelClientTestCase {
+
+    public void testGetAssistance() throws IOException {
+        RestHighLevelClient client = highLevelClient();
+        {
+            IndexUpgradeInfoResponse response = client.migration().getAssistance(new IndexUpgradeInfoRequest(), RequestOptions.DEFAULT);
+            assertEquals(0, response.getActions().size());
+        }
+        {
+            client.indices().create(new CreateIndexRequest("test"), RequestOptions.DEFAULT);
+            IndexUpgradeInfoResponse response = client.migration().getAssistance(
+                new IndexUpgradeInfoRequest("test"), RequestOptions.DEFAULT);
+            assertEquals(0, response.getActions().size());
+        }
+    }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
index e4aa690acb6..47195f0bb2a 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
@@ -126,6 +126,7 @@ import org.elasticsearch.index.rankeval.RankEvalSpec;
 import org.elasticsearch.index.rankeval.RatedRequest;
 import org.elasticsearch.index.rankeval.RestRankEvalAction;
 import org.elasticsearch.protocol.xpack.XPackInfoRequest;
+import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
 import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
 import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
 import org.elasticsearch.repositories.fs.FsRepository;
@@ -2552,6 +2553,23 @@ public class RequestConvertersTests extends ESTestCase {
         assertEquals(expectedParams, request.getParameters());
     }
 
+    public void testGetMigrationAssistance() {
+        IndexUpgradeInfoRequest upgradeInfoRequest = new IndexUpgradeInfoRequest();
+        String expectedEndpoint = "/_xpack/migration/assistance";
+        if (randomBoolean()) {
+            String[] indices = randomIndicesNames(1, 5);
+            upgradeInfoRequest.indices(indices);
+            expectedEndpoint += "/" + String.join(",", indices);
+        }
+        Map<String, String> expectedParams = new HashMap<>();
+        setRandomIndicesOptions(upgradeInfoRequest::indicesOptions, upgradeInfoRequest::indicesOptions, expectedParams);
+        Request request = RequestConverters.getMigrationAssistance(upgradeInfoRequest);
+        assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+        assertEquals(expectedEndpoint, request.getEndpoint());
+        assertNull(request.getEntity());
+        assertEquals(expectedParams, request.getParameters());
+    }
+
     public void testXPackPutWatch() throws Exception {
         PutWatchRequest putWatchRequest = new PutWatchRequest();
         String watchId = randomAlphaOfLength(10);
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
index 5cf3b352756..b5d8dbb628e 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
@@ -20,7 +20,6 @@ package org.elasticsearch.client;
 
 import com.fasterxml.jackson.core.JsonParseException;
-
 import org.apache.http.HttpEntity;
 import
org.apache.http.HttpHost; import org.apache.http.HttpResponse; @@ -755,7 +754,11 @@ public class RestHighLevelClientTests extends ESTestCase { method.isAnnotationPresent(Deprecated.class)); } else { //TODO xpack api are currently ignored, we need to load xpack yaml spec too - if (apiName.startsWith("xpack.") == false) { + if (apiName.startsWith("xpack.") == false && + apiName.startsWith("license.") == false && + apiName.startsWith("machine_learning.") == false && + apiName.startsWith("watcher.") == false && + apiName.startsWith("migration.") == false) { apiNotFound.add(apiName); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java index 06aec70a018..4616234f145 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java @@ -21,24 +21,22 @@ package org.elasticsearch.client; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; -import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; -import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; -import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; -import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; -import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; -import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; -import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.RestStatus; @@ -55,7 +53,7 @@ import static org.hamcrest.Matchers.is; public class SnapshotIT extends ESRestHighLevelClientTestCase { - private PutRepositoryResponse createTestRepository(String repository, String 
type, String settings) throws IOException { + private AcknowledgedResponse createTestRepository(String repository, String type, String settings) throws IOException { PutRepositoryRequest request = new PutRepositoryRequest(repository); request.settings(settings, XContentType.JSON); request.type(type); @@ -71,7 +69,7 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { } public void testCreateRepository() throws IOException { - PutRepositoryResponse response = createTestRepository("test", FsRepository.TYPE, "{\"location\": \".\"}"); + AcknowledgedResponse response = createTestRepository("test", FsRepository.TYPE, "{\"location\": \".\"}"); assertTrue(response.isAcknowledged()); } @@ -117,14 +115,14 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { assertThat(1, equalTo(response.repositories().size())); DeleteRepositoryRequest deleteRequest = new DeleteRepositoryRequest(repository); - DeleteRepositoryResponse deleteResponse = execute(deleteRequest, highLevelClient().snapshot()::deleteRepository, + AcknowledgedResponse deleteResponse = execute(deleteRequest, highLevelClient().snapshot()::deleteRepository, highLevelClient().snapshot()::deleteRepositoryAsync); assertTrue(deleteResponse.isAcknowledged()); } public void testVerifyRepository() throws IOException { - PutRepositoryResponse putRepositoryResponse = createTestRepository("test", FsRepository.TYPE, "{\"location\": \".\"}"); + AcknowledgedResponse putRepositoryResponse = createTestRepository("test", FsRepository.TYPE, "{\"location\": \".\"}"); assertTrue(putRepositoryResponse.isAcknowledged()); VerifyRepositoryRequest request = new VerifyRepositoryRequest("test"); @@ -153,7 +151,7 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { String snapshot1 = "test_snapshot1"; String snapshot2 = "test_snapshot2"; - PutRepositoryResponse putRepositoryResponse = createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}"); + AcknowledgedResponse putRepositoryResponse = createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}"); assertTrue(putRepositoryResponse.isAcknowledged()); CreateSnapshotRequest createSnapshotRequest1 = new CreateSnapshotRequest(repository, snapshot1); @@ -187,7 +185,7 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { String testSnapshot = "snapshot"; String testIndex = "test_index"; - PutRepositoryResponse putRepositoryResponse = createTestRepository(testRepository, FsRepository.TYPE, "{\"location\": \".\"}"); + AcknowledgedResponse putRepositoryResponse = createTestRepository(testRepository, FsRepository.TYPE, "{\"location\": \".\"}"); assertTrue(putRepositoryResponse.isAcknowledged()); createIndex(testIndex, Settings.EMPTY); @@ -216,7 +214,7 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { String testIndex = "test_index"; String restoredIndex = testIndex + "_restored"; - PutRepositoryResponse putRepositoryResponse = createTestRepository(testRepository, FsRepository.TYPE, "{\"location\": \".\"}"); + AcknowledgedResponse putRepositoryResponse = createTestRepository(testRepository, FsRepository.TYPE, "{\"location\": \".\"}"); assertTrue(putRepositoryResponse.isAcknowledged()); createIndex(testIndex, Settings.EMPTY); @@ -250,7 +248,7 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { String repository = "test_repository"; String snapshot = "test_snapshot"; - PutRepositoryResponse putRepositoryResponse = createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}"); + AcknowledgedResponse 
putRepositoryResponse = createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}"); assertTrue(putRepositoryResponse.isAcknowledged()); CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repository, snapshot); @@ -260,7 +258,7 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { assertEquals(RestStatus.OK, createSnapshotResponse.status()); DeleteSnapshotRequest request = new DeleteSnapshotRequest(repository, snapshot); - DeleteSnapshotResponse response = execute(request, highLevelClient().snapshot()::delete, highLevelClient().snapshot()::deleteAsync); + AcknowledgedResponse response = execute(request, highLevelClient().snapshot()::delete, highLevelClient().snapshot()::deleteAsync); assertTrue(response.isAcknowledged()); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/StoredScriptsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/StoredScriptsIT.java index e6d380a4cc0..14734c4ab60 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/StoredScriptsIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/StoredScriptsIT.java @@ -23,9 +23,9 @@ import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; -import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptResponse; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentType; @@ -90,7 +90,7 @@ public class StoredScriptsIT extends ESRestHighLevelClientTestCase { deleteRequest.masterNodeTimeout("50s"); deleteRequest.timeout("50s"); - DeleteStoredScriptResponse deleteResponse = execute(deleteRequest, highLevelClient()::deleteScript, + AcknowledgedResponse deleteResponse = execute(deleteRequest, highLevelClient()::deleteScript, highLevelClient()::deleteScriptAsync); assertThat(deleteResponse.isAcknowledged(), equalTo(true)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SyncedFlushResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SyncedFlushResponseTests.java index 0756cfa6bab..8057a92b3f2 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SyncedFlushResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SyncedFlushResponseTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -63,7 +63,7 @@ public class SyncedFlushResponseTests extends ESTestCase { .xContent() .createParser( xContentRegistry(), - LoggingDeprecationHandler.INSTANCE, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, 
BytesReference.bytes(serverResponsebuilder).streamInput() ).map() ); @@ -74,7 +74,7 @@ public class SyncedFlushResponseTests extends ESTestCase { .xContent() .createParser( xContentRegistry(), - LoggingDeprecationHandler.INSTANCE, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(clientResponsebuilder).streamInput() ) .map() @@ -94,7 +94,9 @@ public class SyncedFlushResponseTests extends ESTestCase { .contentType() .xContent() .createParser( - xContentRegistry(), LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(builder).streamInput() + xContentRegistry(), + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + BytesReference.bytes(builder).streamInput() ); SyncedFlushResponse originalResponse = plan.clientResult; SyncedFlushResponse parsedResponse = SyncedFlushResponse.fromXContent(parser); @@ -175,7 +177,8 @@ public class SyncedFlushResponseTests extends ESTestCase { .contentType() .xContent() .createParser( - xContentRegistry(), LoggingDeprecationHandler.INSTANCE, + xContentRegistry(), + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(builder).streamInput() ) .map(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java index 67d1def323a..491992735af 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java @@ -46,7 +46,7 @@ public class WatcherIT extends ESRestHighLevelClientTestCase { "}"; BytesReference bytesReference = new BytesArray(json); PutWatchRequest putWatchRequest = new PutWatchRequest(watchId, bytesReference, XContentType.JSON); - return highLevelClient().xpack().watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT); + return highLevelClient().watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT); } public void testDeleteWatch() throws Exception { @@ -54,7 +54,7 @@ public class WatcherIT extends ESRestHighLevelClientTestCase { { String watchId = randomAlphaOfLength(10); createWatch(watchId); - DeleteWatchResponse deleteWatchResponse = highLevelClient().xpack().watcher().deleteWatch(new DeleteWatchRequest(watchId), + DeleteWatchResponse deleteWatchResponse = highLevelClient().watcher().deleteWatch(new DeleteWatchRequest(watchId), RequestOptions.DEFAULT); assertThat(deleteWatchResponse.getId(), is(watchId)); assertThat(deleteWatchResponse.getVersion(), is(2L)); @@ -64,7 +64,7 @@ public class WatcherIT extends ESRestHighLevelClientTestCase { // delete watch that does not exist { String watchId = randomAlphaOfLength(10); - DeleteWatchResponse deleteWatchResponse = highLevelClient().xpack().watcher().deleteWatch(new DeleteWatchRequest(watchId), + DeleteWatchResponse deleteWatchResponse = highLevelClient().watcher().deleteWatch(new DeleteWatchRequest(watchId), RequestOptions.DEFAULT); assertThat(deleteWatchResponse.getId(), is(watchId)); assertThat(deleteWatchResponse.getVersion(), is(1L)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 36d562c501f..2da4d306c28 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -25,7 
+25,6 @@ import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; -import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; @@ -33,11 +32,9 @@ import org.elasticsearch.action.admin.indices.analyze.DetailAnalyzeResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; -import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; @@ -50,7 +47,6 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRespon import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; @@ -60,20 +56,19 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; -import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse; import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; import org.elasticsearch.action.admin.indices.shrink.ResizeResponse; import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; import org.elasticsearch.action.admin.indices.validate.query.QueryExplanation; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.IndicesOptions; +import 
org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.GetAliasesResponse; import org.elasticsearch.client.RequestOptions; @@ -220,7 +215,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase // end::delete-index-request-indicesOptions // tag::delete-index-execute - DeleteIndexResponse deleteIndexResponse = client.indices().delete(request, RequestOptions.DEFAULT); + AcknowledgedResponse deleteIndexResponse = client.indices().delete(request, RequestOptions.DEFAULT); // end::delete-index-execute // tag::delete-index-response @@ -255,10 +250,10 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase DeleteIndexRequest request = new DeleteIndexRequest("posts"); // tag::delete-index-execute-listener - ActionListener listener = - new ActionListener() { + ActionListener listener = + new ActionListener() { @Override - public void onResponse(DeleteIndexResponse deleteIndexResponse) { + public void onResponse(AcknowledgedResponse deleteIndexResponse) { // <1> } @@ -480,7 +475,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase "}", // <1> XContentType.JSON); // end::put-mapping-request-source - PutMappingResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); + AcknowledgedResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); assertTrue(putMappingResponse.isAcknowledged()); } @@ -494,7 +489,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase jsonMap.put("properties", properties); request.source(jsonMap); // <1> //end::put-mapping-map - PutMappingResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); + AcknowledgedResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); assertTrue(putMappingResponse.isAcknowledged()); } { @@ -515,14 +510,14 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase builder.endObject(); request.source(builder); // <1> //end::put-mapping-xcontent - PutMappingResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); + AcknowledgedResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); assertTrue(putMappingResponse.isAcknowledged()); } { //tag::put-mapping-shortcut request.source("message", "type=text"); // <1> //end::put-mapping-shortcut - PutMappingResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); + AcknowledgedResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); assertTrue(putMappingResponse.isAcknowledged()); } @@ -536,7 +531,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase // end::put-mapping-request-masterTimeout // tag::put-mapping-execute - PutMappingResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); + AcknowledgedResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); // end::put-mapping-execute // tag::put-mapping-response @@ -558,10 +553,10 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase PutMappingRequest request = new PutMappingRequest("twitter").type("tweet"); // tag::put-mapping-execute-listener - ActionListener listener = - new ActionListener() { + ActionListener listener = 
+ new ActionListener() { @Override - public void onResponse(PutMappingResponse putMappingResponse) { + public void onResponse(AcknowledgedResponse putMappingResponse) { // <1> } @@ -601,7 +596,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase " }\n" + "}", // <1> XContentType.JSON); - PutMappingResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); + AcknowledgedResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); assertTrue(putMappingResponse.isAcknowledged()); } @@ -658,7 +653,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase " }\n" + "}", // <1> XContentType.JSON); - PutMappingResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); + AcknowledgedResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); assertTrue(putMappingResponse.isAcknowledged()); } @@ -731,7 +726,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase " }\n" + "}", // <1> XContentType.JSON); - PutMappingResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); + AcknowledgedResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); assertTrue(putMappingResponse.isAcknowledged()); } @@ -1497,7 +1492,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase // end::close-index-request-indicesOptions // tag::close-index-execute - CloseIndexResponse closeIndexResponse = client.indices().close(request, RequestOptions.DEFAULT); + AcknowledgedResponse closeIndexResponse = client.indices().close(request, RequestOptions.DEFAULT); // end::close-index-execute // tag::close-index-response @@ -1506,10 +1501,10 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase assertTrue(acknowledged); // tag::close-index-execute-listener - ActionListener listener = - new ActionListener() { + ActionListener listener = + new ActionListener() { @Override - public void onResponse(CloseIndexResponse closeIndexResponse) { + public void onResponse(AcknowledgedResponse closeIndexResponse) { // <1> } @@ -1649,7 +1644,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase // end::update-aliases-request-masterTimeout // tag::update-aliases-execute - IndicesAliasesResponse indicesAliasesResponse = + AcknowledgedResponse indicesAliasesResponse = client.indices().updateAliases(request, RequestOptions.DEFAULT); // end::update-aliases-execute @@ -1665,10 +1660,10 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase request.addAliasAction(aliasAction); // tag::update-aliases-execute-listener - ActionListener listener = - new ActionListener() { + ActionListener listener = + new ActionListener() { @Override - public void onResponse(IndicesAliasesResponse indicesAliasesResponse) { + public void onResponse(AcknowledgedResponse indicesAliasesResponse) { // <1> } @@ -2056,7 +2051,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase // end::put-settings-request-indicesOptions // tag::put-settings-execute - UpdateSettingsResponse updateSettingsResponse = + AcknowledgedResponse updateSettingsResponse = client.indices().putSettings(request, RequestOptions.DEFAULT); // end::put-settings-execute @@ -2066,11 +2061,11 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase 
assertTrue(acknowledged); // tag::put-settings-execute-listener - ActionListener listener = - new ActionListener() { + ActionListener listener = + new ActionListener() { @Override - public void onResponse(UpdateSettingsResponse updateSettingsResponse) { + public void onResponse(AcknowledgedResponse updateSettingsResponse) { // <1> } @@ -2219,7 +2214,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase request.create(false); // make test happy // tag::put-template-execute - PutIndexTemplateResponse putTemplateResponse = client.indices().putTemplate(request, RequestOptions.DEFAULT); + AcknowledgedResponse putTemplateResponse = client.indices().putTemplate(request, RequestOptions.DEFAULT); // end::put-template-execute // tag::put-template-response @@ -2228,10 +2223,10 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase assertTrue(acknowledged); // tag::put-template-execute-listener - ActionListener listener = - new ActionListener() { + ActionListener listener = + new ActionListener() { @Override - public void onResponse(PutIndexTemplateResponse putTemplateResponse) { + public void onResponse(AcknowledgedResponse putTemplateResponse) { // <1> } @@ -2461,7 +2456,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase .indices("my_index") .type("_doc") .source("my_field", "type=text,analyzer=english"); - PutMappingResponse pmResp = client.indices().putMapping(pmReq, RequestOptions.DEFAULT); + AcknowledgedResponse pmResp = client.indices().putMapping(pmReq, RequestOptions.DEFAULT); assertTrue(pmResp.isAcknowledged()); { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java index 98502e3668a..4702c34c6de 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java @@ -31,7 +31,7 @@ import org.elasticsearch.action.ingest.SimulateDocumentVerboseResult; import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.action.ingest.SimulateProcessorResult; -import org.elasticsearch.action.ingest.WritePipelineResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; @@ -93,7 +93,7 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase { // end::put-pipeline-request-masterTimeout // tag::put-pipeline-execute - WritePipelineResponse response = client.ingest().putPipeline(request, RequestOptions.DEFAULT); // <1> + AcknowledgedResponse response = client.ingest().putPipeline(request, RequestOptions.DEFAULT); // <1> // end::put-pipeline-execute // tag::put-pipeline-response @@ -117,10 +117,10 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase { ); // tag::put-pipeline-execute-listener - ActionListener listener = - new ActionListener() { + ActionListener listener = + new ActionListener() { @Override - public void onResponse(WritePipelineResponse response) { + public void onResponse(AcknowledgedResponse response) { // <1> } @@ -236,7 +236,7 @@ 
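The documentation tests repeat one asynchronous idiom throughout: an ActionListener whose two callbacks become the documented snippet, wrapped in a LatchedActionListener so the test can block until the call completes. The full shape once, with the generic parameter written out; the index name is illustrative:

    ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
        @Override
        public void onResponse(AcknowledgedResponse response) {
            boolean acknowledged = response.isAcknowledged(); // documented success path
        }

        @Override
        public void onFailure(Exception e) {
            // documented failure handling
        }
    };

    // Replace the empty listener by a blocking listener in test
    final CountDownLatch latch = new CountDownLatch(1);
    listener = new LatchedActionListener<>(listener, latch);

    client.indices().deleteAsync(new DeleteIndexRequest("posts"), RequestOptions.DEFAULT, listener);
    assertTrue(latch.await(30L, TimeUnit.SECONDS));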
public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase { // end::delete-pipeline-request-masterTimeout // tag::delete-pipeline-execute - WritePipelineResponse response = client.ingest().deletePipeline(request, RequestOptions.DEFAULT); // <1> + AcknowledgedResponse response = client.ingest().deletePipeline(request, RequestOptions.DEFAULT); // <1> // end::delete-pipeline-execute // tag::delete-pipeline-response @@ -257,10 +257,10 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase { DeletePipelineRequest request = new DeletePipelineRequest("my-pipeline-id"); // tag::delete-pipeline-execute-listener - ActionListener listener = - new ActionListener() { + ActionListener listener = + new ActionListener() { @Override - public void onResponse(WritePipelineResponse response) { + public void onResponse(AcknowledgedResponse response) { // <1> } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/LicensingDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/LicensingDocumentationIT.java index d620adb7131..3737bd70a8d 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/LicensingDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/LicensingDocumentationIT.java @@ -19,11 +19,17 @@ package org.elasticsearch.client.documentation; +import org.elasticsearch.Build; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.common.Booleans; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; +import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; +import org.elasticsearch.protocol.xpack.license.GetLicenseResponse; import org.elasticsearch.protocol.xpack.license.LicensesStatus; import org.elasticsearch.protocol.xpack.license.PutLicenseRequest; import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; @@ -32,6 +38,8 @@ import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.startsWith; @@ -42,7 +50,8 @@ import static org.hamcrest.Matchers.startsWith; */ public class LicensingDocumentationIT extends ESRestHighLevelClientTestCase { - public void testPutLicense() throws Exception { + public void testLicense() throws Exception { + assumeTrue("License is only valid when tested against snapshot/test builds", Build.CURRENT.isSnapshot()); RestHighLevelClient client = highLevelClient(); String license = "{\"license\": {\"uid\":\"893361dc-9749-4997-93cb-802e3d7fa4a8\",\"type\":\"gold\"," + "\"issue_date_in_millis\":1411948800000,\"expiry_date_in_millis\":1914278399999,\"max_nodes\":1,\"issued_to\":\"issued_to\"," + @@ -60,7 +69,7 @@ public class LicensingDocumentationIT extends ESRestHighLevelClientTestCase { request.setLicenseDefinition(license); // <1> request.setAcknowledge(false); // <2> - PutLicenseResponse response = client.xpack().license().putLicense(request, RequestOptions.DEFAULT); + 
PutLicenseResponse response = client.license().putLicense(request, RequestOptions.DEFAULT); //end::put-license-execute //tag::put-license-response @@ -80,7 +89,7 @@ public class LicensingDocumentationIT extends ESRestHighLevelClientTestCase { // tag::put-license-execute-listener ActionListener listener = new ActionListener() { @Override - public void onResponse(PutLicenseResponse indexResponse) { + public void onResponse(PutLicenseResponse putLicenseResponse) { // <1> } @@ -96,11 +105,114 @@ public class LicensingDocumentationIT extends ESRestHighLevelClientTestCase { listener = new LatchedActionListener<>(listener, latch); // tag::put-license-execute-async - client.xpack().license().putLicenseAsync( + client.license().putLicenseAsync( request, RequestOptions.DEFAULT, listener); // <1> // end::put-license-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + + // we cannot actually delete the license, otherwise the remaining tests won't work + if (Booleans.isTrue("true")) { + return; + } + { + //tag::delete-license-execute + DeleteLicenseRequest request = new DeleteLicenseRequest(); + + AcknowledgedResponse response = client.license().deleteLicense(request, RequestOptions.DEFAULT); + //end::delete-license-execute + + //tag::delete-license-response + boolean acknowledged = response.isAcknowledged(); // <1> + //end::delete-license-response + + assertTrue(acknowledged); + } + { + DeleteLicenseRequest request = new DeleteLicenseRequest(); + // tag::delete-license-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(AcknowledgedResponse deleteLicenseResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::delete-license-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::delete-license-execute-async + client.license().deleteLicenseAsync( + request, RequestOptions.DEFAULT, listener); // <1> + // end::delete-license-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + + public void testGetLicense() throws Exception { + RestHighLevelClient client = highLevelClient(); + { + //tag::get-license-execute + GetLicenseRequest request = new GetLicenseRequest(); + + GetLicenseResponse response = client.license().getLicense(request, RequestOptions.DEFAULT); + //end::get-license-execute + + //tag::get-license-response + String currentLicense = response.getLicenseDefinition(); // <1> + //end::get-license-response + + assertThat(currentLicense, containsString("trial")); + assertThat(currentLicense, containsString("client_rest-high-level_integTestCluster")); + } + { + GetLicenseRequest request = new GetLicenseRequest(); + // tag::get-license-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(GetLicenseResponse indexResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::get-license-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::get-license-execute-async + client.license().getLicenseAsync( + request, RequestOptions.DEFAULT, listener); // <1> + // end::get-license-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + { + GetLicenseRequest request = 
new GetLicenseRequest(); + RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder(); + // Make sure that it still works in other formats + builder.addHeader("Accept", randomFrom("application/smile", "application/cbor")); + RequestOptions options = builder.build(); + GetLicenseResponse response = client.license().getLicense(request, options); + String currentLicense = response.getLicenseDefinition(); + assertThat(currentLicense, startsWith("{")); + assertThat(currentLicense, containsString("trial")); + assertThat(currentLicense, containsString("client_rest-high-level_integTestCluster")); + assertThat(currentLicense, endsWith("}")); + } } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationClientDocumentationIT.java new file mode 100644 index 00000000000..c8310be8053 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationClientDocumentationIT.java @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.documentation; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse; +import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired; + +import java.io.IOException; +import java.util.Map; + +/** + * This class is used to generate the Java Migration API documentation. + * You need to wrap your code between two tags like: + * // tag::example + * // end::example + * + * Where example is your tag name. + * + * Then in the documentation, you can extract what is between tag and end tags with + * ["source","java",subs="attributes,callouts,macros"] + * -------------------------------------------------- + * include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[example] + * -------------------------------------------------- + * + * The column width of the code block is 84. If the code contains a line longer + * than 84, the line will be cut and a horizontal scroll bar will be displayed. 
+ * (the code indentation of the tag is not included in the width)
+ */
+public class MigrationClientDocumentationIT extends ESRestHighLevelClientTestCase {
+
+    public void testGetAssistance() throws IOException {
+        RestHighLevelClient client = highLevelClient();
+
+        // tag::get-assistance-request
+        IndexUpgradeInfoRequest request = new IndexUpgradeInfoRequest(); // <1>
+        // end::get-assistance-request
+
+        // tag::get-assistance-request-indices
+        request.indices("index1", "index2"); // <1>
+        // end::get-assistance-request-indices
+
+        request.indices(Strings.EMPTY_ARRAY);
+
+        // tag::get-assistance-request-indices-options
+        request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1>
+        // end::get-assistance-request-indices-options
+
+        // tag::get-assistance-execute
+        IndexUpgradeInfoResponse response = client.migration().getAssistance(request, RequestOptions.DEFAULT);
+        // end::get-assistance-execute
+
+        // tag::get-assistance-response
+        Map<String, UpgradeActionRequired> actions = response.getActions();
+        for (Map.Entry<String, UpgradeActionRequired> entry : actions.entrySet()) {
+            String index = entry.getKey(); // <1>
+            UpgradeActionRequired actionRequired = entry.getValue(); // <2>
+        }
+        // end::get-assistance-response
+    }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
new file mode 100644
index 00000000000..a77d8b43e57
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
@@ -0,0 +1,224 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
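testGetMigrationAssistance earlier in this diff fixes the endpoint that RequestConverters.getMigrationAssistance must build, without showing the converter itself. A sketch consistent with those expectations; serialization of the indices options into query parameters is elided, and the plain string concatenation is an assumption about the implementation:

    static Request getMigrationAssistance(IndexUpgradeInfoRequest upgradeInfoRequest) {
        String endpoint = "/_xpack/migration/assistance";
        String[] indices = upgradeInfoRequest.indices();
        if (indices != null && indices.length > 0) {
            // "/_xpack/migration/assistance/index1,index2" when indices are set
            endpoint += "/" + String.join(",", indices);
        }
        return new Request(HttpGet.METHOD_NAME, endpoint);
    }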
+ */
+package org.elasticsearch.client.documentation;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.LatchedActionListener;
+import org.elasticsearch.client.ESRestHighLevelClientTestCase;
+import org.elasticsearch.client.MachineLearningIT;
+import org.elasticsearch.client.RequestOptions;
+import org.elasticsearch.client.RestHighLevelClient;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest;
+import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse;
+import org.elasticsearch.protocol.xpack.ml.OpenJobRequest;
+import org.elasticsearch.protocol.xpack.ml.OpenJobResponse;
+import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
+import org.elasticsearch.protocol.xpack.ml.PutJobResponse;
+import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig;
+import org.elasticsearch.protocol.xpack.ml.job.config.DataDescription;
+import org.elasticsearch.protocol.xpack.ml.job.config.Detector;
+import org.elasticsearch.protocol.xpack.ml.job.config.Job;
+
+import java.util.Collections;
+import java.util.Date;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+import static org.hamcrest.Matchers.greaterThan;
+
+public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
+
+    public void testCreateJob() throws Exception {
+        RestHighLevelClient client = highLevelClient();
+
+        //tag::x-pack-ml-put-job-detector
+        Detector.Builder detectorBuilder = new Detector.Builder()
+            .setFunction("sum")                                    // <1>
+            .setFieldName("total")                                 // <2>
+            .setDetectorDescription("Sum of total");               // <3>
+        //end::x-pack-ml-put-job-detector
+
+        //tag::x-pack-ml-put-job-analysis-config
+        List<Detector> detectors = Collections.singletonList(detectorBuilder.build());       // <1>
+        AnalysisConfig.Builder analysisConfigBuilder = new AnalysisConfig.Builder(detectors) // <2>
+            .setBucketSpan(TimeValue.timeValueMinutes(10));                                  // <3>
+        //end::x-pack-ml-put-job-analysis-config
+
+        //tag::x-pack-ml-put-job-data-description
+        DataDescription.Builder dataDescriptionBuilder = new DataDescription.Builder()
+            .setTimeField("timestamp");  // <1>
+        //end::x-pack-ml-put-job-data-description
+
+        {
+            String id = "job_1";
+
+            //tag::x-pack-ml-put-job-config
+            Job.Builder jobBuilder = new Job.Builder(id)      // <1>
+                .setAnalysisConfig(analysisConfigBuilder)     // <2>
+                .setDataDescription(dataDescriptionBuilder)   // <3>
+                .setDescription("Total sum of requests");     // <4>
+            //end::x-pack-ml-put-job-config
+
+            //tag::x-pack-ml-put-job-request
+            PutJobRequest request = new PutJobRequest(jobBuilder.build()); // <1>
+            //end::x-pack-ml-put-job-request
+
+            //tag::x-pack-ml-put-job-execute
+            PutJobResponse response = client.machineLearning().putJob(request, RequestOptions.DEFAULT);
+            //end::x-pack-ml-put-job-execute
+
+            //tag::x-pack-ml-put-job-response
+            Date createTime = response.getResponse().getCreateTime(); // <1>
+            //end::x-pack-ml-put-job-response
+            assertThat(createTime.getTime(), greaterThan(0L));
+        }
+        {
+            String id = "job_2";
+            Job.Builder jobBuilder = new Job.Builder(id)
+                .setAnalysisConfig(analysisConfigBuilder)
+                .setDataDescription(dataDescriptionBuilder)
+                .setDescription("Total sum of requests");
+
+            PutJobRequest request = new PutJobRequest(jobBuilder.build());
+            // tag::x-pack-ml-put-job-execute-listener
+            ActionListener<PutJobResponse> listener = new ActionListener<PutJobResponse>() {
+                @Override
+                public void onResponse(PutJobResponse response) {
+                    // <1>
+                }
+
+                @Override
+                public void onFailure(Exception e) {
+                    // <2>
+                }
+            };
+            // end::x-pack-ml-put-job-execute-listener
+
+            // Replace the empty listener by a blocking listener in test
+            final CountDownLatch latch = new CountDownLatch(1);
+            listener = new LatchedActionListener<>(listener, latch);
+
+            // tag::x-pack-ml-put-job-execute-async
+            client.machineLearning().putJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
+            // end::x-pack-ml-put-job-execute-async
+
+            assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        }
+    }
+
+    public void testDeleteJob() throws Exception {
+        RestHighLevelClient client = highLevelClient();
+
+        String jobId = "my-first-machine-learning-job";
+
+        Job job = MachineLearningIT.buildJob(jobId);
+        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+
+        Job secondJob = MachineLearningIT.buildJob("my-second-machine-learning-job");
+        client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
+
+        {
+            //tag::x-pack-delete-ml-job-request
+            DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-first-machine-learning-job");
+            deleteJobRequest.setForce(false); //<1>
+            DeleteJobResponse deleteJobResponse = client.machineLearning().deleteJob(deleteJobRequest, RequestOptions.DEFAULT);
+            //end::x-pack-delete-ml-job-request
+
+            //tag::x-pack-delete-ml-job-response
+            boolean isAcknowledged = deleteJobResponse.isAcknowledged(); //<1>
+            //end::x-pack-delete-ml-job-response
+        }
+        {
+            //tag::x-pack-delete-ml-job-request-listener
+            ActionListener<DeleteJobResponse> listener = new ActionListener<DeleteJobResponse>() {
+                @Override
+                public void onResponse(DeleteJobResponse deleteJobResponse) {
+                    // <1>
+                }
+
+                @Override
+                public void onFailure(Exception e) {
+                    // <2>
+                }
+            };
+            //end::x-pack-delete-ml-job-request-listener
+
+            // Replace the empty listener by a blocking listener in test
+            final CountDownLatch latch = new CountDownLatch(1);
+            listener = new LatchedActionListener<>(listener, latch);
+
+            //tag::x-pack-delete-ml-job-request-async
+            DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-second-machine-learning-job");
+            client.machineLearning().deleteJobAsync(deleteJobRequest, RequestOptions.DEFAULT, listener); // <1>
+            //end::x-pack-delete-ml-job-request-async
+
+            assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        }
+    }
+
+    public void testOpenJob() throws Exception {
+        RestHighLevelClient client = highLevelClient();
+
+        Job job = MachineLearningIT.buildJob("opening-my-first-machine-learning-job");
+        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+
+        Job secondJob = MachineLearningIT.buildJob("opening-my-second-machine-learning-job");
+        client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
+
+        {
+            //tag::x-pack-ml-open-job-request
+            OpenJobRequest openJobRequest = new OpenJobRequest("opening-my-first-machine-learning-job"); //<1>
+            openJobRequest.setTimeout(TimeValue.timeValueMinutes(10)); //<2>
+            //end::x-pack-ml-open-job-request
+
+            //tag::x-pack-ml-open-job-execute
+            OpenJobResponse openJobResponse = client.machineLearning().openJob(openJobRequest, RequestOptions.DEFAULT);
+            boolean isOpened = openJobResponse.isOpened(); //<1>
+            //end::x-pack-ml-open-job-execute
+
+        }
+        {
+            //tag::x-pack-ml-open-job-listener
+            ActionListener<OpenJobResponse> listener = new ActionListener<OpenJobResponse>() {
+                @Override
+                public void onResponse(OpenJobResponse openJobResponse) {
+                    //<1>
+                }
+
+                @Override
+                public void onFailure(Exception e) {
+                    // <2>
+                }
+            };
+            //end::x-pack-ml-open-job-listener
+            OpenJobRequest openJobRequest = new
OpenJobRequest("opening-my-second-machine-learning-job"); + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-ml-open-job-execute-async + client.machineLearning().openJobAsync(openJobRequest, RequestOptions.DEFAULT, listener); //<1> + // end::x-pack-ml-open-job-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java index 922fcb984d9..d1aed55f44e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java @@ -22,27 +22,25 @@ package org.elasticsearch.client.documentation; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; -import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; -import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; -import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStats; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -163,7 +161,7 @@ public class SnapshotClientDocumentationIT extends 
ESRestHighLevelClientTestCase
         // end::create-repository-request-verify
 
         // tag::create-repository-execute
-        PutRepositoryResponse response = client.snapshot().createRepository(request, RequestOptions.DEFAULT);
+        AcknowledgedResponse response = client.snapshot().createRepository(request, RequestOptions.DEFAULT);
         // end::create-repository-execute
 
         // tag::create-repository-response
@@ -178,10 +176,10 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
         PutRepositoryRequest request = new PutRepositoryRequest(repositoryName);
 
         // tag::create-repository-execute-listener
-        ActionListener<PutRepositoryResponse> listener =
-            new ActionListener<PutRepositoryResponse>() {
+        ActionListener<AcknowledgedResponse> listener =
+            new ActionListener<AcknowledgedResponse>() {
             @Override
-            public void onResponse(PutRepositoryResponse putRepositoryResponse) {
+            public void onResponse(AcknowledgedResponse putRepositoryResponse) {
                 // <1>
             }
 
@@ -388,7 +386,7 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
         // end::delete-repository-request-timeout
 
         // tag::delete-repository-execute
-        DeleteRepositoryResponse response = client.snapshot().deleteRepository(request, RequestOptions.DEFAULT);
+        AcknowledgedResponse response = client.snapshot().deleteRepository(request, RequestOptions.DEFAULT);
         // end::delete-repository-execute
 
         // tag::delete-repository-response
@@ -403,10 +401,10 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
         DeleteRepositoryRequest request = new DeleteRepositoryRequest();
 
         // tag::delete-repository-execute-listener
-        ActionListener<DeleteRepositoryResponse> listener =
-            new ActionListener<DeleteRepositoryResponse>() {
+        ActionListener<AcknowledgedResponse> listener =
+            new ActionListener<AcknowledgedResponse>() {
             @Override
-            public void onResponse(DeleteRepositoryResponse deleteRepositoryResponse) {
+            public void onResponse(AcknowledgedResponse deleteRepositoryResponse) {
                 // <1>
             }
 
@@ -752,7 +750,7 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
         // end::delete-snapshot-request-masterTimeout
 
         // tag::delete-snapshot-execute
-        DeleteSnapshotResponse response = client.snapshot().delete(request, RequestOptions.DEFAULT);
+        AcknowledgedResponse response = client.snapshot().delete(request, RequestOptions.DEFAULT);
         // end::delete-snapshot-execute
 
         // tag::delete-snapshot-response
@@ -767,10 +765,10 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
         DeleteSnapshotRequest request = new DeleteSnapshotRequest();
 
         // tag::delete-snapshot-execute-listener
-        ActionListener<DeleteSnapshotResponse> listener =
-            new ActionListener<DeleteSnapshotResponse>() {
+        ActionListener<AcknowledgedResponse> listener =
+            new ActionListener<AcknowledgedResponse>() {
             @Override
-            public void onResponse(DeleteSnapshotResponse deleteSnapshotResponse) {
+            public void onResponse(AcknowledgedResponse deleteSnapshotResponse) {
                 // <1>
             }
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java
index 0aadae73ce6..b1374ca85b6 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java
@@ -23,9 +23,9 @@ import org.apache.http.util.EntityUtils;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.LatchedActionListener;
 import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
-import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptResponse;
 import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.ESRestHighLevelClientTestCase;
 import org.elasticsearch.client.RequestOptions;
 import org.elasticsearch.client.Response;
@@ -154,7 +154,7 @@ public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase
         // end::delete-stored-script-request-timeout
 
         // tag::delete-stored-script-execute
-        DeleteStoredScriptResponse deleteResponse = client.deleteScript(deleteRequest, RequestOptions.DEFAULT);
+        AcknowledgedResponse deleteResponse = client.deleteScript(deleteRequest, RequestOptions.DEFAULT);
         // end::delete-stored-script-execute
 
         // tag::delete-stored-script-response
@@ -164,10 +164,10 @@ public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase
         putStoredScript("calculate-score", scriptSource);
 
         // tag::delete-stored-script-execute-listener
-        ActionListener<DeleteStoredScriptResponse> listener =
-            new ActionListener<DeleteStoredScriptResponse>() {
+        ActionListener<AcknowledgedResponse> listener =
+            new ActionListener<AcknowledgedResponse>() {
             @Override
-            public void onResponse(DeleteStoredScriptResponse response) {
+            public void onResponse(AcknowledgedResponse response) {
                 // <1>
             }
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java
index 47f8510b746..707997d1f31 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java
@@ -49,7 +49,7 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
             "}");
         PutWatchRequest request = new PutWatchRequest("my_watch_id", watch, XContentType.JSON);
         request.setActive(false); // <1>
-        PutWatchResponse response = client.xpack().watcher().putWatch(request, RequestOptions.DEFAULT);
+        PutWatchResponse response = client.watcher().putWatch(request, RequestOptions.DEFAULT);
         //end::x-pack-put-watch-execute
 
         //tag::x-pack-put-watch-response
@@ -85,7 +85,7 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
         listener = new LatchedActionListener<>(listener, latch);
 
         // tag::x-pack-put-watch-execute-async
-        client.xpack().watcher().putWatchAsync(request, RequestOptions.DEFAULT, listener); // <1>
+        client.watcher().putWatchAsync(request, RequestOptions.DEFAULT, listener); // <1>
         // end::x-pack-put-watch-execute-async
 
         assertTrue(latch.await(30L, TimeUnit.SECONDS));
@@ -94,7 +94,7 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
         {
             //tag::x-pack-delete-watch-execute
             DeleteWatchRequest request = new DeleteWatchRequest("my_watch_id");
-            DeleteWatchResponse response = client.xpack().watcher().deleteWatch(request, RequestOptions.DEFAULT);
+            DeleteWatchResponse response = client.watcher().deleteWatch(request, RequestOptions.DEFAULT);
             //end::x-pack-delete-watch-execute
 
             //tag::x-pack-delete-watch-response
@@ -125,7 +125,7 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
         listener = new LatchedActionListener<>(listener, latch);
 
         // tag::x-pack-delete-watch-execute-async
-        client.xpack().watcher().deleteWatchAsync(request, RequestOptions.DEFAULT, listener); // <1>
+        
client.watcher().deleteWatchAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::x-pack-delete-watch-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); diff --git a/client/sniffer/licenses/jackson-core-2.8.10.jar.sha1 b/client/sniffer/licenses/jackson-core-2.8.10.jar.sha1 deleted file mode 100644 index a322d371e26..00000000000 --- a/client/sniffer/licenses/jackson-core-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eb21a035c66ad307e66ec8fce37f5d50fd62d039 \ No newline at end of file diff --git a/client/sniffer/licenses/jackson-core-2.8.11.jar.sha1 b/client/sniffer/licenses/jackson-core-2.8.11.jar.sha1 new file mode 100644 index 00000000000..e7ad1e74ed6 --- /dev/null +++ b/client/sniffer/licenses/jackson-core-2.8.11.jar.sha1 @@ -0,0 +1 @@ +876ead1db19f0c9e79c9789273a3ef8c6fd6c29b \ No newline at end of file diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle index b84bf1df2fe..b515c606cc3 100644 --- a/distribution/bwc/build.gradle +++ b/distribution/bwc/build.gradle @@ -157,7 +157,7 @@ subprojects { environment('JAVA_HOME', getJavaHome(it, 8)) } else if ("6.2".equals(bwcBranch)) { environment('JAVA_HOME', getJavaHome(it, 9)) - } else if (["6.3", "6.x"].contains(bwcBranch)) { + } else if (["6.3", "6.4", "6.x"].contains(bwcBranch)) { environment('JAVA_HOME', getJavaHome(it, 10)) } else { environment('JAVA_HOME', project.compilerJavaHome) diff --git a/distribution/packages/src/common/systemd/elasticsearch.service b/distribution/packages/src/common/systemd/elasticsearch.service index 409f04f76d0..a4d67d8830a 100644 --- a/distribution/packages/src/common/systemd/elasticsearch.service +++ b/distribution/packages/src/common/systemd/elasticsearch.service @@ -6,6 +6,7 @@ After=network-online.target [Service] RuntimeDirectory=elasticsearch +PrivateTmp=true Environment=ES_HOME=/usr/share/elasticsearch Environment=ES_PATH_CONF=${path.conf} Environment=PID_DIR=/var/run/elasticsearch diff --git a/docs/build.gradle b/docs/build.gradle index a67c0217490..8ee5c8a8e53 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -37,6 +37,13 @@ integTestCluster { extraConfigFile 'hunspell/en_US/en_US.dic', '../server/src/test/resources/indices/analyze/conf_dir/hunspell/en_US/en_US.dic' // Whitelist reindexing from the local node so we can test it. setting 'reindex.remote.whitelist', '127.0.0.1:*' + + // TODO: remove this for 7.0, this exists to allow the doc examples in 6.x to continue using the defaults + systemProperty 'es.scripting.use_java_time', 'false' + systemProperty 'es.scripting.update.ctx_in_params', 'false' + + // TODO: remove this deprecation compatibility setting for 7.0 + systemProperty 'es.aggregations.enable_scripted_metric_agg_param', 'false' } // remove when https://github.com/elastic/elasticsearch/issues/31305 is fixed @@ -396,25 +403,25 @@ buildRestTests.setups['stored_scripted_metric_script'] = ''' - do: put_script: id: "my_init_script" - body: { "script": { "lang": "painless", "source": "params._agg.transactions = []" } } + body: { "script": { "lang": "painless", "source": "state.transactions = []" } } - match: { acknowledged: true } - do: put_script: id: "my_map_script" - body: { "script": { "lang": "painless", "source": "params._agg.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)" } } + body: { "script": { "lang": "painless", "source": "state.transactions.add(doc.type.value == 'sale' ? 
doc.amount.value : -1 * doc.amount.value)" } }
   - match: { acknowledged: true }
   - do:
       put_script:
         id: "my_combine_script"
-        body: { "script": { "lang": "painless", "source": "double profit = 0;for (t in params._agg.transactions) { profit += t; } return profit" } }
+        body: { "script": { "lang": "painless", "source": "double profit = 0;for (t in state.transactions) { profit += t; } return profit" } }
   - match: { acknowledged: true }
   - do:
       put_script:
         id: "my_reduce_script"
-        body: { "script": { "lang": "painless", "source": "double profit = 0;for (a in params._aggs) { profit += a; } return profit" } }
+        body: { "script": { "lang": "painless", "source": "double profit = 0;for (a in states) { profit += a; } return profit" } }
   - match: { acknowledged: true }
 '''
diff --git a/docs/java-api/index.asciidoc b/docs/java-api/index.asciidoc
index 4fb7db4c4ab..5c3a94d57f4 100644
--- a/docs/java-api/index.asciidoc
+++ b/docs/java-api/index.asciidoc
@@ -81,7 +81,7 @@ You need to also include Log4j 2 dependencies:
 <dependency>
     <groupId>org.apache.logging.log4j</groupId>
     <artifactId>log4j-core</artifactId>
-    <version>2.9.1</version>
+    <version>2.11.1</version>
 </dependency>
 --------------------------------------------------
 
@@ -109,7 +109,7 @@ If you want to use another logger than Log4j 2, you can use http://www.slf4j.org
 <dependency>
     <groupId>org.apache.logging.log4j</groupId>
     <artifactId>log4j-to-slf4j</artifactId>
-    <version>2.9.1</version>
+    <version>2.11.1</version>
 </dependency>
 <dependency>
     <groupId>org.slf4j</groupId>
diff --git a/docs/java-api/query-dsl/percolate-query.asciidoc b/docs/java-api/query-dsl/percolate-query.asciidoc
index e1968ae456a..9afce0842b9 100644
--- a/docs/java-api/query-dsl/percolate-query.asciidoc
+++ b/docs/java-api/query-dsl/percolate-query.asciidoc
@@ -49,7 +49,7 @@ XContentBuilder docBuilder = XContentFactory.jsonBuilder().startObject();
 docBuilder.field("content", "This is amazing!");
 docBuilder.endObject(); //End of the JSON root object
 
-PercolateQueryBuilder percolateQuery = new PercolateQueryBuilder("query", "docs", docBuilder.bytes());
+PercolateQueryBuilder percolateQuery = new PercolateQueryBuilder("query", "docs", BytesReference.bytes(docBuilder));
 
 // Percolate, by executing the percolator query in the query dsl:
 SearchResponse response = client().prepareSearch("myIndexName")
diff --git a/docs/java-rest/high-level/licensing/delete-license.asciidoc b/docs/java-rest/high-level/licensing/delete-license.asciidoc
new file mode 100644
index 00000000000..d9aec6e57a1
--- /dev/null
+++ b/docs/java-rest/high-level/licensing/delete-license.asciidoc
@@ -0,0 +1,51 @@
+[[java-rest-high-delete-license]]
+=== Delete License
+
+[[java-rest-high-delete-license-execution]]
+==== Execution
+
+The license can be deleted using the `deleteLicense()` method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/LicensingDocumentationIT.java[delete-license-execute]
+--------------------------------------------------
+
+[[java-rest-high-delete-license-response]]
+==== Response
+
+The returned `DeleteLicenseResponse` contains the `acknowledged` flag, which
+is true if the request was processed by all nodes.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/LicensingDocumentationIT.java[delete-license-response]
+--------------------------------------------------
+<1> Check the acknowledged flag. It should be true if license deletion is acknowledged.
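Putting the two snippets together, a minimal end-to-end sketch of the synchronous flow could look like the following. The `DeleteLicenseRequest`/`DeleteLicenseResponse` names come from this change; the `license()` accessor on `RestHighLevelClient` is an assumption here, mirroring the `watcher()` accessor introduced elsewhere in this PR, so see `LicensingDocumentationIT` for the authoritative calls:

["source","java"]
--------------------------------------------------
// Hedged sketch of the synchronous delete-license flow described above.
// Assumption: the client exposes a license() accessor analogous to watcher();
// LicensingDocumentationIT in this change is the authoritative reference.
DeleteLicenseRequest request = new DeleteLicenseRequest();
DeleteLicenseResponse response = client.license().deleteLicense(request, RequestOptions.DEFAULT);
boolean acknowledged = response.isAcknowledged(); // true once all nodes processed the deletion
--------------------------------------------------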
+
+[[java-rest-high-delete-license-async]]
+==== Asynchronous Execution
+
+This request can be executed asynchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/LicensingDocumentationIT.java[delete-license-execute-async]
+--------------------------------------------------
+<1> The `DeleteLicenseRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for `DeleteLicenseResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/LicensingDocumentationIT.java[delete-license-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument
+<2> Called in case of failure. The raised exception is provided as an argument
diff --git a/docs/java-rest/high-level/licensing/get-license.asciidoc b/docs/java-rest/high-level/licensing/get-license.asciidoc
new file mode 100644
index 00000000000..17eb89450fb
--- /dev/null
+++ b/docs/java-rest/high-level/licensing/get-license.asciidoc
@@ -0,0 +1,50 @@
+[[java-rest-high-get-license]]
+=== Get License
+
+[[java-rest-high-get-license-execution]]
+==== Execution
+
+The license can be retrieved using the `getLicense()` method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/LicensingDocumentationIT.java[get-license-execute]
+--------------------------------------------------
+
+[[java-rest-high-get-license-response]]
+==== Response
+
+The returned `GetLicenseResponse` contains the license in JSON format.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/LicensingDocumentationIT.java[get-license-response]
+--------------------------------------------------
+<1> The text of the license.
+
+[[java-rest-high-get-license-async]]
+==== Asynchronous Execution
+
+This request can be executed asynchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/LicensingDocumentationIT.java[get-license-execute-async]
+--------------------------------------------------
+<1> The `GetLicenseRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for `GetLicenseResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/LicensingDocumentationIT.java[get-license-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument
+<2> Called in case of failure.
The raised exception is provided as an argument
diff --git a/docs/java-rest/high-level/migration/get-assistance.asciidoc b/docs/java-rest/high-level/migration/get-assistance.asciidoc
new file mode 100644
index 00000000000..20f857eb1fb
--- /dev/null
+++ b/docs/java-rest/high-level/migration/get-assistance.asciidoc
@@ -0,0 +1,49 @@
+[[java-rest-high-migration-get-assistance]]
+=== Migration Get Assistance
+
+[[java-rest-high-migration-get-assistance-request]]
+==== Index Upgrade Info Request
+
+An `IndexUpgradeInfoRequest` does not require any argument:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[get-assistance-request]
+--------------------------------------------------
+<1> Create a new request instance
+
+==== Optional arguments
+The following arguments can optionally be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[get-assistance-request-indices]
+--------------------------------------------------
+<1> Set the indices for the request
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[get-assistance-request-indices-options]
+--------------------------------------------------
+<1> Set the `IndicesOptions` to control how unavailable indices are resolved and
+how wildcard expressions are expanded
+
+[[java-rest-high-migration-get-assistance-execution]]
+==== Execution
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[get-assistance-execute]
+--------------------------------------------------
+
+[[java-rest-high-migration-get-assistance-response]]
+==== Response
+
+The returned `IndexUpgradeInfoResponse` contains the actions required for each index.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[get-assistance-response]
+--------------------------------------------------
+<1> Retrieve the index
+<2> Retrieve the action required for the migration of the current index
diff --git a/docs/java-rest/high-level/x-pack/x-pack-info.asciidoc b/docs/java-rest/high-level/miscellaneous/x-pack-info.asciidoc
similarity index 100%
rename from docs/java-rest/high-level/x-pack/x-pack-info.asciidoc
rename to docs/java-rest/high-level/miscellaneous/x-pack-info.asciidoc
diff --git a/docs/java-rest/high-level/x-pack/x-pack-usage.asciidoc b/docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc
similarity index 98%
rename from docs/java-rest/high-level/x-pack/x-pack-usage.asciidoc
rename to docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc
index 0927ae71c0b..c1e5ccf13e2 100644
--- a/docs/java-rest/high-level/x-pack/x-pack-usage.asciidoc
+++ b/docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc
@@ -12,7 +12,7 @@ retrieved using the `usage()` method:
 include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-usage-execute]
 --------------------------------------------------
 
-[[java-rest-high-x-pack-info-response]]
+[[java-rest-high-x-pack-usage-response]]
 ==== Response
 
 The returned `XPackUsageResponse` contains a `Map` keyed by feature name.
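For orientation, a hedged sketch of the full usage round trip follows. The `XPackUsageRequest` type and the `xpack().usage()` entry point are assumed from the 6.4-era client, and the exact getter name on `XPackUsageResponse` may differ:

["source","java"]
--------------------------------------------------
// Hypothetical sketch of fetching X-Pack usage and inspecting one feature.
// XPackUsageRequest and client.xpack().usage(...) are assumed entry points.
XPackUsageRequest request = new XPackUsageRequest();
XPackUsageResponse response = client.xpack().usage(request, RequestOptions.DEFAULT);
Map<String, Map<String, Object>> usages = response.getUsages(); // keyed by feature name
Map<String, Object> watcherUsage = usages.get("watcher");       // per-feature details
--------------------------------------------------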
diff --git a/docs/java-rest/high-level/ml/delete-job.asciidoc b/docs/java-rest/high-level/ml/delete-job.asciidoc
new file mode 100644
index 00000000000..44a6a479409
--- /dev/null
+++ b/docs/java-rest/high-level/ml/delete-job.asciidoc
@@ -0,0 +1,49 @@
+[[java-rest-high-x-pack-ml-delete-job]]
+=== Delete Job API
+
+[[java-rest-high-x-pack-machine-learning-delete-job-request]]
+==== Delete Job Request
+
+A `DeleteJobRequest` object requires a non-null `jobId` and can optionally set `force`.
+It can be executed as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-request]
+---------------------------------------------------
+<1> Set to `true` to forcefully delete an opened job;
+this is quicker than closing and deleting the job.
+Defaults to `false`
+
+[[java-rest-high-x-pack-machine-learning-delete-job-response]]
+==== Delete Job Response
+
+The returned `DeleteJobResponse` object indicates the acknowledgement of the request:
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-response]
+---------------------------------------------------
+<1> `isAcknowledged` indicates whether the deletion request was acknowledged
+
+[[java-rest-high-x-pack-machine-learning-delete-job-async]]
+==== Delete Job Asynchronously
+
+This request can also be made asynchronously.
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-request-async]
+---------------------------------------------------
+<1> The `DeleteJobRequest` to execute and the `ActionListener` to alert on completion or error.
+
+The deletion request returns immediately. Once the request is completed, the `ActionListener` is
+called back using the `onResponse` or `onFailure` method. The latter indicates that a failure
+occurred while making the request.
+
+A typical listener for a `DeleteJobResponse` could be defined as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-request-listener]
+---------------------------------------------------
+<1> The action to be taken when it is completed
+<2> What to do when a failure occurs
diff --git a/docs/java-rest/high-level/ml/open-job.asciidoc b/docs/java-rest/high-level/ml/open-job.asciidoc
new file mode 100644
index 00000000000..ad575121818
--- /dev/null
+++ b/docs/java-rest/high-level/ml/open-job.asciidoc
@@ -0,0 +1,55 @@
+[[java-rest-high-x-pack-ml-open-job]]
+=== Open Job API
+
+The Open Job API provides the ability to open {ml} jobs in the cluster.
+It accepts an `OpenJobRequest` object and responds
+with an `OpenJobResponse` object.
+
+[[java-rest-high-x-pack-ml-open-job-request]]
+==== Open Job Request
+
+An `OpenJobRequest` object gets created with an existing non-null `jobId`.
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-open-job-request] +-------------------------------------------------- +<1> Constructing a new request referencing an existing `jobId` +<2> Optionally setting the `timeout` value for how long the +execution should wait for the job to be opened. + +[[java-rest-high-x-pack-ml-open-job-execution]] +==== Execution + +The request can be executed through the `MachineLearningClient` contained +in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-open-job-execute] +-------------------------------------------------- +<1> `isOpened()` from the `OpenJobResponse` indicates if the job was successfully +opened or not. + +[[java-rest-high-x-pack-ml-open-job-execution-async]] +==== Asynchronous Execution + +The request can also be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-open-job-execute-async] +-------------------------------------------------- +<1> The `OpenJobRequest` to execute and the `ActionListener` to use when +the execution completes + +The method does not block and returns immediately. The passed `ActionListener` is used +to notify the caller of completion. A typical `ActionListner` for `OpenJobResponse` may +look like + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-open-job-listener] +-------------------------------------------------- +<1> `onResponse` is called back when the action is completed successfully +<2> `onFailure` is called back when some unexpected error occurs diff --git a/docs/java-rest/high-level/ml/put-job.asciidoc b/docs/java-rest/high-level/ml/put-job.asciidoc new file mode 100644 index 00000000000..d51bb63d405 --- /dev/null +++ b/docs/java-rest/high-level/ml/put-job.asciidoc @@ -0,0 +1,161 @@ +[[java-rest-high-x-pack-ml-put-job]] +=== Put Job API + +The Put Job API can be used to create a new {ml} job +in the cluster. The API accepts a `PutJobRequest` object +as a request and returns a `PutJobResponse`. + +[[java-rest-high-x-pack-ml-put-job-request]] +==== Put Job Request + +A `PutJobRequest` requires the following argument: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-request] +-------------------------------------------------- +<1> The configuration of the {ml} job to create as a `Job` + +[[java-rest-high-x-pack-ml-put-job-config]] +==== Job Configuration + +The `Job` object contains all the details about the {ml} job +configuration. 
+
+A `Job` requires the following arguments:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-config]
+--------------------------------------------------
+<1> The job ID
+<2> An analysis configuration
+<3> A data description
+<4> Optionally, a human-readable description
+
+[[java-rest-high-x-pack-ml-put-job-analysis-config]]
+==== Analysis Configuration
+
+The analysis configuration of the {ml} job is defined in the `AnalysisConfig`.
+`AnalysisConfig` reflects all the configuration
+settings that can be defined using the REST API.
+
+Using the REST API, we could define this analysis configuration:
+
+[source,js]
+--------------------------------------------------
+"analysis_config" : {
+    "bucket_span" : "10m",
+    "detectors" : [
+        {
+            "detector_description" : "Sum of total",
+            "function" : "sum",
+            "field_name" : "total"
+        }
+    ]
+}
+--------------------------------------------------
+// NOTCONSOLE
+
+When using the `AnalysisConfig` object with the high level REST client, the list
+of detectors must be built first.
+
+An example of building a `Detector` instance is as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-detector]
+--------------------------------------------------
+<1> The function to use
+<2> The field to apply the function to
+<3> Optionally, a human-readable description
+
+Then the same configuration would be:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-analysis-config]
+--------------------------------------------------
+<1> Create a list of detectors
+<2> Pass the list of detectors to the analysis config builder constructor
+<3> The bucket span
+
+[[java-rest-high-x-pack-ml-put-job-data-description]]
+==== Data Description
+
+After defining the analysis config, the next thing to define is the
+data description, using a `DataDescription` instance. `DataDescription`
+reflects all the configuration settings that can be defined using the
+REST API.
+
+Using the REST API, we could define this data description:
+
+[source,js]
+--------------------------------------------------
+"data_description" : {
+    "time_field" : "timestamp"
+}
+--------------------------------------------------
+// NOTCONSOLE
+
+Using the `DataDescription` object and the high level REST client, the same
+configuration would be:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-data-description]
+--------------------------------------------------
+<1> The time field
+
+[[java-rest-high-x-pack-ml-put-job-execution]]
+==== Execution
+
+The Put Job API can be executed through a `MachineLearningClient`
+instance. Such an instance can be retrieved from a `RestHighLevelClient`
+using the `machineLearning()` method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-execute]
+--------------------------------------------------
+
+[[java-rest-high-x-pack-ml-put-job-response]]
+==== Response
+
+The returned `PutJobResponse` contains the full representation of
+the new {ml} job if it has been successfully created. This will
+contain the creation time and other fields initialized using
+default values:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-response]
+--------------------------------------------------
+<1> The creation time is a field that was not passed in the `Job` object in the request
+
+[[java-rest-high-x-pack-ml-put-job-async]]
+==== Asynchronous Execution
+
+This request can be executed asynchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-execute-async]
+--------------------------------------------------
+<1> The `PutJobRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for `PutJobResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument
+<2> Called in case of failure. The raised exception is provided as an argument
diff --git a/docs/java-rest/high-level/search/explain.asciidoc b/docs/java-rest/high-level/search/explain.asciidoc
index 9e55ad77ea2..4d8c04448b1 100644
--- a/docs/java-rest/high-level/search/explain.asciidoc
+++ b/docs/java-rest/high-level/search/explain.asciidoc
@@ -80,7 +80,7 @@ A typical listener for `ExplainResponse` is constructed as follows:
 include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-execute-listener]
 --------------------------------------------------
 <1> Called when the execution is successfully completed.
-<2> Called when the whole `FieldCapabilitiesRequest` fails.
+<2> Called when the whole `ExplainRequest` fails.
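For comparison with the listener above, a short sketch of the synchronous variant of the same call may help; the index, type, id, and query are illustrative placeholders, not values from this change:

["source","java"]
--------------------------------------------------
// Sketch of the synchronous explain call, with placeholder index/type/id/query.
ExplainRequest request = new ExplainRequest("contributors", "doc", "1");
request.query(QueryBuilders.termQuery("user", "tanguy"));
ExplainResponse response = client.explain(request, RequestOptions.DEFAULT);
boolean match = response.isMatch();                   // did the document match the query?
Explanation explanation = response.getExplanation();  // scoring breakdown for the match
--------------------------------------------------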
 [[java-rest-high-explain-response]]
 ==== ExplainResponse
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc
index 63aef865955..6bcb736243a 100644
--- a/docs/java-rest/high-level/supported-apis.asciidoc
+++ b/docs/java-rest/high-level/supported-apis.asciidoc
@@ -54,14 +54,12 @@ The Java High Level REST Client supports the following Miscellaneous APIs:
 * <<java-rest-high-main>>
 * <<java-rest-high-ping>>
 * <<java-rest-high-x-pack-info>>
-* <<java-rest-high-x-pack-watcher-put-watch>>
-* <<java-rest-high-x-pack-watcher-delete-watch>>
+* <<java-rest-high-x-pack-usage>>
 
 include::miscellaneous/main.asciidoc[]
 include::miscellaneous/ping.asciidoc[]
-include::x-pack/x-pack-info.asciidoc[]
-include::x-pack/watcher/put-watch.asciidoc[]
-include::x-pack/watcher/delete-watch.asciidoc[]
+include::miscellaneous/x-pack-info.asciidoc[]
+include::miscellaneous/x-pack-usage.asciidoc[]
 
 == Indices APIs
 
@@ -190,11 +188,44 @@ The Java High Level REST Client supports the following Scripts APIs:
 include::script/get_script.asciidoc[]
 include::script/delete_script.asciidoc[]
 
-
 == Licensing APIs
 
 The Java High Level REST Client supports the following Licensing APIs:
 
 * <<java-rest-high-put-license>>
+* <<java-rest-high-get-license>>
+* <<java-rest-high-delete-license>>
 
 include::licensing/put-license.asciidoc[]
+include::licensing/get-license.asciidoc[]
+include::licensing/delete-license.asciidoc[]
+
+== Machine Learning APIs
+
+The Java High Level REST Client supports the following Machine Learning APIs:
+
+* <<java-rest-high-x-pack-ml-put-job>>
+* <<java-rest-high-x-pack-ml-delete-job>>
+* <<java-rest-high-x-pack-ml-open-job>>
+
+include::ml/put-job.asciidoc[]
+include::ml/delete-job.asciidoc[]
+include::ml/open-job.asciidoc[]
+
+== Migration APIs
+
+The Java High Level REST Client supports the following Migration APIs:
+
+* <<java-rest-high-migration-get-assistance>>
+
+include::migration/get-assistance.asciidoc[]
+
+== Watcher APIs
+
+The Java High Level REST Client supports the following Watcher APIs:
+
+* <<java-rest-high-x-pack-watcher-put-watch>>
+* <<java-rest-high-x-pack-watcher-delete-watch>>
+
+include::watcher/put-watch.asciidoc[]
+include::watcher/delete-watch.asciidoc[]
diff --git a/docs/java-rest/high-level/x-pack/watcher/delete-watch.asciidoc b/docs/java-rest/high-level/watcher/delete-watch.asciidoc
similarity index 98%
rename from docs/java-rest/high-level/x-pack/watcher/delete-watch.asciidoc
rename to docs/java-rest/high-level/watcher/delete-watch.asciidoc
index d5f35817558..615337ba317 100644
--- a/docs/java-rest/high-level/x-pack/watcher/delete-watch.asciidoc
+++ b/docs/java-rest/high-level/watcher/delete-watch.asciidoc
@@ -1,5 +1,5 @@
 [[java-rest-high-x-pack-watcher-delete-watch]]
-=== X-Pack Delete Watch API
+=== Delete Watch API
 
 [[java-rest-high-x-pack-watcher-delete-watch-execution]]
 ==== Execution
diff --git a/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc b/docs/java-rest/high-level/watcher/put-watch.asciidoc
similarity index 98%
rename from docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc
rename to docs/java-rest/high-level/watcher/put-watch.asciidoc
index a76ba407a1a..e5ee87bea34 100644
--- a/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc
+++ b/docs/java-rest/high-level/watcher/put-watch.asciidoc
@@ -1,5 +1,5 @@
 [[java-rest-high-x-pack-watcher-put-watch]]
-=== X-Pack Put Watch API
+=== Put Watch API
 
 [[java-rest-high-x-pack-watcher-put-watch-execution]]
 ==== Execution
diff --git a/docs/painless/painless-contexts.asciidoc b/docs/painless/painless-contexts.asciidoc
index 8b8a3b0eec6..cc7bc752ec6 100644
--- a/docs/painless/painless-contexts.asciidoc
+++ b/docs/painless/painless-contexts.asciidoc
@@ -14,6 +14,8 @@ specialized code may define new ways to use a Painless script.
 |====
 | Name | Painless Documentation | Elasticsearch Documentation
+| Ingest processor | <<painless-ingest-processor-context, Painless Documentation>>
+  | {ref}/script-processor.html[Elasticsearch Documentation]
 | Update | <<painless-update-context, Painless Documentation>>
   | {ref}/docs-update.html[Elasticsearch Documentation]
 | Update by query | <<painless-update-by-query-context, Painless Documentation>>
@@ -44,12 +46,12 @@ specialized code may define new ways to use a Painless script.
   | {ref}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
 | Bucket aggregation | <<painless-bucket-agg-context, Painless Documentation>>
   | {ref}/search-aggregations-pipeline-bucket-script-aggregation.html[Elasticsearch Documentation]
-| Ingest processor | <<painless-ingest-processor-context, Painless Documentation>>
-  | {ref}/script-processor.html[Elasticsearch Documentation]
 | Watcher condition | <<painless-watcher-condition-context, Painless Documentation>>
   | {xpack-ref}/condition-script.html[Elasticsearch Documentation]
 | Watcher transform | <<painless-watcher-transform-context, Painless Documentation>>
   | {xpack-ref}/transform-script.html[Elasticsearch Documentation]
 |====
 
+include::painless-contexts/painless-context-examples.asciidoc[]
+
 include::painless-contexts/index.asciidoc[]
diff --git a/docs/painless/painless-contexts/index.asciidoc b/docs/painless/painless-contexts/index.asciidoc
index 64e4326e052..a9d3982133e 100644
--- a/docs/painless/painless-contexts/index.asciidoc
+++ b/docs/painless/painless-contexts/index.asciidoc
@@ -1,3 +1,5 @@
+include::painless-ingest-processor-context.asciidoc[]
+
 include::painless-update-context.asciidoc[]
 
 include::painless-update-by-query-context.asciidoc[]
@@ -28,8 +30,6 @@ include::painless-metric-agg-reduce-context.asciidoc[]
 
 include::painless-bucket-agg-context.asciidoc[]
 
-include::painless-ingest-processor-context.asciidoc[]
-
 include::painless-watcher-condition-context.asciidoc[]
 
 include::painless-watcher-transform-context.asciidoc[]
diff --git a/docs/painless/painless-contexts/painless-context-examples.asciidoc b/docs/painless/painless-contexts/painless-context-examples.asciidoc
new file mode 100644
index 00000000000..469f425d1d8
--- /dev/null
+++ b/docs/painless/painless-contexts/painless-context-examples.asciidoc
@@ -0,0 +1,80 @@
+[[painless-context-examples]]
+=== Context examples
+
+To run the examples, index the sample seat data into Elasticsearch. The examples
+must be run sequentially to work correctly.
+
+. Download the
+https://download.elastic.co/demos/painless/contexts/seats.json[seat data]. This
+data set contains booking information for a collection of plays. Each document
+represents a single seat for a play at a particular theater on a specific date
+and time.
++
+Each document contains the following fields:
++
+`theatre` ({ref}/keyword.html[`keyword`])::
+  The name of the theater the play is in.
+`play` ({ref}/text.html[`text`])::
+  The name of the play.
+`actors` ({ref}/text.html[`text`])::
+  A list of actors in the play.
+`row` ({ref}/number.html[`integer`])::
+  The row of the seat.
+`number` ({ref}/number.html[`integer`])::
+  The number of the seat within a row.
+`cost` ({ref}/number.html[`double`])::
+  The cost of the ticket for the seat.
+`sold` ({ref}/boolean.html[`boolean`])::
+  Whether or not the seat is sold.
+`datetime` ({ref}/date.html[`date`])::
+  The date and time of the play as a date object.
+`date` ({ref}/keyword.html[`keyword`])::
+  The date of the play as a keyword.
+`time` ({ref}/keyword.html[`keyword`])::
+  The time of the play as a keyword.
+
+. {defguide}/running-elasticsearch.html[Start] Elasticsearch. Note these
+examples assume Elasticsearch and Kibana are running locally. To use the Console
+editor with a remote Kibana instance, click the settings icon and enter the
+Console URL. To submit a cURL request to a remote Elasticsearch instance, edit
+the request URL.
+
+. Create {ref}/mapping.html[mappings] for the sample data:
++
+[source,js]
+----
+PUT /seats
+{
+  "mappings": {
+    "seat": {
+      "properties": {
+        "theatre":  { "type": "keyword" },
+        "play":     { "type": "text"    },
+        "actors":   { "type": "text"    },
+        "row":      { "type": "integer" },
+        "number":   { "type": "integer" },
+        "cost":     { "type": "double"  },
+        "sold":     { "type": "boolean" },
+        "datetime": { "type": "date"    },
+        "date":     { "type": "keyword" },
+        "time":     { "type": "keyword" }
+      }
+    }
+  }
+}
+----
++
+// CONSOLE
+
+. Run the <<painless-ingest-processor-context, ingest processor context>>
+example. This sets up a script ingest processor used on each document as the
+seat data is indexed.
+
+. Index the seat data:
++
+[source,js]
+----
+curl -XPOST localhost:9200/seats/seat/_bulk?pipeline=seats -H "Content-Type: application/x-ndjson" --data-binary "@//seats.json"
+----
+// NOTCONSOLE
+
diff --git a/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc b/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc
index 5d451268ded..546057ab1a0 100644
--- a/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc
+++ b/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc
@@ -27,7 +27,7 @@ to modify documents upon insertion.
 {ref}/mapping-type-field.html[`ctx['_type']`]::
         Modify this to change the type for the current document.
 
-`ctx` (`Map`, read-only)::
+`ctx` (`Map`)::
         Modify the values in the `Map/List` structure to add, modify, or delete
         the fields of a document.
 
@@ -38,4 +38,158 @@ void::
 
 *API*
 
-The standard <<painless-api-reference, Painless API>> is available.
\ No newline at end of file
+The standard <<painless-api-reference, Painless API>> is available.
+
+*Example*
+
+To run this example, first follow the steps in
+<<painless-context-examples, context examples>>.
+
+The seat data contains:
+
+* A date in the format `YYYY-MM-DD` where the second digit of both month and day
+  is optional.
+* A time in the format HH:MM* where the second digit of both hours and minutes
+  is optional. The star (*) represents either the `String` `AM` or `PM`.
+
+The following ingest script processes the date and time `Strings` and stores the
+result in a `datetime` field.
+
+[source,Painless]
+----
+String[] split(String s, char d) {                                   <1>
+    int count = 0;
+
+    for (char c : s.toCharArray()) {                                  <2>
+        if (c == d) {
+            ++count;
+        }
+    }
+
+    if (count == 0) {
+        return new String[] {s};                                      <3>
+    }
+
+    String[] r = new String[count + 1];                               <4>
+    int i0 = 0, i1 = 0;
+    count = 0;
+
+    for (char c : s.toCharArray()) {                                  <5>
+        if (c == d) {
+            r[count++] = s.substring(i0, i1);
+            i0 = i1 + 1;
+        }
+
+        ++i1;
+    }
+
+    r[count] = s.substring(i0, i1);                                   <6>
+
+    return r;
+}
+
+String[] dateSplit = split(ctx.date, (char)"-");                      <7>
+String year = dateSplit[0].trim();
+String month = dateSplit[1].trim();
+
+if (month.length() == 1) {                                            <8>
+    month = "0" + month;
+}
+
+String day = dateSplit[2].trim();
+
+if (day.length() == 1) {                                              <9>
+    day = "0" + day;
+}
+
+boolean pm = ctx.time.substring(ctx.time.length() - 2).equals("PM");  <10>
+String[] timeSplit = split(
+    ctx.time.substring(0, ctx.time.length() - 2), (char)":");         <11>
+int hours = Integer.parseInt(timeSplit[0].trim());
+int minutes = Integer.parseInt(timeSplit[1].trim());
+
+if (pm) {                                                             <12>
+    hours += 12;
+}
+
+String dts = year + "-" + month + "-" + day + "T" +
+    (hours < 10 ? "0" + hours : "" + hours) + ":" +
+    (minutes < 10 ?
"0" + minutes : "" + minutes) + + ":00+08:00"; <13> + +ZonedDateTime dt = ZonedDateTime.parse( + dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME); <14> +ctx.datetime = dt.getLong(ChronoField.INSTANT_SECONDS)*1000L; <15> +---- +<1> Creates a `split` <> to split a + <> type value using a <> + type value as the delimiter. This is useful for handling the necessity of + pulling out the individual pieces of the date and time `Strings` from the + original seat data. +<2> The first pass through each `char` in the `String` collects how many new + `Strings` the original is split into. +<3> Returns the original `String` if there are no instances of the delimiting + `char`. +<4> Creates an <> value to collect the split `Strings` + into based on the number of `char` delimiters found in the first pass. +<5> The second pass through each `char` in the `String` collects each split + substring into an array type value of `Strings`. +<6> Collects the last substring into the array type value of `Strings`. +<7> Uses the `split` function to separate the date `String` from the seat data + into year, month, and day `Strings`. + Note:: + * The use of a `String` type value to `char` type value + <> as part of the second argument since + character literals do not exist. + * The use of the `ctx` ingest processor context variable to retrieve the + data from the `date` field. +<8> Appends the <> `"0"` value to a single + digit month since the format of the seat data allows for this case. +<9> Appends the <> `"0"` value to a single + digit day since the format of the seat data allows for this case. +<10> Sets the <> + <> to `true` if the time `String` is a time + in the afternoon or evening. + Note:: + * The use of the `ctx` ingest processor context variable to retrieve the + data from the `time` field. +<11> Uses the `split` function to separate the time `String` from the seat data + into hours and minutes `Strings`. + Note:: + * The use of the `substring` method to remove the `AM` or `PM` portion of + the time `String`. + * The use of a `String` type value to `char` type value + <> as part of the second argument since + character literals do not exist. + * The use of the `ctx` ingest processor context variable to retrieve the + data from the `date` field. +<12> If the time `String` is an afternoon or evening value adds the + <> `12` to the existing hours to move to + a 24-hour based time. +<13> Builds a new time `String` that is parsable using existing API methods. +<14> Creates a `ZonedDateTime` <> value by using + the API method `parse` to parse the new time `String`. +<15> Sets the datetime field `datetime` to the number of milliseconds retrieved + from the API method `getLong`. + Note:: + * The use of the `ctx` ingest processor context variable to set the field + `datetime`. Manipulate each document's fields with the `ctx` variable as + each document is indexed. 
+
+Submit the following request:
+
+[source,js]
+----
+PUT /_ingest/pipeline/seats
+{
+    "description": "update datetime for seats",
+    "processors": [
+      {
+        "script": {
+          "source": "String[] split(String s, char d) { int count = 0; for (char c : s.toCharArray()) { if (c == d) { ++count; } } if (count == 0) { return new String[] {s}; } String[] r = new String[count + 1]; int i0 = 0, i1 = 0; count = 0; for (char c : s.toCharArray()) { if (c == d) { r[count++] = s.substring(i0, i1); i0 = i1 + 1; } ++i1; } r[count] = s.substring(i0, i1); return r; } String[] dateSplit = split(ctx.date, (char)\"-\"); String year = dateSplit[0].trim(); String month = dateSplit[1].trim(); if (month.length() == 1) { month = \"0\" + month; } String day = dateSplit[2].trim(); if (day.length() == 1) { day = \"0\" + day; } boolean pm = ctx.time.substring(ctx.time.length() - 2).equals(\"PM\"); String[] timeSplit = split(ctx.time.substring(0, ctx.time.length() - 2), (char)\":\"); int hours = Integer.parseInt(timeSplit[0].trim()); int minutes = Integer.parseInt(timeSplit[1].trim()); if (pm) { hours += 12; } String dts = year + \"-\" + month + \"-\" + day + \"T\" + (hours < 10 ? \"0\" + hours : \"\" + hours) + \":\" + (minutes < 10 ? \"0\" + minutes : \"\" + minutes) + \":00+08:00\"; ZonedDateTime dt = ZonedDateTime.parse(dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME); ctx.datetime = dt.getLong(ChronoField.INSTANT_SECONDS)*1000L;"
+        }
+      }
+    ]
+}
+----
+// CONSOLE
\ No newline at end of file
diff --git a/docs/painless/painless-contexts/painless-metric-agg-combine-context.asciidoc b/docs/painless/painless-contexts/painless-metric-agg-combine-context.asciidoc
index 31cb596ae81..5cc9ad8ecbb 100644
--- a/docs/painless/painless-contexts/painless-metric-agg-combine-context.asciidoc
+++ b/docs/painless/painless-contexts/painless-metric-agg-combine-context.asciidoc
@@ -12,7 +12,7 @@ optional as part of a full metric aggregation.
 `params` (`Map`, read-only)::
         User-defined parameters passed in as part of the query.
 
-`params['_agg']` (`Map`)::
+`state` (`Map`)::
         `Map` with values available from the prior map script.
 
 *Return*
diff --git a/docs/painless/painless-contexts/painless-metric-agg-init-context.asciidoc b/docs/painless/painless-contexts/painless-metric-agg-init-context.asciidoc
index 1503e3abb58..8c0fddfa339 100644
--- a/docs/painless/painless-contexts/painless-metric-agg-init-context.asciidoc
+++ b/docs/painless/painless-contexts/painless-metric-agg-init-context.asciidoc
@@ -12,13 +12,13 @@ full metric aggregation.
 `params` (`Map`, read-only)::
         User-defined parameters passed in as part of the query.
 
-`params['_agg']` (`Map`)::
+`state` (`Map`)::
         Empty `Map` used to add values for use in a
         <<painless-metric-agg-map-context, map script>>.
 
 *Side Effects*
 
-`params['_agg']` (`Map`)::
+`state` (`Map`)::
         Add values to this `Map` for use in a map script. Additional values
         must be of the type `Map`, `List`, `String` or primitive.
diff --git a/docs/painless/painless-contexts/painless-metric-agg-map-context.asciidoc b/docs/painless/painless-contexts/painless-metric-agg-map-context.asciidoc
index 16016d1cf81..a34308aa938 100644
--- a/docs/painless/painless-contexts/painless-metric-agg-map-context.asciidoc
+++ b/docs/painless/painless-contexts/painless-metric-agg-map-context.asciidoc
@@ -13,10 +13,9 @@ part of a full metric aggregation.
 `params` (`Map`, read-only)::
         User-defined parameters passed in as part of the query.
 
-`params['_agg']` (`Map`)::
+`state` (`Map`)::
         `Map` used to add values for processing in a
-        <<painless-metric-agg-combine-context, combine script>> or returned
-        directly.
+        <<painless-metric-agg-combine-context, combine script>> or to be returned from the aggregation.
 `doc` (`Map`, read-only)::
         Contains the fields of the current document where each field is a
@@ -27,15 +26,16 @@ part of a full metric aggregation.
 
 *Side Effects*
 
-`params['_agg']` (`Map`)::
+`state` (`Map`)::
         Use this `Map` to add values for processing in a combine script.
         Additional values must be of the type `Map`, `List`, `String` or
-        primitive. If an initialization script is provided as part the
+        primitive. The same `state` `Map` is shared between all aggregated documents
+        on a given shard. If an initialization script is provided as part of the
         aggregation then values added from the initialization script are
-        available as well. If no combine script is specified, values must be
-        directly stored in `_agg`. If no combine script and no
+        available. If no combine script is specified, values must be
+        directly stored in `state` in a usable form. If no combine script and no
         <<painless-metric-agg-reduce-context, reduce script>> are specified, the
-        values are used as the result.
+        `state` values are used as the result.
 
 *Return*
 
diff --git a/docs/painless/painless-contexts/painless-metric-agg-reduce-context.asciidoc b/docs/painless/painless-contexts/painless-metric-agg-reduce-context.asciidoc
index b76e02b1b04..b492207ef44 100644
--- a/docs/painless/painless-contexts/painless-metric-agg-reduce-context.asciidoc
+++ b/docs/painless/painless-contexts/painless-metric-agg-reduce-context.asciidoc
@@ -14,7 +14,7 @@ specified) and is optional as part of a full metric aggregation.
 `params` (`Map`, read-only)::
         User-defined parameters passed in as part of the query.
 
-`params['_aggs']` (`Map`)::
+`states` (`Map`)::
         `Map` with values available from the prior combine script (or a map
         script if no combine script is specified).
 
diff --git a/docs/painless/painless-getting-started.asciidoc b/docs/painless/painless-getting-started.asciidoc
index 1dec4a33bb5..8cff207ab04 100644
--- a/docs/painless/painless-getting-started.asciidoc
+++ b/docs/painless/painless-getting-started.asciidoc
@@ -198,7 +198,7 @@ POST hockey/player/1/_update
 ==== Dates
 
 Date fields are exposed as
-`ReadableDateTime`
+`ReadableDateTime` or `ZonedDateTime`,
 so they support methods like `getYear`, and `getDayOfWeek`.
@@ -220,6 +220,11 @@ GET hockey/_search
 }
 ----------------------------------------------------------------
 // CONSOLE
+// TEST[warning:The joda time api for doc values is deprecated. Use -Des.scripting.use_java_time=true to use the java time api for date field doc values]
+
+NOTE: Date fields are changing in 7.0 to be exposed as `ZonedDateTime`
+from Java 8's time API. To switch to this functionality early,
+add `-Des.scripting.use_java_time=true` to `jvm.options`.
 
 [float]
 [[modules-scripting-painless-regex]]
diff --git a/docs/painless/painless-keywords.asciidoc b/docs/painless/painless-keywords.asciidoc
index 9463902c8d3..24371d3713c 100644
--- a/docs/painless/painless-keywords.asciidoc
+++ b/docs/painless/painless-keywords.asciidoc
@@ -5,7 +5,7 @@ Keywords are reserved tokens for built-in language features.
 
 *Errors*
 
-If a keyword is used as an <<painless-identifiers, identifier>>.
+* If a keyword is used as an <<painless-identifiers, identifier>>.
 
 *Keywords*
 
diff --git a/docs/plugins/integrations.asciidoc b/docs/plugins/integrations.asciidoc
index 90f2c685fda..8bffe5193ed 100644
--- a/docs/plugins/integrations.asciidoc
+++ b/docs/plugins/integrations.asciidoc
@@ -17,14 +17,11 @@ Integrations are not plugins, but are external tools or modules that make it eas
 * https://drupal.org/project/elasticsearch_connector[Drupal]:
   Drupal Elasticsearch integration.
-* https://wordpress.org/plugins/wpsolr-search-engine/[WPSOLR]: - Elasticsearch (and Apache Solr) WordPress Plugin - -* http://searchbox-io.github.com/wp-elasticsearch/[Wp-Elasticsearch]: +* https://wordpress.org/plugins/elasticpress/[ElasticPress]: Elasticsearch WordPress Plugin -* https://github.com/wallmanderco/elasticsearch-indexer[Elasticsearch Indexer]: - Elasticsearch WordPress Plugin +* https://wordpress.org/plugins/wpsolr-search-engine/[WPSOLR]: + Elasticsearch (and Apache Solr) WordPress Plugin * https://doc.tiki.org/Elasticsearch[Tiki Wiki CMS Groupware]: Tiki has native support for Elasticsearch. This provides faster & better diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc index efbd8ef7389..e19ecac462d 100644 --- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc @@ -425,6 +425,7 @@ POST /sales/_search?size=0 -------------------------------------------------- // CONSOLE // TEST[setup:sales] +// TEST[warning:The joda time api for doc values is deprecated. Use -Des.scripting.use_java_time=true to use the java time api for date field doc values] Response: diff --git a/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc b/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc index 3da1c60db05..a998e6b11f9 100644 --- a/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc +++ b/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc @@ -87,7 +87,7 @@ The above example produces the following term: [source,text] --------------------------- -[ My, credit, card, is 123_456_789 ] +[ My, credit, card, is, 123_456_789 ] --------------------------- WARNING: Using a replacement string that changes the length of the original diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index 191da2660d6..3de85041871 100644 --- a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -22,6 +22,7 @@ Will return, for example: "successful" : 1, "failed" : 0 }, + "cluster_uuid": "YjAvIhsCQ9CbjWZb2qJw3Q", "cluster_name": "elasticsearch", "timestamp": 1459427693515, "status": "green", diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index 7ba7e2da633..1cfc122bee4 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -47,7 +47,7 @@ POST test/_doc/1/_update // TEST[continued] We can add a tag to the list of tags (note, if the tag exists, it -will still add it, since its a list): +will still add it, since it's a list): [source,js] -------------------------------------------------- @@ -65,6 +65,28 @@ POST test/_doc/1/_update // CONSOLE // TEST[continued] +We can remove a tag from the list of tags. Note that the Painless function to +`remove` a tag takes as its parameter the array index of the element you wish +to remove, so you need a bit more logic to locate it while avoiding a runtime +error. 
Note that if the tag was present more than once in the list, this will +remove only one occurrence of it: + +[source,js] +-------------------------------------------------- +POST test/_doc/1/_update +{ + "script" : { + "source": "if (ctx._source.tags.contains(params.tag)) { ctx._source.tags.remove(ctx._source.tags.indexOf(params.tag)) }", + "lang": "painless", + "params" : { + "tag" : "blue" + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + In addition to `_source`, the following variables are available through the `ctx` map: `_index`, `_type`, `_id`, `_version`, `_routing` and `_now` (the current timestamp). @@ -172,7 +194,7 @@ the request was ignored. "_index": "test", "_type": "_doc", "_id": "1", - "_version": 6, + "_version": 7, "result": "noop" } -------------------------------------------------- diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index b89021e1cfe..8229f74bdd0 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -23,7 +23,7 @@ There are a few concepts that are core to Elasticsearch. Understanding these con [float] === Near Realtime (NRT) -Elasticsearch is a near real time search platform. What this means is there is a slight latency (normally one second) from the time you index a document until the time it becomes searchable. +Elasticsearch is a near-realtime search platform. What this means is there is a slight latency (normally one second) from the time you index a document until the time it becomes searchable. [float] === Cluster @@ -59,7 +59,7 @@ In a single cluster, you can define as many indexes as you want. deprecated[6.0.0,See <>] -A type used to be a logical category/partition of your index to allow you to store different types of documents in the same index, eg one type for users, another type for blog posts. It is no longer possible to create multiple types in an index, and the whole concept of types will be removed in a later version. See <> for more. +A type used to be a logical category/partition of your index to allow you to store different types of documents in the same index, e.g. one type for users, another type for blog posts. It is no longer possible to create multiple types in an index, and the whole concept of types will be removed in a later version. See <> for more. [float] === Document @@ -93,7 +93,8 @@ Replication is important for two primary reasons: To summarize, each index can be split into multiple shards. An index can also be replicated zero (meaning no replicas) or more times. Once replicated, each index will have primary shards (the original shards that were replicated from) and replica shards (the copies of the primary shards). -The number of shards and replicas can be defined per index at the time the index is created. After the index is created, you may change the number of replicas dynamically anytime but you cannot change the number of shards after-the-fact. + +The number of shards and replicas can be defined per index at the time the index is created. After the index is created, you may also change the number of replicas dynamically anytime. You can change the number of shards for an existing index using the {ref}/indices-shrink-index.html[`_shrink`] and {ref}/indices-split-index.html[`_split`] APIs, however this is not a trivial task and pre-planning for the correct number of shards is the optimal approach. 
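+
+For example, the following requests create an index with three primary shards
+and two replicas and then lower the replica count dynamically. This is a
+minimal sketch; the index name `my_index` is purely illustrative:
+
+[source,js]
+--------------------------------------------------
+PUT /my_index
+{
+  "settings" : {
+    "number_of_shards" : 3,
+    "number_of_replicas" : 2
+  }
+}
+
+PUT /my_index/_settings
+{
+  "index" : {
+    "number_of_replicas" : 1
+  }
+}
+--------------------------------------------------
+// CONSOLE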
By default, each index in Elasticsearch is allocated one primary shard and one replica which means that if you have at least two nodes in your cluster, your index will have one primary shard and another replica shard (one complete replica) for a total of two shards per index. @@ -1065,7 +1066,7 @@ In the previous section, we skipped over a little detail called the document sco But queries do not always need to produce scores, in particular when they are only used for "filtering" the document set. Elasticsearch detects these situations and automatically optimizes query execution in order not to compute useless scores. -The {ref}/query-dsl-bool-query.html[`bool` query] that we introduced in the previous section also supports `filter` clauses which allow to use a query to restrict the documents that will be matched by other clauses, without changing how scores are computed. As an example, let's introduce the {ref}/query-dsl-range-query.html[`range` query], which allows us to filter documents by a range of values. This is generally used for numeric or date filtering. +The {ref}/query-dsl-bool-query.html[`bool` query] that we introduced in the previous section also supports `filter` clauses which allow us to use a query to restrict the documents that will be matched by other clauses, without changing how scores are computed. As an example, let's introduce the {ref}/query-dsl-range-query.html[`range` query], which allows us to filter documents by a range of values. This is generally used for numeric or date filtering. This example uses a bool query to return all accounts with balances between 20000 and 30000, inclusive. In other words, we want to find accounts with a balance that is greater than or equal to 20000 and less than or equal to 30000. diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 79277d22e81..37c616b2349 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -1049,6 +1049,125 @@ understands this to mean `2016-04-01` as is explained in the <> can't be accessed by any processor. + +[[dot-expender-options]] +.Dot Expand Options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The field to expand into an object field +| `path` | no | - | The field that contains the field to expand. Only required if the field to expand is part another object field, because the `field` option can only understand leaf fields. +|====== + +[source,js] +-------------------------------------------------- +{ + "dot_expander": { + "field": "foo.bar" + } +} +-------------------------------------------------- +// NOTCONSOLE + +For example the dot expand processor would turn this document: + +[source,js] +-------------------------------------------------- +{ + "foo.bar" : "value" +} +-------------------------------------------------- +// NOTCONSOLE + +into: + +[source,js] +-------------------------------------------------- +{ + "foo" : { + "bar" : "value" + } +} +-------------------------------------------------- +// NOTCONSOLE + +If there is already a `bar` field nested under `foo` then +this processor merges the `foo.bar` field into it. If the field is +a scalar value then it will turn that field into an array field. 
+ +For example, the following document: + +[source,js] +-------------------------------------------------- +{ + "foo.bar" : "value2", + "foo" : { + "bar" : "value1" + } +} +-------------------------------------------------- +// NOTCONSOLE + +is transformed by the `dot_expander` processor into: + +[source,js] +-------------------------------------------------- +{ + "foo" : { + "bar" : ["value1", "value2"] + } +} +-------------------------------------------------- +// NOTCONSOLE + +If any field outside of the leaf field conflicts with a pre-existing field of the same name, +then that field needs to be renamed first. + +Consider the following document: + +[source,js] +-------------------------------------------------- +{ + "foo": "value1", + "foo.bar": "value2" +} +-------------------------------------------------- +// NOTCONSOLE + +Then the `foo` needs to be renamed first before the `dot_expander` +processor is applied. So in order for the `foo.bar` field to properly +be expanded into the `bar` field under the `foo` field the following +pipeline should be used: + +[source,js] +-------------------------------------------------- +{ + "processors" : [ + { + "rename" : { + "field" : "foo", + "target_field" : "foo.bar"" + } + }, + { + "dot_expander": { + "field": "foo.bar" + } + } + ] +} +-------------------------------------------------- +// NOTCONSOLE + +The reason for this is that Ingest doesn't know how to automatically cast +a scalar field to an object field. + [[fail-processor]] === Fail Processor Raises an exception. This is useful for when @@ -2058,125 +2177,6 @@ Converts a string to its uppercase equivalent. -------------------------------------------------- // NOTCONSOLE -[[dot-expand-processor]] -=== Dot Expander Processor - -Expands a field with dots into an object field. This processor allows fields -with dots in the name to be accessible by other processors in the pipeline. -Otherwise these <> can't be accessed by any processor. - -[[dot-expender-options]] -.Dot Expand Options -[options="header"] -|====== -| Name | Required | Default | Description -| `field` | yes | - | The field to expand into an object field -| `path` | no | - | The field that contains the field to expand. Only required if the field to expand is part another object field, because the `field` option can only understand leaf fields. -|====== - -[source,js] --------------------------------------------------- -{ - "dot_expander": { - "field": "foo.bar" - } -} --------------------------------------------------- -// NOTCONSOLE - -For example the dot expand processor would turn this document: - -[source,js] --------------------------------------------------- -{ - "foo.bar" : "value" -} --------------------------------------------------- -// NOTCONSOLE - -into: - -[source,js] --------------------------------------------------- -{ - "foo" : { - "bar" : "value" - } -} --------------------------------------------------- -// NOTCONSOLE - -If there is already a `bar` field nested under `foo` then -this processor merges the `foo.bar` field into it. If the field is -a scalar value then it will turn that field into an array field. 
- -For example, the following document: - -[source,js] --------------------------------------------------- -{ - "foo.bar" : "value2", - "foo" : { - "bar" : "value1" - } -} --------------------------------------------------- -// NOTCONSOLE - -is transformed by the `dot_expander` processor into: - -[source,js] --------------------------------------------------- -{ - "foo" : { - "bar" : ["value1", "value2"] - } -} --------------------------------------------------- -// NOTCONSOLE - -If any field outside of the leaf field conflicts with a pre-existing field of the same name, -then that field needs to be renamed first. - -Consider the following document: - -[source,js] --------------------------------------------------- -{ - "foo": "value1", - "foo.bar": "value2" -} --------------------------------------------------- -// NOTCONSOLE - -Then the `foo` needs to be renamed first before the `dot_expander` -processor is applied. So in order for the `foo.bar` field to properly -be expanded into the `bar` field under the `foo` field the following -pipeline should be used: - -[source,js] --------------------------------------------------- -{ - "processors" : [ - { - "rename" : { - "field" : "foo", - "target_field" : "foo.bar"" - } - }, - { - "dot_expander": { - "field": "foo.bar" - } - } - ] -} --------------------------------------------------- -// NOTCONSOLE - -The reason for this is that Ingest doesn't know how to automatically cast -a scalar field to an object field. - [[urldecode-processor]] === URL Decode Processor URL-decodes a string diff --git a/docs/reference/migration/migrate_7_0/search.asciidoc b/docs/reference/migration/migrate_7_0/search.asciidoc index 11f46509127..094294d8530 100644 --- a/docs/reference/migration/migrate_7_0/search.asciidoc +++ b/docs/reference/migration/migrate_7_0/search.asciidoc @@ -92,6 +92,9 @@ deprecated in 6.x, has been removed. Context enabled suggestion queries without contexts have to visit every suggestion, which degrades the search performance considerably. +For geo context the value of the `path` parameter is now validated against the mapping, +and the context is only accepted if `path` points to a field with `geo_point` type. + ==== Semantics changed for `max_concurrent_shard_requests` `max_concurrent_shard_requests` used to limit the total number of concurrent shard diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index a5a8e4d008a..d67d8a733ac 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -503,3 +503,31 @@ guide to the {painless}/index.html[Painless Scripting Language]. See the {painless}/painless-api-reference.html[Painless API Reference] in the guide to the {painless}/index.html[Painless Scripting Language]. 
+ +[role="exclude", id="security-api-roles"] +=== Role management APIs + +You can use the following APIs to add, remove, and retrieve roles in the native realm: + +* <>, <> +* <> +* <> + +[role="exclude",id="security-api-tokens"] +=== Token management APIs + +You can use the following APIs to create and invalidate bearer tokens for access +without requiring basic authentication: + +* <>, <> + +[role="exclude",id="security-api-users"] +=== User Management APIs + +You can use the following APIs to create, read, update, and delete users from the +native realm: + +* <>, <> +* <>, <> +* <> +* <> diff --git a/docs/reference/release-notes/7.0.0-alpha1.asciidoc b/docs/reference/release-notes/7.0.0-alpha1.asciidoc index cf2e1e30be0..c3a03d77f81 100644 --- a/docs/reference/release-notes/7.0.0-alpha1.asciidoc +++ b/docs/reference/release-notes/7.0.0-alpha1.asciidoc @@ -21,4 +21,10 @@ Aggregations:: * The Percentiles and PercentileRanks aggregations now return `null` in the REST response, instead of `NaN`. This makes it consistent with the rest of the aggregations. Note: this only applies to the REST response, the java objects continue to return `NaN` (also - consistent with other aggregations) \ No newline at end of file + consistent with other aggregations) + +Suggesters:: +* Plugins that register suggesters can now define their own types of suggestions and must + explicitly indicate the type of suggestion that they produce. Existing plugins will + require changes to their plugin registration. See the `custom-suggester` example + plugin {pull}30284[#30284] \ No newline at end of file diff --git a/docs/reference/search/request-body.asciidoc b/docs/reference/search/request-body.asciidoc index 2a51d705d83..e7c9b593af3 100644 --- a/docs/reference/search/request-body.asciidoc +++ b/docs/reference/search/request-body.asciidoc @@ -90,7 +90,8 @@ And here is a sample response: Set to `false` to return an overall failure if the request would produce partial results. Defaults to true, which will allow partial results in the case of timeouts - or partial failures. + or partial failures. This default can be controlled using the cluster-level setting + `search.default_allow_partial_results`. `terminate_after`:: diff --git a/docs/reference/search/uri-request.asciidoc b/docs/reference/search/uri-request.asciidoc index a90f32bb3cd..279bc0c0384 100644 --- a/docs/reference/search/uri-request.asciidoc +++ b/docs/reference/search/uri-request.asciidoc @@ -125,5 +125,6 @@ more details on the different types of search that can be performed. |`allow_partial_search_results` |Set to `false` to return an overall failure if the request would produce partial results. Defaults to true, which will allow partial results in the case of timeouts -or partial failures.. +or partial failures. This default can be controlled using the cluster-level setting +`search.default_allow_partial_results`. 
|======================================================================= diff --git a/docs/reference/setup/important-settings.asciidoc b/docs/reference/setup/important-settings.asciidoc index b9b99b70803..8a9b59480a0 100644 --- a/docs/reference/setup/important-settings.asciidoc +++ b/docs/reference/setup/important-settings.asciidoc @@ -14,6 +14,7 @@ The following settings *must* be considered before going to production: * <> * <> * <> +* <> include::important-settings/path-settings.asciidoc[] @@ -31,4 +32,6 @@ include::important-settings/heap-dump-path.asciidoc[] include::important-settings/gc-logging.asciidoc[] +include::important-settings/es-tmpdir.asciidoc[] + include::important-settings/error-file.asciidoc[] diff --git a/docs/reference/setup/important-settings/es-tmpdir.asciidoc b/docs/reference/setup/important-settings/es-tmpdir.asciidoc new file mode 100644 index 00000000000..20959d969b8 --- /dev/null +++ b/docs/reference/setup/important-settings/es-tmpdir.asciidoc @@ -0,0 +1,23 @@ +[[es-tmpdir]] +=== Temp directory + +By default, Elasticsearch uses a private temporary directory that the startup +script creates immediately below the system temporary directory. + +On some Linux distributions a system utility will clean files and directories +from `/tmp` if they have not been recently accessed. This can lead to the +private temporary directory being removed while Elasticsearch is running if +features that require the temporary directory are not used for a long time. +This causes problems if a feature that requires the temporary directory is +subsequently used. + +If you install Elasticsearch using the `.deb` or `.rpm` packages and run it +under `systemd` then the private temporary directory that Elasticsearch uses +is excluded from periodic cleanup. + +However, if you intend to run the `.tar.gz` distribution on Linux for an +extended period then you should consider creating a dedicated temporary +directory for Elasticsearch that is not under a path that will have old files +and directories cleaned from it. This directory should have permissions set +so that only the user that Elasticsearch runs as can access it. Then set the +`$ES_TMPDIR` environment variable to point to it before starting Elasticsearch. diff --git a/docs/reference/setup/important-settings/heap-dump-path.asciidoc b/docs/reference/setup/important-settings/heap-dump-path.asciidoc index b0d301b21d0..fb8c7ff35f0 100644 --- a/docs/reference/setup/important-settings/heap-dump-path.asciidoc +++ b/docs/reference/setup/important-settings/heap-dump-path.asciidoc @@ -8,8 +8,8 @@ distributions, and the `data` directory under the root of the Elasticsearch installation for the <> archive distributions). If this path is not suitable for receiving heap dumps, you should modify the entry `-XX:HeapDumpPath=...` in -<>. If you specify a fixed filename instead -of a directory, the JVM will repeatedly use the same file; this is one -mechanism for preventing heap dumps from accumulating in the heap dump -path. Alternatively, you can configure a scheduled task via your OS to -remove heap dumps that are older than a configured age. +<>. If you specify a directory, the JVM +will generate a filename for the heap dump based on the PID of the running +instance. If you specify a fixed filename instead of a directory, the file must +not exist when the JVM needs to perform a heap dump on an out of memory +exception, otherwise the heap dump will fail. 
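+
+For example, to have heap dumps written to a dedicated directory, the
+`jvm.options` entry might look as follows (a sketch; the path shown is
+illustrative):
+
+[source,txt]
+--------------------------------------------------
+-XX:HeapDumpPath=/var/lib/elasticsearch/heap-dumps
+--------------------------------------------------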
diff --git a/docs/reference/setup/important-settings/network-host.asciidoc b/docs/reference/setup/important-settings/network-host.asciidoc index 7e29e73123d..1788bfebc66 100644 --- a/docs/reference/setup/important-settings/network-host.asciidoc +++ b/docs/reference/setup/important-settings/network-host.asciidoc @@ -9,7 +9,7 @@ location on a single node. This can be useful for testing Elasticsearch's ability to form clusters, but it is not a configuration recommended for production. -In order to communicate and to form a cluster with nodes on other servers, your +In order to form a cluster with nodes on other servers, your node will need to bind to a non-loopback address. While there are many <>, usually all you need to configure is `network.host`: diff --git a/docs/reference/setup/important-settings/node-name.asciidoc b/docs/reference/setup/important-settings/node-name.asciidoc index fab7ddcf118..5980d8e284e 100644 --- a/docs/reference/setup/important-settings/node-name.asciidoc +++ b/docs/reference/setup/important-settings/node-name.asciidoc @@ -2,7 +2,7 @@ === `node.name` By default, Elasticsearch will use the first seven characters of the randomly -generated UUID as the node id.Note that the node id is persisted and does +generated UUID as the node id. Note that the node id is persisted and does not change when a node restarts and therefore the default node name will also not change. @@ -19,4 +19,4 @@ The `node.name` can also be set to the server's HOSTNAME as follows: [source,yaml] -------------------------------------------------- node.name: ${HOSTNAME} --------------------------------------------------- \ No newline at end of file +-------------------------------------------------- diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc index 523217b921a..e2e5c6ab70b 100644 --- a/docs/reference/setup/install/docker.asciidoc +++ b/docs/reference/setup/install/docker.asciidoc @@ -4,9 +4,9 @@ {es} is also available as Docker images. The images use https://hub.docker.com/_/centos/[centos:7] as the base image. -A list of all published Docker images and tags can be found in -https://www.docker.elastic.co[www.docker.elastic.co]. The source code can be found -on https://github.com/elastic/elasticsearch-docker/tree/{branch}[GitHub]. +A list of all published Docker images and tags is available at +https://www.docker.elastic.co[www.docker.elastic.co]. The source code is in +https://github.com/elastic/elasticsearch-docker/tree/{branch}[GitHub]. These images are free to use under the Elastic license. They contain open source and free commercial features and access to paid commercial features. @@ -29,15 +29,13 @@ endif::[] ifeval::["{release-state}"!="unreleased"] -For example, the Docker image can be retrieved with the following command: - ["source","sh",subs="attributes"] -------------------------------------------- docker pull {docker-repo}:{version} -------------------------------------------- Alternatively, you can download other Docker images that contain only features -that are available under the Apache 2.0 license from +available under the Apache 2.0 license. To download the images, go to https://www.docker.elastic.co[www.docker.elastic.co]. 
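+
+For example, an image containing only Apache 2.0 licensed code can be pulled
+as follows (a sketch; the `elasticsearch-oss` image name is an assumption
+based on the naming convention in use at the time of writing):
+
+["source","sh",subs="attributes"]
+--------------------------------------------
+docker pull docker.elastic.co/elasticsearch/elasticsearch-oss:{version}
+--------------------------------------------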
endif::[] diff --git a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index a8dd91e8b6d..dbda453e5f9 100644 --- a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java +++ b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -92,8 +92,7 @@ public class DocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { final List hosts, final Version esVersion, final Version masterVersion) { - return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion, - restClientBuilder -> configureClient(restClientBuilder, restClientSettings())); + return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion, this::getClientBuilderWithSniffedHosts); } /** diff --git a/libs/core/src/main/java/org/elasticsearch/common/CharArrays.java b/libs/core/src/main/java/org/elasticsearch/common/CharArrays.java new file mode 100644 index 00000000000..907874ca573 --- /dev/null +++ b/libs/core/src/main/java/org/elasticsearch/common/CharArrays.java @@ -0,0 +1,150 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common; + +import java.nio.ByteBuffer; +import java.nio.CharBuffer; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Objects; + +/** + * Helper class similar to Arrays to handle conversions for Char arrays + */ +public final class CharArrays { + + private CharArrays() {} + + /** + * Decodes the provided byte[] to a UTF-8 char[]. This is done while avoiding + * conversions to String. The provided byte[] is not modified by this method, so + * the caller needs to take care of clearing the value if it is sensitive. + */ + public static char[] utf8BytesToChars(byte[] utf8Bytes) { + final ByteBuffer byteBuffer = ByteBuffer.wrap(utf8Bytes); + final CharBuffer charBuffer = StandardCharsets.UTF_8.decode(byteBuffer); + final char[] chars; + if (charBuffer.hasArray()) { + // there is no guarantee that the char buffers backing array is the right size + // so we need to make a copy + chars = Arrays.copyOfRange(charBuffer.array(), charBuffer.position(), charBuffer.limit()); + Arrays.fill(charBuffer.array(), (char) 0); // clear sensitive data + } else { + final int length = charBuffer.limit() - charBuffer.position(); + chars = new char[length]; + charBuffer.get(chars); + // if the buffer is not read only we can reset and fill with 0's + if (charBuffer.isReadOnly() == false) { + charBuffer.clear(); // reset + for (int i = 0; i < charBuffer.limit(); i++) { + charBuffer.put((char) 0); + } + } + } + return chars; + } + + /** + * Encodes the provided char[] to a UTF-8 byte[]. 
This is done while avoiding + * conversions to String. The provided char[] is not modified by this method, so + * the caller needs to take care of clearing the value if it is sensitive. + */ + public static byte[] toUtf8Bytes(char[] chars) { + final CharBuffer charBuffer = CharBuffer.wrap(chars); + final ByteBuffer byteBuffer = StandardCharsets.UTF_8.encode(charBuffer); + final byte[] bytes; + if (byteBuffer.hasArray()) { + // there is no guarantee that the byte buffers backing array is the right size + // so we need to make a copy + bytes = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.position(), byteBuffer.limit()); + Arrays.fill(byteBuffer.array(), (byte) 0); // clear sensitive data + } else { + final int length = byteBuffer.limit() - byteBuffer.position(); + bytes = new byte[length]; + byteBuffer.get(bytes); + // if the buffer is not read only we can reset and fill with 0's + if (byteBuffer.isReadOnly() == false) { + byteBuffer.clear(); // reset + for (int i = 0; i < byteBuffer.limit(); i++) { + byteBuffer.put((byte) 0); + } + } + } + return bytes; + } + + /** + * Tests if a char[] contains a sequence of characters that match the prefix. This is like + * {@link String#startsWith(String)} but does not require conversion of the char[] to a string. + */ + public static boolean charsBeginsWith(String prefix, char[] chars) { + if (chars == null || prefix == null) { + return false; + } + + if (prefix.length() > chars.length) { + return false; + } + + for (int i = 0; i < prefix.length(); i++) { + if (chars[i] != prefix.charAt(i)) { + return false; + } + } + + return true; + } + + /** + * Constant time equality check of char arrays to avoid potential timing attacks. + */ + public static boolean constantTimeEquals(char[] a, char[] b) { + Objects.requireNonNull(a, "char arrays must not be null for constantTimeEquals"); + Objects.requireNonNull(b, "char arrays must not be null for constantTimeEquals"); + if (a.length != b.length) { + return false; + } + + int equals = 0; + for (int i = 0; i < a.length; i++) { + equals |= a[i] ^ b[i]; + } + + return equals == 0; + } + + /** + * Constant time equality check of strings to avoid potential timing attacks. + */ + public static boolean constantTimeEquals(String a, String b) { + Objects.requireNonNull(a, "strings must not be null for constantTimeEquals"); + Objects.requireNonNull(b, "strings must not be null for constantTimeEquals"); + if (a.length() != b.length()) { + return false; + } + + int equals = 0; + for (int i = 0; i < a.length(); i++) { + equals |= a.charAt(i) ^ b.charAt(i); + } + + return equals == 0; + } +} diff --git a/libs/core/src/test/java/org/elasticsearch/common/CharArraysTests.java b/libs/core/src/test/java/org/elasticsearch/common/CharArraysTests.java new file mode 100644 index 00000000000..9283283ab08 --- /dev/null +++ b/libs/core/src/test/java/org/elasticsearch/common/CharArraysTests.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common; + +import org.elasticsearch.test.ESTestCase; + +import java.nio.charset.StandardCharsets; + +public class CharArraysTests extends ESTestCase { + + public void testCharsToBytes() { + final String originalValue = randomUnicodeOfCodepointLengthBetween(0, 32); + final byte[] expectedBytes = originalValue.getBytes(StandardCharsets.UTF_8); + final char[] valueChars = originalValue.toCharArray(); + + final byte[] convertedBytes = CharArrays.toUtf8Bytes(valueChars); + assertArrayEquals(expectedBytes, convertedBytes); + } + + public void testBytesToUtf8Chars() { + final String originalValue = randomUnicodeOfCodepointLengthBetween(0, 32); + final byte[] bytes = originalValue.getBytes(StandardCharsets.UTF_8); + final char[] expectedChars = originalValue.toCharArray(); + + final char[] convertedChars = CharArrays.utf8BytesToChars(bytes); + assertArrayEquals(expectedChars, convertedChars); + } + + public void testCharsBeginsWith() { + assertFalse(CharArrays.charsBeginsWith(randomAlphaOfLength(4), null)); + assertFalse(CharArrays.charsBeginsWith(null, null)); + assertFalse(CharArrays.charsBeginsWith(null, randomAlphaOfLength(4).toCharArray())); + assertFalse(CharArrays.charsBeginsWith(randomAlphaOfLength(2), randomAlphaOfLengthBetween(3, 8).toCharArray())); + + final String prefix = randomAlphaOfLengthBetween(2, 4); + assertTrue(CharArrays.charsBeginsWith(prefix, prefix.toCharArray())); + final char[] prefixedValue = prefix.concat(randomAlphaOfLengthBetween(1, 12)).toCharArray(); + assertTrue(CharArrays.charsBeginsWith(prefix, prefixedValue)); + + final String modifiedPrefix = randomBoolean() ? prefix.substring(1) : prefix.substring(0, prefix.length() - 1); + char[] nonMatchingValue; + do { + nonMatchingValue = modifiedPrefix.concat(randomAlphaOfLengthBetween(0, 12)).toCharArray(); + } while (new String(nonMatchingValue).startsWith(prefix)); + assertFalse(CharArrays.charsBeginsWith(prefix, nonMatchingValue)); + assertTrue(CharArrays.charsBeginsWith(modifiedPrefix, nonMatchingValue)); + } + + public void testConstantTimeEquals() { + final String value = randomAlphaOfLengthBetween(0, 32); + assertTrue(CharArrays.constantTimeEquals(value, value)); + assertTrue(CharArrays.constantTimeEquals(value.toCharArray(), value.toCharArray())); + + final String other = randomAlphaOfLengthBetween(1, 32); + assertFalse(CharArrays.constantTimeEquals(value, other)); + assertFalse(CharArrays.constantTimeEquals(value.toCharArray(), other.toCharArray())); + } +} diff --git a/libs/dissect/build.gradle b/libs/dissect/build.gradle new file mode 100644 index 00000000000..c09a2a4ebd1 --- /dev/null +++ b/libs/dissect/build.gradle @@ -0,0 +1,50 @@ +import org.elasticsearch.gradle.precommit.PrecommitTasks + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +archivesBaseName = 'elasticsearch-dissect' + +dependencies { + if (isEclipse == false || project.path == ":libs:dissect-tests") { + testCompile("org.elasticsearch.test:framework:${version}") { + exclude group: 'org.elasticsearch', module: 'dissect' + } + } + testCompile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" + testCompile("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") + testCompile("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}") +} + +forbiddenApisMain { + signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] +} + +if (isEclipse) { + // in eclipse the project is under a fake root, we need to change around the source sets + sourceSets { + if (project.path == ":libs:dissect") { + main.java.srcDirs = ['java'] + main.resources.srcDirs = ['resources'] + } else { + test.java.srcDirs = ['java'] + test.resources.srcDirs = ['resources'] + } + } +} diff --git a/libs/dissect/src/main/eclipse-build.gradle b/libs/dissect/src/main/eclipse-build.gradle new file mode 100644 index 00000000000..c2b72bd21e1 --- /dev/null +++ b/libs/dissect/src/main/eclipse-build.gradle @@ -0,0 +1,3 @@ + +// this is just shell gradle file for eclipse to have separate projects for dissect src and tests +apply from: '../../build.gradle' diff --git a/libs/dissect/src/main/java/org/elasticsearch/dissect/DissectException.java b/libs/dissect/src/main/java/org/elasticsearch/dissect/DissectException.java new file mode 100644 index 00000000000..a2f1ab33640 --- /dev/null +++ b/libs/dissect/src/main/java/org/elasticsearch/dissect/DissectException.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.dissect; + +/** + * Parent class for all dissect related exceptions. Consumers may catch this exception or more specific child exceptions. 
+ */ +public abstract class DissectException extends RuntimeException { + DissectException(String message) { + super(message); + } + + /** + * Error while parsing a dissect pattern + */ + static class PatternParse extends DissectException { + PatternParse(String pattern, String reason) { + super("Unable to parse pattern: " + pattern + " Reason: " + reason); + } + } + + /** + * Error while parsing a dissect key + */ + static class KeyParse extends DissectException { + KeyParse(String key, String reason) { + super("Unable to parse key: " + key + " Reason: " + reason); + } + } + + /** + * Unable to find a match between pattern and source string + */ + static class FindMatch extends DissectException { + FindMatch(String pattern, String source) { + super("Unable to find match for dissect pattern: " + pattern + " against source: " + source); + + } + } +} diff --git a/libs/dissect/src/main/java/org/elasticsearch/dissect/DissectKey.java b/libs/dissect/src/main/java/org/elasticsearch/dissect/DissectKey.java new file mode 100644 index 00000000000..67a6842182d --- /dev/null +++ b/libs/dissect/src/main/java/org/elasticsearch/dissect/DissectKey.java @@ -0,0 +1,191 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.dissect; + +import java.util.EnumSet; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + *
+ * <p>A Key of a dissect pattern. This class models the name and modifiers and provides some validation.</p>
+ * <p>For dissect pattern of {@code %{a} %{+a} %{b}} the dissect keys are:
+ * <ul>
+ * <li>{@code a}</li>
+ * <li>{@code +a}</li>
+ * <li>{@code b}</li>
+ * </ul>
+ * This class represents a single key.
+ * <p>
A single key is composed of a name and it's modifiers. For the key {@code +a}, {@code a} is the name and {@code +} is the modifier. + * @see DissectParser + */ +public final class DissectKey { + private static final Pattern LEFT_MODIFIER_PATTERN = Pattern.compile("([+*&?])(.*?)(->)?$", Pattern.DOTALL); + private static final Pattern RIGHT_PADDING_PATTERN = Pattern.compile("^(.*?)(->)?$", Pattern.DOTALL); + private static final Pattern APPEND_WITH_ORDER_PATTERN = Pattern.compile("[+](.*?)(/)([0-9]+)(->)?$", Pattern.DOTALL); + private final Modifier modifier; + private boolean skip; + private boolean skipRightPadding; + private int appendPosition; + private String name; + + /** + * Constructor - parses the String key into it's name and modifier(s) + * + * @param key The key without the leading %{ or trailing }, for example {@code a->} + */ + DissectKey(String key) { + skip = key == null || key.isEmpty(); + modifier = Modifier.findModifier(key); + switch (modifier) { + case NONE: + Matcher matcher = RIGHT_PADDING_PATTERN.matcher(key); + while (matcher.find()) { + name = matcher.group(1); + skipRightPadding = matcher.group(2) != null; + } + skip = name.isEmpty(); + break; + case NAMED_SKIP: + matcher = LEFT_MODIFIER_PATTERN.matcher(key); + while (matcher.find()) { + name = matcher.group(2); + skipRightPadding = matcher.group(3) != null; + } + skip = true; + break; + case APPEND: + matcher = LEFT_MODIFIER_PATTERN.matcher(key); + while (matcher.find()) { + name = matcher.group(2); + skipRightPadding = matcher.group(3) != null; + } + break; + case FIELD_NAME: + matcher = LEFT_MODIFIER_PATTERN.matcher(key); + while (matcher.find()) { + name = matcher.group(2); + skipRightPadding = matcher.group(3) != null; + } + break; + case FIELD_VALUE: + matcher = LEFT_MODIFIER_PATTERN.matcher(key); + while (matcher.find()) { + name = matcher.group(2); + skipRightPadding = matcher.group(3) != null; + } + break; + case APPEND_WITH_ORDER: + matcher = APPEND_WITH_ORDER_PATTERN.matcher(key); + while (matcher.find()) { + name = matcher.group(1); + appendPosition = Short.valueOf(matcher.group(3)); + skipRightPadding = matcher.group(4) != null; + } + break; + } + + if (name == null || (name.isEmpty() && !skip)) { + throw new DissectException.KeyParse(key, "The key name could be determined"); + } + } + + /** + * Copy constructor to explicitly override the modifier. 
+ * @param key The key to copy (except for the modifier) + * @param modifier the modifer to use for this copy + */ + DissectKey(DissectKey key, DissectKey.Modifier modifier){ + this.modifier = modifier; + this.skipRightPadding = key.skipRightPadding; + this.skip = key.skip; + this.name = key.name; + this.appendPosition = key.appendPosition; + } + + Modifier getModifier() { + return modifier; + } + + boolean skip() { + return skip; + } + + boolean skipRightPadding() { + return skipRightPadding; + } + + int getAppendPosition() { + return appendPosition; + } + + String getName() { + return name; + } + + //generated + @Override + public String toString() { + return "DissectKey{" + + "modifier=" + modifier + + ", skip=" + skip + + ", appendPosition=" + appendPosition + + ", name='" + name + '\'' + + '}'; + } + + public enum Modifier { + NONE(""), APPEND_WITH_ORDER("/"), APPEND("+"), FIELD_NAME("*"), FIELD_VALUE("&"), NAMED_SKIP("?"); + + private static final Pattern MODIFIER_PATTERN = Pattern.compile("[/+*&?]"); + + private final String modifier; + + @Override + public String toString() { + return modifier; + } + + Modifier(final String modifier) { + this.modifier = modifier; + } + + //package private for testing + static Modifier fromString(String modifier) { + return EnumSet.allOf(Modifier.class).stream().filter(km -> km.modifier.equals(modifier)) + .findFirst().orElseThrow(() -> new IllegalArgumentException("Found invalid modifier.")); //throw should never happen + } + + private static Modifier findModifier(String key) { + Modifier modifier = Modifier.NONE; + if (key != null && !key.isEmpty()) { + Matcher matcher = MODIFIER_PATTERN.matcher(key); + int matches = 0; + while (matcher.find()) { + Modifier priorModifier = modifier; + modifier = Modifier.fromString(matcher.group()); + if (++matches > 1 && !(APPEND.equals(priorModifier) && APPEND_WITH_ORDER.equals(modifier))) { + throw new DissectException.KeyParse(key, "multiple modifiers are not allowed."); + } + } + } + return modifier; + } + } +} diff --git a/libs/dissect/src/main/java/org/elasticsearch/dissect/DissectMatch.java b/libs/dissect/src/main/java/org/elasticsearch/dissect/DissectMatch.java new file mode 100644 index 00000000000..9217413e075 --- /dev/null +++ b/libs/dissect/src/main/java/org/elasticsearch/dissect/DissectMatch.java @@ -0,0 +1,198 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.dissect; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Represents the matches of a {@link DissectParser#parse(String)}. Handles the appending and referencing based on the key instruction. 
+ */ +final class DissectMatch { + + private final String appendSeparator; + private final Map results; + private final Map simpleResults; + private final Map referenceResults; + private final Map appendResults; + private int implicitAppendOrder = -1000; + private final int maxMatches; + private final int maxResults; + private final int appendCount; + private final int referenceCount; + private final int simpleCount; + private int matches = 0; + + DissectMatch(String appendSeparator, int maxMatches, int maxResults, int appendCount, int referenceCount) { + if (maxMatches <= 0 || maxResults <= 0) { + throw new IllegalArgumentException("Expected results are zero, can not construct DissectMatch");//should never happen + } + this.maxMatches = maxMatches; + this.maxResults = maxResults; + this.appendCount = appendCount; + this.referenceCount = referenceCount; + this.appendSeparator = appendSeparator; + results = new HashMap<>(maxResults); + this.simpleCount = maxMatches - referenceCount - appendCount; + simpleResults = simpleCount <= 0 ? null : new HashMap<>(simpleCount); + referenceResults = referenceCount <= 0 ? null : new HashMap<>(referenceCount); + appendResults = appendCount <= 0 ? null : new HashMap<>(appendCount); + } + + /** + * Add the key/value that was found as result of the parsing + * @param key the {@link DissectKey} + * @param value the discovered value for the key + */ + void add(DissectKey key, String value) { + matches++; + if (key.skip()) { + return; + } + switch (key.getModifier()) { + case NONE: + simpleResults.put(key.getName(), value); + break; + case APPEND: + appendResults.computeIfAbsent(key.getName(), k -> new AppendResult(appendSeparator)).addValue(value, implicitAppendOrder++); + break; + case APPEND_WITH_ORDER: + appendResults.computeIfAbsent(key.getName(), + k -> new AppendResult(appendSeparator)).addValue(value, key.getAppendPosition()); + break; + case FIELD_NAME: + referenceResults.computeIfAbsent(key.getName(), k -> new ReferenceResult()).setKey(value); + break; + case FIELD_VALUE: + referenceResults.computeIfAbsent(key.getName(), k -> new ReferenceResult()).setValue(value); + break; + } + } + + boolean fullyMatched() { + return matches == maxMatches; + } + + /** + * Checks if results are valid. + * @param results the results to check + * @return true if all dissect keys have been matched and the results are of the expected size. + */ + boolean isValid(Map results) { + return fullyMatched() && results.size() == maxResults; + } + + /** + * Gets all the current matches. Pass the results of this to isValid to determine if a fully successful match has occured. + * + * @return the map of the results. + */ + Map getResults() { + results.clear(); + if (simpleCount > 0) { + results.putAll(simpleResults); + } + if (referenceCount > 0) { + referenceResults.forEach((k, v) -> results.put(v.getKey(), v.getValue())); + } + if (appendCount > 0) { + appendResults.forEach((k, v) -> results.put(k, v.getAppendResult())); + } + + return results; + } + + /** + * a result that will need to be part of an append operation. 
+ */ + private final class AppendResult { + private final List values = new ArrayList<>(); + private final String appendSeparator; + + private AppendResult(String appendSeparator) { + this.appendSeparator = appendSeparator; + } + + private void addValue(String value, int order) { + values.add(new AppendValue(value, order)); + } + + private String getAppendResult() { + Collections.sort(values); + return values.stream().map(AppendValue::getValue).collect(Collectors.joining(appendSeparator)); + } + } + + /** + * An appendable value that can be sorted based on the provided order + */ + private final class AppendValue implements Comparable { + private final String value; + private final int order; + + private AppendValue(String value, int order) { + this.value = value; + this.order = order; + } + + private String getValue() { + return value; + } + + private int getOrder() { + return order; + } + + @Override + public int compareTo(AppendValue o) { + return Integer.compare(this.order, o.getOrder()); + } + } + + /** + * A result that needs to be converted to a key/value reference + */ + private final class ReferenceResult { + + private String key; + + private String getKey() { + return key; + } + + private String getValue() { + return value; + } + + private String value; + + private void setValue(String value) { + this.value = value; + } + + private void setKey(String key) { + this.key = key; + } + } +} diff --git a/libs/dissect/src/main/java/org/elasticsearch/dissect/DissectParser.java b/libs/dissect/src/main/java/org/elasticsearch/dissect/DissectParser.java new file mode 100644 index 00000000000..407d73134b6 --- /dev/null +++ b/libs/dissect/src/main/java/org/elasticsearch/dissect/DissectParser.java @@ -0,0 +1,310 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.dissect; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.EnumSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +/** + *
+ * <p>Splits (dissects) a string into its parts based on a pattern.</p>
+ * <p>A dissect pattern is composed of a set of keys and delimiters.
+ * For example the dissect pattern: <pre>%{a} %{b},%{c}</pre> has 3 keys (a,b,c) and two delimiters (space and comma). This pattern will
+ * match a string of the form: <pre>foo bar,baz</pre> and will result in a key/value pairing of <pre>a=foo, b=bar, and c=baz.</pre>
+ * <p>Matches are all or nothing. For example, the same pattern will NOT match <pre>foo bar baz</pre> since not all of the
+ * delimiters matched (the comma did not match).
+ * <p>Dissect patterns can optionally have modifiers. These modifiers instruct the parser to change its behavior. For example the
+ * dissect pattern of <pre>%{a},%{b}:%{c}</pre> would not match <pre>foo,bar,baz</pre> since the colon never matches.
+ * <p>Modifiers appear to the left or the right of the key name. The supported modifiers are:
+ * <ul>
+ * <li>{@code ->} Instructs the parser to ignore repeating delimiters to the right of the key. Example: <pre>
+ * pattern: {@code %{a->} %{b} %{c}}
+ * string: {@code foo         bar baz}
+ * result: {@code a=foo, b=bar, c=baz}
+ * </pre></li>
+ * <li>{@code +} Instructs the parser to append this key's value to the value of the prior key with the same name. Example: <pre>
+ * pattern: {@code %{a} %{+a} %{+a}}
+ * string: {@code foo bar baz}
+ * result: {@code a=foobarbaz}
+ * </pre></li>
+ * <li>{@code /} Instructs the parser to append this key's value at the position specified after the {@code /}. Requires the
+ * {@code +} modifier to also be present in the key. Example: <pre>
+ * pattern: {@code %{a} %{+a/2} %{+a/1}}
+ * string: {@code foo bar baz}
+ * result: {@code a=foobazbar}
+ * </pre></li>
+ * <li>{@code *} Instructs the parser to ignore the name of this key and instead use its matched value as the key name.
+ * Requires another key with the same name and the {@code &} modifier to supply the value. Example: <pre>
+ * pattern: {@code %{*a} %{b} %{&a}}
+ * string: {@code foo bar baz}
+ * result: {@code foo=baz, b=bar}
+ * </pre></li>
+ * <li>{@code &} Instructs the parser to place the matched value under the key name matched by the key of the same name with the
+ * {@code *} modifier. Requires another key with the same name and the {@code *} modifier. Example: <pre>
+ * pattern: {@code %{*a} %{b} %{&a}}
+ * string: {@code foo bar baz}
+ * result: {@code foo=baz, b=bar}
+ * </pre></li>
+ * <li>{@code ?} Instructs the parser to ignore this key. The key name exists only for the purpose of human readability. Example: <pre>
+ * pattern: {@code %{a} %{?skipme} %{c}}
+ * string: {@code foo bar baz}
+ * result: {@code a=foo, c=baz}
+ * </pre></li>
+ * </ul>
+ * <p>Empty key name patterns are also supported. They behave just like the {@code ?} modifier, except the name is not required.
+ * The result will simply be ignored. Example: <pre>
+ * pattern: {@code %{a} %{} %{c}}
+ * string: {@code foo bar baz}
+ * result: {@code a=foo, c=baz}
+ * </pre>
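+ * <p>For example, a minimal usage sketch (the pattern and input shown are illustrative):
+ * <pre>{@code
+ * DissectParser parser = new DissectParser("%{a} %{b},%{c}", null);
+ * Map<String, String> results = parser.parse("foo bar,baz");
+ * // results now maps a=foo, b=bar, c=baz
+ * }</pre>
+ * <p>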
+ * Inspired by the Logstash Dissect Filter by Guy Boertje + */ +public final class DissectParser { + private static final Pattern LEADING_DELIMITER_PATTERN = Pattern.compile("^(.*?)%"); + private static final Pattern KEY_DELIMITER_FIELD_PATTERN = Pattern.compile("%\\{([^}]*?)}([^%]*)", Pattern.DOTALL); + private static final EnumSet ASSOCIATE_MODIFIERS = EnumSet.of( + DissectKey.Modifier.FIELD_NAME, + DissectKey.Modifier.FIELD_VALUE); + private static final EnumSet APPEND_MODIFIERS = EnumSet.of( + DissectKey.Modifier.APPEND, + DissectKey.Modifier.APPEND_WITH_ORDER); + private static final Function KEY_NAME = val -> val.getKey().getName(); + private final List matchPairs; + private final String pattern; + private String leadingDelimiter = ""; + private final int maxMatches; + private final int maxResults; + private final int appendCount; + private final int referenceCount; + private final String appendSeparator; + + public DissectParser(String pattern, String appendSeparator) { + this.pattern = pattern; + this.appendSeparator = appendSeparator == null ? "" : appendSeparator; + Matcher matcher = LEADING_DELIMITER_PATTERN.matcher(pattern); + while (matcher.find()) { + leadingDelimiter = matcher.group(1); + } + List matchPairs = new ArrayList<>(); + matcher = KEY_DELIMITER_FIELD_PATTERN.matcher(pattern.substring(leadingDelimiter.length())); + while (matcher.find()) { + DissectKey key = new DissectKey(matcher.group(1)); + String delimiter = matcher.group(2); + matchPairs.add(new DissectPair(key, delimiter)); + } + this.maxMatches = matchPairs.size(); + this.maxResults = Long.valueOf(matchPairs.stream() + .filter(dissectPair -> !dissectPair.getKey().skip()).map(KEY_NAME).distinct().count()).intValue(); + if (this.maxMatches == 0 || maxResults == 0) { + throw new DissectException.PatternParse(pattern, "Unable to find any keys or delimiters."); + } + //append validation - look through all of the keys to see if there are any keys that need to participate in an append operation + // but don't have the '+' defined + Set appendKeyNames = matchPairs.stream() + .filter(dissectPair -> APPEND_MODIFIERS.contains(dissectPair.getKey().getModifier())) + .map(KEY_NAME).distinct().collect(Collectors.toSet()); + if (appendKeyNames.size() > 0) { + List modifiedMatchPairs = new ArrayList<>(matchPairs.size()); + for (DissectPair p : matchPairs) { + if (p.getKey().getModifier().equals(DissectKey.Modifier.NONE) && appendKeyNames.contains(p.getKey().getName())) { + modifiedMatchPairs.add(new DissectPair(new DissectKey(p.getKey(), DissectKey.Modifier.APPEND), p.getDelimiter())); + } else { + modifiedMatchPairs.add(p); + } + } + matchPairs = modifiedMatchPairs; + } + appendCount = appendKeyNames.size(); + + //reference validation - ensure that '*' and '&' come in pairs + Map> referenceGroupings = matchPairs.stream() + .filter(dissectPair -> ASSOCIATE_MODIFIERS.contains(dissectPair.getKey().getModifier())) + .collect(Collectors.groupingBy(KEY_NAME)); + for (Map.Entry> entry : referenceGroupings.entrySet()) { + if (entry.getValue().size() != 2) { + throw new DissectException.PatternParse(pattern, "Found invalid key/reference associations: '" + + entry.getValue().stream().map(KEY_NAME).collect(Collectors.joining(",")) + + "' Please ensure each '*' is matched with a matching '&"); + } + } + + referenceCount = referenceGroupings.size() * 2; + this.matchPairs = Collections.unmodifiableList(matchPairs); + } + + + /** + *

+    /**
+     * Entry point to dissect a string into its parts.
+     *
+     * @param inputString The string to dissect
+     * @return the key/value Map of the results
+     * @throws DissectException if unable to dissect a pair into its parts.
+     */
+    public Map<String, String> parse(String inputString) {
+        /**
+         *
+         * This implements a naive string matching algorithm. The string is walked left to right, comparing each byte against
+         * another string's bytes looking for matches. If the bytes match, then a second cursor looks ahead to see if all the
+         * bytes of the other string match. If they all match, record it and advance the primary cursor to the match point. If
+         * not all of the bytes match, progress the main cursor. Repeat until the end of the input string. Since the string
+         * being searched for (the delimiter) is generally small and rare, the naive approach is efficient.
+         *
+         * In this case the string that is walked is the input string, and the string being searched for is the current
+         * delimiter. For example, for a dissect pattern of {@code %{a},%{b}:%{c}} the delimiters (comma then colon) are
+         * searched for in the input string. At class construction the list of keys+delimiters is found (dissectPairs), which
+         * allows the use of that ordered list to know which delimiter to use for the search. The parser progresses to the next
+         * delimiter once the current delimiter is matched.
+         *
+         * There are two special cases that require additional parsing beyond the standard naive algorithm. Consecutive
+         * delimiters should result in empty matches unless the {@code ->} modifier is provided. For example, given the dissect
+         * pattern {@code %{a},%{b},%{c},%{d}} and input string {@code foo,,,} the match should be successful with empty values
+         * for b, c, and d. However, if the key modifier {@code ->} is present, the parser will simply skip over any delimiters
+         * just to the right of the key without assigning any values. For example, {@code %{a->},%{b}} will match the input
+         * string {@code foo,,,,,,bar} with a=foo and b=bar.
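+         *
+         * Concretely, dissecting {@code foo bar,baz} with {@code %{a} %{b},%{c}} finds the space delimiter after foo
+         * (recording a=foo), then the comma after bar (recording b=bar); the final key has an empty trailing delimiter,
+         * so the remainder of the input becomes c=baz.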
+         */
+        DissectMatch dissectMatch = new DissectMatch(appendSeparator, maxMatches, maxResults, appendCount, referenceCount);
+        Iterator<DissectPair> it = matchPairs.iterator();
+        //ensure leading delimiter matches
+        if (inputString != null && inputString.length() > leadingDelimiter.length()
+            && leadingDelimiter.equals(inputString.substring(0, leadingDelimiter.length()))) {
+            byte[] input = inputString.getBytes(StandardCharsets.UTF_8);
+            //grab the first key/delimiter pair
+            DissectPair dissectPair = it.next();
+            DissectKey key = dissectPair.getKey();
+            byte[] delimiter = dissectPair.getDelimiter().getBytes(StandardCharsets.UTF_8);
+            //start dissection after the first delimiter
+            int i = leadingDelimiter.length();
+            int valueStart = i;
+            int lookAheadMatches;
+            //start walking the input string byte by byte, look ahead for matches where needed
+            //if a match is found jump forward to the end of the match
+            for (; i < input.length; i++) {
+                lookAheadMatches = 0;
+                //potential match between delimiter and input string
+                if (delimiter.length > 0 && input[i] == delimiter[0]) {
+                    //look ahead to see if the entire delimiter matches the input string
+                    for (int j = 0; j < delimiter.length; j++) {
+                        if (i + j < input.length && input[i + j] == delimiter[j]) {
+                            lookAheadMatches++;
+                        }
+                    }
+                    //found a full delimiter match
+                    if (lookAheadMatches == delimiter.length) {
+                        //record the key/value tuple
+                        byte[] value = Arrays.copyOfRange(input, valueStart, i);
+                        dissectMatch.add(key, new String(value, StandardCharsets.UTF_8));
+                        //jump to the end of the match
+                        i += lookAheadMatches;
+                        //look for consecutive delimiters (e.g. a,,,,d,e)
+                        while (i < input.length) {
+                            lookAheadMatches = 0;
+                            for (int j = 0; j < delimiter.length; j++) {
+                                if (i + j < input.length && input[i + j] == delimiter[j]) {
+                                    lookAheadMatches++;
+                                }
+                            }
+                            //found consecutive delimiters
+                            if (lookAheadMatches == delimiter.length) {
+                                //jump to the end of the match
+                                i += lookAheadMatches;
+                                if (!key.skipRightPadding()) {
+                                    //progress the keys/delimiter if possible
+                                    if (!it.hasNext()) {
+                                        break; //the while loop
+                                    }
+                                    dissectPair = it.next();
+                                    key = dissectPair.getKey();
+                                    //add the key with an empty value for the empty delimiter
+                                    dissectMatch.add(key, "");
+                                }
+                            } else {
+                                break; //the while loop
+                            }
+                        }
+                        //progress the keys/delimiter if possible
+                        if (!it.hasNext()) {
+                            break; //the for loop
+                        }
+                        dissectPair = it.next();
+                        key = dissectPair.getKey();
+                        delimiter = dissectPair.getDelimiter().getBytes(StandardCharsets.UTF_8);
+                        //i is always one byte after the last found delimiter, aka the start of the next value
+                        valueStart = i;
+                    }
+                }
+            }
+            //the last key, grab the rest of the input (unless consecutive delimiters already grabbed the last key)
+            //and there is no trailing delimiter
+            if (!dissectMatch.fullyMatched() && delimiter.length == 0) {
+                byte[] value = Arrays.copyOfRange(input, valueStart, input.length);
+                String valueString = new String(value, StandardCharsets.UTF_8);
+                dissectMatch.add(key, valueString);
+            }
+        }
+        Map<String, String> results = dissectMatch.getResults();
+
+        if (!dissectMatch.isValid(results)) {
+            throw new DissectException.FindMatch(pattern, inputString);
+        }
+        return results;
+    }
+
+    /**
+     * A tuple class to hold the dissect key and delimiter
+     */
+    private class DissectPair {
+
+        private final DissectKey key;
+        private final String delimiter;
+
+        private DissectPair(DissectKey key, String delimiter) {
+            this.key = key;
+            this.delimiter = delimiter;
+        }
+
+        private DissectKey getKey() {
+            return key;
+        }
+
+        private String
getDelimiter() { + return delimiter; + } + } + +} + + + diff --git a/libs/dissect/src/test/eclipse-build.gradle b/libs/dissect/src/test/eclipse-build.gradle new file mode 100644 index 00000000000..56d632f23b1 --- /dev/null +++ b/libs/dissect/src/test/eclipse-build.gradle @@ -0,0 +1,7 @@ + +// this is just shell gradle file for eclipse to have separate projects for dissect src and tests +apply from: '../../build.gradle' + +dependencies { + testCompile project(':libs:dissect') +} diff --git a/libs/dissect/src/test/java/org/elasticsearch/dissect/DissectKeyTests.java b/libs/dissect/src/test/java/org/elasticsearch/dissect/DissectKeyTests.java new file mode 100644 index 00000000000..0f3f7ed041d --- /dev/null +++ b/libs/dissect/src/test/java/org/elasticsearch/dissect/DissectKeyTests.java @@ -0,0 +1,178 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.dissect; + +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.CoreMatchers; + +import java.util.EnumSet; +import java.util.List; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class DissectKeyTests extends ESTestCase { + + public void testNoModifier() { + String keyName = randomAlphaOfLengthBetween(1, 10); + DissectKey dissectKey = new DissectKey(keyName); + assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NONE)); + assertThat(dissectKey.skip(), is(false)); + assertThat(dissectKey.skipRightPadding(), is(false)); + assertThat(dissectKey.getAppendPosition(), equalTo(0)); + assertThat(dissectKey.getName(), equalTo(keyName)); + } + + public void testAppendModifier() { + String keyName = randomAlphaOfLengthBetween(1, 10); + DissectKey dissectKey = new DissectKey("+" + keyName); + assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.APPEND)); + assertThat(dissectKey.skip(), is(false)); + assertThat(dissectKey.skipRightPadding(), is(false)); + assertThat(dissectKey.getAppendPosition(), equalTo(0)); + assertThat(dissectKey.getName(), equalTo(keyName)); + } + + public void testAppendWithOrderModifier() { + String keyName = randomAlphaOfLengthBetween(1, 10); + int length = randomIntBetween(1, 100); + DissectKey dissectKey = new DissectKey("+" + keyName + "/" + length); + assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.APPEND_WITH_ORDER)); + assertThat(dissectKey.skip(), is(false)); + assertThat(dissectKey.skipRightPadding(), is(false)); + assertThat(dissectKey.getAppendPosition(), equalTo(length)); + assertThat(dissectKey.getName(), equalTo(keyName)); + } + + public void testAppendWithOrderModifierNoName() { + int length = randomIntBetween(1, 100); + DissectException e = expectThrows(DissectException.class, () -> new DissectKey("+/" + length)); 
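+        // a "+/<n>" key supplies an append position but no field name, which the key grammar rejects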
+ assertThat(e.getMessage(), CoreMatchers.containsString("Unable to parse key")); + } + + public void testOrderModifierWithoutAppend() { + String keyName = randomAlphaOfLengthBetween(1, 10); + int length = randomIntBetween(1, 100); + DissectException e = expectThrows(DissectException.class, () -> new DissectKey(keyName + "/" + length)); + assertThat(e.getMessage(), CoreMatchers.containsString("Unable to parse key")); + } + + public void testFieldNameModifier() { + String keyName = randomAlphaOfLengthBetween(1, 10); + DissectKey dissectKey = new DissectKey("*" + keyName); + assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.FIELD_NAME)); + assertThat(dissectKey.skip(), is(false)); + assertThat(dissectKey.skipRightPadding(), is(false)); + assertThat(dissectKey.getAppendPosition(), equalTo(0)); + assertThat(dissectKey.getName(), equalTo(keyName)); + } + + public void testFieldValueModifiers() { + String keyName = randomAlphaOfLengthBetween(1, 10); + DissectKey dissectKey = new DissectKey("&" + keyName); + assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.FIELD_VALUE)); + assertThat(dissectKey.skip(), is(false)); + assertThat(dissectKey.skipRightPadding(), is(false)); + assertThat(dissectKey.getAppendPosition(), equalTo(0)); + assertThat(dissectKey.getName(), equalTo(keyName)); + } + + public void testRightPaddingModifiers() { + String keyName = randomAlphaOfLengthBetween(1, 10); + DissectKey dissectKey = new DissectKey(keyName + "->"); + assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NONE)); + assertThat(dissectKey.skip(), is(false)); + assertThat(dissectKey.skipRightPadding(), is(true)); + assertThat(dissectKey.getAppendPosition(), equalTo(0)); + assertThat(dissectKey.getName(), equalTo(keyName)); + + dissectKey = new DissectKey("*" + keyName + "->"); + assertThat(dissectKey.skipRightPadding(), is(true)); + + dissectKey = new DissectKey("&" + keyName + "->"); + assertThat(dissectKey.skipRightPadding(), is(true)); + + dissectKey = new DissectKey("+" + keyName + "->"); + assertThat(dissectKey.skipRightPadding(), is(true)); + + dissectKey = new DissectKey("?" + keyName + "->"); + assertThat(dissectKey.skipRightPadding(), is(true)); + + dissectKey = new DissectKey("+" + keyName + "/2->"); + assertThat(dissectKey.skipRightPadding(), is(true)); + } + + public void testMultipleLeftModifiers() { + String keyName = randomAlphaOfLengthBetween(1, 10); + List validModifiers = EnumSet.allOf(DissectKey.Modifier.class).stream() + .filter(m -> !m.equals(DissectKey.Modifier.NONE)) + .map(DissectKey.Modifier::toString) + .collect(Collectors.toList()); + String modifier1 = randomFrom(validModifiers); + String modifier2 = randomFrom(validModifiers); + DissectException e = expectThrows(DissectException.class, () -> new DissectKey(modifier1 + modifier2 + keyName)); + assertThat(e.getMessage(), CoreMatchers.containsString("Unable to parse key")); + } + + public void testSkipKey() { + String keyName = ""; + DissectKey dissectKey = new DissectKey(keyName); + assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NONE)); + assertThat(dissectKey.skip(), is(true)); + assertThat(dissectKey.skipRightPadding(), is(false)); + assertThat(dissectKey.getAppendPosition(), equalTo(0)); + assertThat(dissectKey.getName(), equalTo(keyName)); + } + public void testNamedSkipKey() { + String keyName = "myname"; + DissectKey dissectKey = new DissectKey("?" 
+keyName); + assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NAMED_SKIP)); + assertThat(dissectKey.skip(), is(true)); + assertThat(dissectKey.skipRightPadding(), is(false)); + assertThat(dissectKey.getAppendPosition(), equalTo(0)); + assertThat(dissectKey.getName(), equalTo(keyName)); + } + + public void testSkipKeyWithPadding() { + String keyName = ""; + DissectKey dissectKey = new DissectKey(keyName + "->"); + assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NONE)); + assertThat(dissectKey.skip(), is(true)); + assertThat(dissectKey.skipRightPadding(), is(true)); + assertThat(dissectKey.getAppendPosition(), equalTo(0)); + assertThat(dissectKey.getName(), equalTo(keyName)); + } + public void testNamedEmptySkipKeyWithPadding() { + String keyName = ""; + DissectKey dissectKey = new DissectKey("?" +keyName + "->"); + assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NAMED_SKIP)); + assertThat(dissectKey.skip(), is(true)); + assertThat(dissectKey.skipRightPadding(), is(true)); + assertThat(dissectKey.getAppendPosition(), equalTo(0)); + assertThat(dissectKey.getName(), equalTo(keyName)); + } + + public void testInvalidModifiers() { + //should never happen due to regex + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> DissectKey.Modifier.fromString("x")); + assertThat(e.getMessage(), CoreMatchers.containsString("invalid modifier")); + } +} diff --git a/libs/dissect/src/test/java/org/elasticsearch/dissect/DissectMatchTests.java b/libs/dissect/src/test/java/org/elasticsearch/dissect/DissectMatchTests.java new file mode 100644 index 00000000000..d562afb6363 --- /dev/null +++ b/libs/dissect/src/test/java/org/elasticsearch/dissect/DissectMatchTests.java @@ -0,0 +1,93 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.dissect; + +import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.test.ESTestCase; + +import java.nio.charset.StandardCharsets; +import java.util.Map; +import java.util.stream.IntStream; + +import static org.hamcrest.Matchers.equalTo; + +public class DissectMatchTests extends ESTestCase { + + public void testIllegalArgs() { + expectThrows(IllegalArgumentException.class, () -> new DissectMatch("", 0, 1, 0, 0)); + expectThrows(IllegalArgumentException.class, () -> new DissectMatch("", 1, 0, 0, 0)); + } + + public void testValidAndFullyMatched() { + int expectedMatches = randomIntBetween(1, 26); + DissectMatch dissectMatch = new DissectMatch("", expectedMatches, expectedMatches, 0, 0); + IntStream.range(97, 97 + expectedMatches) //allow for a-z values + .forEach(i -> dissectMatch.add(new DissectKey(new String(new byte[]{(byte) i}, StandardCharsets.UTF_8)), "")); + assertThat(dissectMatch.fullyMatched(), equalTo(true)); + assertThat(dissectMatch.isValid(dissectMatch.getResults()), equalTo(true)); + } + + public void testNotValidAndFullyMatched() { + int expectedMatches = randomIntBetween(1, 26); + DissectMatch dissectMatch = new DissectMatch("", expectedMatches, expectedMatches, 0, 0); + IntStream.range(97, 97 + expectedMatches - 1) //allow for a-z values + .forEach(i -> dissectMatch.add(new DissectKey(new String(new byte[]{(byte) i}, StandardCharsets.UTF_8)), "")); + assertThat(dissectMatch.fullyMatched(), equalTo(false)); + assertThat(dissectMatch.isValid(dissectMatch.getResults()), equalTo(false)); + } + + public void testGetResultsIdempotent(){ + int expectedMatches = randomIntBetween(1, 26); + DissectMatch dissectMatch = new DissectMatch("", expectedMatches, expectedMatches, 0, 0); + IntStream.range(97, 97 + expectedMatches) //allow for a-z values + .forEach(i -> dissectMatch.add(new DissectKey(new String(new byte[]{(byte) i}, StandardCharsets.UTF_8)), "")); + assertThat(dissectMatch.getResults(), equalTo(dissectMatch.getResults())); + } + + public void testAppend(){ + DissectMatch dissectMatch = new DissectMatch("-", 3, 1, 3, 0); + dissectMatch.add(new DissectKey("+a"), "x"); + dissectMatch.add(new DissectKey("+a"), "y"); + dissectMatch.add(new DissectKey("+a"), "z"); + Map results = dissectMatch.getResults(); + assertThat(dissectMatch.isValid(results), equalTo(true)); + assertThat(results, equalTo(MapBuilder.newMapBuilder().put("a", "x-y-z").map())); + } + + public void testAppendWithOrder(){ + DissectMatch dissectMatch = new DissectMatch("-", 3, 1, 3, 0); + dissectMatch.add(new DissectKey("+a/3"), "x"); + dissectMatch.add(new DissectKey("+a"), "y"); + dissectMatch.add(new DissectKey("+a/1"), "z"); + Map results = dissectMatch.getResults(); + assertThat(dissectMatch.isValid(results), equalTo(true)); + assertThat(results, equalTo(MapBuilder.newMapBuilder().put("a", "y-z-x").map())); + } + + public void testReference(){ + DissectMatch dissectMatch = new DissectMatch("-", 2, 1, 0, 1); + dissectMatch.add(new DissectKey("&a"), "x"); + dissectMatch.add(new DissectKey("*a"), "y"); + Map results = dissectMatch.getResults(); + assertThat(dissectMatch.isValid(results), equalTo(true)); + assertThat(results, equalTo(MapBuilder.newMapBuilder().put("y", "x").map())); + } + +} diff --git a/libs/dissect/src/test/java/org/elasticsearch/dissect/DissectParserTests.java b/libs/dissect/src/test/java/org/elasticsearch/dissect/DissectParserTests.java new file mode 100644 index 00000000000..c22cec98eb7 --- /dev/null +++ 
b/libs/dissect/src/test/java/org/elasticsearch/dissect/DissectParserTests.java @@ -0,0 +1,386 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.dissect; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.CoreMatchers; +import org.hamcrest.Matchers; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiAlphanumOfLengthBetween; + +public class DissectParserTests extends ESTestCase { + + public void testJavaDocExamples() { + assertMatch("%{a} %{b},%{c}", "foo bar,baz", Arrays.asList("a", "b", "c"), Arrays.asList("foo", "bar", "baz")); + assertMiss("%{a},%{b}:%{c}", "foo,bar,baz"); + assertMatch("%{a->} %{b} %{c}", "foo bar baz", Arrays.asList("a", "b", "c"), Arrays.asList("foo", "bar", "baz")); + assertMatch("%{a} %{+a} %{+a}", "foo bar baz", Arrays.asList("a"), Arrays.asList("foobarbaz")); + assertMatch("%{a} %{+a/2} %{+a/1}", "foo bar baz", Arrays.asList("a"), Arrays.asList("foobazbar")); + assertMatch("%{*a} %{b} %{&a}", "foo bar baz", Arrays.asList("foo", "b"), Arrays.asList("baz", "bar")); + assertMatch("%{a} %{} %{c}", "foo bar baz", Arrays.asList("a", "c"), Arrays.asList("foo", "baz")); + assertMatch("%{a} %{?skipme} %{c}", "foo bar baz", Arrays.asList("a", "c"), Arrays.asList("foo", "baz")); + assertMatch("%{a},%{b},%{c},%{d}", "foo,,,", Arrays.asList("a", "b", "c", "d"), Arrays.asList("foo", "", "", "")); + assertMatch("%{a->},%{b}", "foo,,,,,,bar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar")); + } + + /** + * Borrowed from Logstash's test cases: + * https://github.com/logstash-plugins/logstash-filter-dissect/blob/master/src/test/java/org/logstash/dissect/DissectorTest.java + * Append Note - Logstash appends with the delimiter as the separator between values, this uses a user defined separator + */ + public void testLogstashSpecs() { + assertMatch("%{a} %{b->} %{c}", "foo bar baz", Arrays.asList("a", "b", "c"), Arrays.asList("foo", "bar", "baz")); + assertMiss("%{a}%{b} %{c}", null); + assertMiss("%{a} %{b}%{c} %{d}", "foo bar baz"); + assertMiss("%{a} %{b} %{c}%{d}", "foo bar baz quux"); + assertMatch("%{a} %{b->} %{c}", "foo bar baz", Arrays.asList("a", "b", "c"), Arrays.asList("foo", "bar", "baz")); + assertMatch("%{a} %{} %{c}", "foo bar baz", Arrays.asList("a", "c"), Arrays.asList("foo", "baz")); + assertMatch("%{a} %{b} %{+b} %{z}", "foo bar baz quux", Arrays.asList("a", "b", "z"), Arrays.asList("foo", "bar baz", "quux"), " "); + assertMatch("%{a}------->%{b}", "foo------->bar baz quux", 
Arrays.asList("a", "b"), Arrays.asList("foo", "bar baz quux")); + assertMatch("%{a}------->%{}", "foo------->bar baz quux", Arrays.asList("a"), Arrays.asList("foo")); + assertMatch("%{a} » %{b}»%{c}€%{d}", "foo » bar»baz€quux", + Arrays.asList("a", "b", "c", "d"), Arrays.asList("foo", "bar", "baz", "quux")); + assertMatch("%{a} %{b} %{+a}", "foo bar baz quux", Arrays.asList("a", "b"), Arrays.asList("foo baz quux", "bar"), " "); + //Logstash supports implicit ordering based anchored by the the key without the '+' + //This implementation will only honor implicit ordering for appending right to left else explicit order (/N) is required. + //The results of this test differ from Logstash. + assertMatch("%{+a} %{a} %{+a} %{b}", "December 31 1999 quux", + Arrays.asList("a", "b"), Arrays.asList("December 31 1999", "quux"), " "); + //Same test as above, but with same result as Logstash using explicit ordering in the pattern + assertMatch("%{+a/1} %{a} %{+a/2} %{b}", "December 31 1999 quux", + Arrays.asList("a", "b"), Arrays.asList("31 December 1999", "quux"), " "); + assertMatch("%{+a/2} %{+a/4} %{+a/1} %{+a/3}", "bar quux foo baz", Arrays.asList("a"), Arrays.asList("foo bar baz quux"), " "); + assertMatch("%{+a} %{b}", "foo bar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar")); + assertMatch("%{+a} %{b} %{+a} %{c}", "foo bar baz quux", + Arrays.asList("a", "b", "c"), Arrays.asList("foo baz", "bar", "quux"), " "); + assertMatch("%{} %{syslog_timestamp} %{hostname} %{rt}: %{reason} %{+reason} %{src_ip}/%{src_port}->%{dst_ip}/%{dst_port} " + + "%{polrt} %{+polrt} %{+polrt} %{from_zone} %{to_zone} %{rest}", + "42 2016-05-25T14:47:23Z host.name.com RT_FLOW - RT_FLOW_SESSION_DENY: session denied 2.2.2.20/60000->1.1.1.10/8090 None " + + "6(0) DEFAULT-DENY ZONE-UNTRUST ZONE-DMZ UNKNOWN UNKNOWN N/A(N/A) ge-0/0/0.0", + Arrays.asList("syslog_timestamp", "hostname", "rt", "reason", "src_ip", "src_port", "dst_ip", "dst_port", "polrt" + , "from_zone", "to_zone", "rest"), + Arrays.asList("2016-05-25T14:47:23Z", "host.name.com", "RT_FLOW - RT_FLOW_SESSION_DENY", "session denied", "2.2.2.20", "60000" + , "1.1.1.10", "8090", "None 6(0) DEFAULT-DENY", "ZONE-UNTRUST", "ZONE-DMZ", "UNKNOWN UNKNOWN N/A(N/A) ge-0/0/0.0"), " "); + assertBadKey("%{+/2}"); + assertBadKey("%{&+a_field}"); + assertMatch("%{a->} %{b->}---%{c}", "foo bar------------baz", + Arrays.asList("a", "b", "c"), Arrays.asList("foo", "bar", "baz")); + assertMatch("%{->}-%{a}", "-----666", Arrays.asList("a"), Arrays.asList("666")); + assertMatch("%{?skipme->}-%{a}", "-----666", Arrays.asList("a"), Arrays.asList("666")); + assertMatch("%{a},%{b},%{c},%{d},%{e},%{f}", "111,,333,,555,666", + Arrays.asList("a", "b", "c", "d", "e", "f"), Arrays.asList("111", "", "333", "", "555", "666")); + assertMatch("%{a}.࿏.%{b}", "⟳༒.࿏.༒⟲", Arrays.asList("a", "b"), Arrays.asList("⟳༒", "༒⟲")); + assertMatch("%{a}", "子", Arrays.asList("a"), Arrays.asList("子")); + assertMatch("%{a}{\n}%{b}", "aaa{\n}bbb", Arrays.asList("a", "b"), Arrays.asList("aaa", "bbb")); + assertMiss("MACHINE[%{a}] %{b}", "1234567890 MACHINE[foo] bar"); + assertMiss("%{a} %{b} %{c}", "foo:bar:baz"); + assertMatch("/var/%{key1}/log/%{key2}.log", "/var/foo/log/bar.log", Arrays.asList("key1", "key2"), Arrays.asList("foo", "bar")); + assertMatch("%{a->} %{b}-.-%{c}-%{d}-..-%{e}-%{f}-%{g}-%{h}", "foo bar-.-baz-1111-..-22-333-4444-55555", + Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h"), + Arrays.asList("foo", "bar", "baz", "1111", "22", "333", "4444", "55555")); + } + + public void 
testBasicMatch() { + String valueFirstInput = ""; + String keyFirstPattern = ""; + String delimiterFirstInput = ""; + String delimiterFirstPattern = ""; + //parallel arrays + List expectedKeys = Arrays.asList(generateRandomStringArray(100, 10, false, false)); + List expectedValues = new ArrayList<>(expectedKeys.size()); + for (String key : expectedKeys) { + String value = randomAsciiAlphanumOfLengthBetween(1, 100); + String delimiter = Integer.toString(randomInt()); //int to ensures values and delimiters don't overlap, else validation can fail + keyFirstPattern += "%{" + key + "}" + delimiter; + valueFirstInput += value + delimiter; + delimiterFirstPattern += delimiter + "%{" + key + "}"; + delimiterFirstInput += delimiter + value; + expectedValues.add(value); + } + assertMatch(keyFirstPattern, valueFirstInput, expectedKeys, expectedValues); + assertMatch(delimiterFirstPattern, delimiterFirstInput, expectedKeys, expectedValues); + } + + public void testBasicMatchUnicode() { + String valueFirstInput = ""; + String keyFirstPattern = ""; + String delimiterFirstInput = ""; + String delimiterFirstPattern = ""; + //parallel arrays + List expectedKeys = new ArrayList<>(); + List expectedValues = new ArrayList<>(); + for (int i = 0; i < randomIntBetween(1, 100); i++) { + String key = randomAsciiAlphanumOfLengthBetween(1, 100); + String value = randomRealisticUnicodeOfCodepointLengthBetween(1, 100); + String delimiter = Integer.toString(randomInt()); //int to ensures values and delimiters don't overlap, else validation can fail + keyFirstPattern += "%{" + key + "}" + delimiter; + valueFirstInput += value + delimiter; + delimiterFirstPattern += delimiter + "%{" + key + "}"; + delimiterFirstInput += delimiter + value; + expectedKeys.add(key); + expectedValues.add(value); + } + assertMatch(keyFirstPattern, valueFirstInput, expectedKeys, expectedValues); + assertMatch(delimiterFirstPattern, delimiterFirstInput, expectedKeys, expectedValues); + } + + public void testMatchUnicode() { + assertMatch("%{a} %{b}", "foo 子", Arrays.asList("a", "b"), Arrays.asList("foo", "子")); + assertMatch("%{a}࿏%{b} %{c}", "⟳༒࿏༒⟲ 子", Arrays.asList("a", "b", "c"), Arrays.asList("⟳༒", "༒⟲", "子")); + assertMatch("%{a}࿏%{+a} %{+a}", "⟳༒࿏༒⟲ 子", Arrays.asList("a"), Arrays.asList("⟳༒༒⟲子")); + assertMatch("%{a}࿏%{+a/2} %{+a/1}", "⟳༒࿏༒⟲ 子", Arrays.asList("a"), Arrays.asList("⟳༒子༒⟲")); + assertMatch("%{a->}࿏%{b}", "⟳༒࿏࿏࿏࿏࿏༒⟲", Arrays.asList("a", "b"), Arrays.asList("⟳༒", "༒⟲")); + assertMatch("%{*a}࿏%{&a}", "⟳༒࿏༒⟲", Arrays.asList("⟳༒"), Arrays.asList("༒⟲")); + assertMatch("%{}࿏%{a}", "⟳༒࿏༒⟲", Arrays.asList("a"), Arrays.asList("༒⟲")); + } + + public void testMatchRemainder() { + assertMatch("%{a}", "foo bar the rest", Arrays.asList("a"), Arrays.asList("foo bar the rest")); + assertMatch("%{a} %{b}", "foo bar the rest", Arrays.asList("a", "b"), Arrays.asList("foo", "bar the rest")); + assertMatch("%{} %{b}", "foo bar the rest", Arrays.asList("b"), Arrays.asList("bar the rest")); + assertMatch("%{a} %{b->}", "foo bar the rest", Arrays.asList("a", "b"), Arrays.asList("foo", "bar the rest")); + assertMatch("%{*a} %{&a}", "foo bar the rest", Arrays.asList("foo"), Arrays.asList("bar the rest")); + assertMatch("%{a} %{+a}", "foo bar the rest", Arrays.asList("a"), Arrays.asList("foo bar the rest"), " "); + } + + public void testAppend() { + assertMatch("%{a} %{+a} %{+a}", "foo bar baz", Arrays.asList("a"), Arrays.asList("foobarbaz")); + assertMatch("%{a} %{+a} %{b} %{+b}", "foo bar baz lol", Arrays.asList("a", "b"), Arrays.asList("foobar", 
"bazlol")); + assertMatch("%{a} %{+a/2} %{+a/1}", "foo bar baz", Arrays.asList("a"), Arrays.asList("foobazbar")); + assertMatch("%{a} %{+a/2} %{+a/1}", "foo bar baz", Arrays.asList("a"), Arrays.asList("foo baz bar"), " "); + } + + public void testAssociate() { + assertMatch("%{*a} %{&a}", "foo bar", Arrays.asList("foo"), Arrays.asList("bar")); + assertMatch("%{&a} %{*a}", "foo bar", Arrays.asList("bar"), Arrays.asList("foo")); + assertMatch("%{*a} %{&a} %{*b} %{&b}", "foo bar baz lol", Arrays.asList("foo", "baz"), Arrays.asList("bar", "lol")); + assertMatch("%{*a} %{&a} %{c} %{*b} %{&b}", "foo bar x baz lol", + Arrays.asList("foo", "baz", "c"), Arrays.asList("bar", "lol", "x")); + assertBadPattern("%{*a} %{a}"); + assertBadPattern("%{a} %{&a}"); + assertMiss("%{*a} %{&a} {a} %{*b} %{&b}", "foo bar x baz lol"); + } + + public void testAppendAndAssociate() { + assertMatch("%{a} %{+a} %{*b} %{&b}", "foo bar baz lol", Arrays.asList("a", "baz"), Arrays.asList("foobar", "lol")); + assertMatch("%{a->} %{+a/2} %{+a/1} %{*b} %{&b}", "foo bar baz lol x", + Arrays.asList("a", "lol"), Arrays.asList("foobazbar", "x")); + } + + public void testEmptyKey() { + assertMatch("%{} %{b}", "foo bar", Arrays.asList("b"), Arrays.asList("bar")); + assertMatch("%{a} %{}", "foo bar", Arrays.asList("a"), Arrays.asList("foo")); + assertMatch("%{->} %{b}", "foo bar", Arrays.asList("b"), Arrays.asList("bar")); + assertMatch("%{->} %{b}", " bar", Arrays.asList("b"), Arrays.asList("bar")); + assertMatch("%{a} %{->}", "foo bar ", Arrays.asList("a"), Arrays.asList("foo")); + } + + public void testNamedSkipKey() { + assertMatch("%{?foo} %{b}", "foo bar", Arrays.asList("b"), Arrays.asList("bar")); + assertMatch("%{?} %{b}", "foo bar", Arrays.asList("b"), Arrays.asList("bar")); + assertMatch("%{a} %{?bar}", "foo bar", Arrays.asList("a"), Arrays.asList("foo")); + assertMatch("%{?foo->} %{b}", "foo bar", Arrays.asList("b"), Arrays.asList("bar")); + assertMatch("%{?->} %{b}", "foo bar", Arrays.asList("b"), Arrays.asList("bar")); + assertMatch("%{?foo->} %{b}", " bar", Arrays.asList("b"), Arrays.asList("bar")); + assertMatch("%{a} %{->?bar}", "foo bar ", Arrays.asList("a"), Arrays.asList("foo")); + assertMatch("%{a} %{?skipme} %{?skipme}", "foo bar baz", Arrays.asList("a"), Arrays.asList("foo")); + assertMatch("%{a} %{?} %{?}", "foo bar baz", Arrays.asList("a"), Arrays.asList("foo")); + } + + public void testConsecutiveDelimiters() { + //leading + assertMatch("%{->},%{a}", ",,,,,foo", Arrays.asList("a"), Arrays.asList("foo")); + assertMatch("%{a->},%{b}", ",,,,,foo", Arrays.asList("a", "b"), Arrays.asList("", "foo")); + //trailing + assertMatch("%{a->},", "foo,,,,,", Arrays.asList("a"), Arrays.asList("foo")); + assertMatch("%{a} %{b},", "foo bar,,,,,", Arrays.asList("a", "b"), Arrays.asList("foo", "bar")); + assertMatch("%{a} %{b->},", "foo bar,,,,,", Arrays.asList("a", "b"), Arrays.asList("foo", "bar")); + //middle + assertMatch("%{a->},%{b}", "foo,,,,,bar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar")); + assertMatch("%{a->} %{b}", "foo bar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar")); + assertMatch("%{a->}x%{b}", "fooxxxxxbar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar")); + assertMatch("%{a->} xyz%{b}", "foo xyz xyz xyz xyz xyzbar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar")); + //skipped with empty values + assertMatch("%{a},%{b},%{c},%{d}", "foo,,,", Arrays.asList("a", "b", "c", "d"), Arrays.asList("foo", "", "", "")); + assertMatch("%{a},%{b},%{c},%{d}", "foo,,bar,baz", 
Arrays.asList("a", "b", "c", "d"), Arrays.asList("foo", "", "bar", "baz")); + assertMatch("%{a},%{b},%{c},%{d}", "foo,,,baz", Arrays.asList("a", "b", "c", "d"), Arrays.asList("foo", "", "", "baz")); + assertMatch("%{a},%{b},%{c},%{d}", ",bar,,baz", Arrays.asList("a", "b", "c", "d"), Arrays.asList("", "bar", "", "baz")); + assertMatch("%{->},%{a->},%{b}", ",,,bar,,baz", Arrays.asList("a", "b"), Arrays.asList("bar", "baz")); + } + + public void testAppendWithConsecutiveDelimiters() { + assertMatch("%{+a/1},%{+a/3}-%{+a/2} %{b}", "foo,bar----baz lol", Arrays.asList("a", "b"), Arrays.asList("foobar", "")); + assertMatch("%{+a/1},%{+a/3->}-%{+a/2} %{b}", "foo,bar----baz lol", Arrays.asList("a", "b"), Arrays.asList("foobazbar", "lol")); + } + + public void testSkipRightPadding() { + assertMatch("%{a->} %{b}", "foo bar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar")); + assertMatch("%{a->} %{b}", "foo bar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar")); + assertMatch("%{->} %{a}", "foo bar", Arrays.asList("a"), Arrays.asList("bar")); + assertMatch("%{a->} %{+a->} %{*b->} %{&b->} %{c}", "foo bar baz lol x", + Arrays.asList("a", "baz", "c"), Arrays.asList("foobar", "lol", "x")); + } + + public void testTrimmedEnd() { + assertMatch("%{a} %{b}", "foo bar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar")); + assertMatch("%{a} %{b->} ", "foo bar ", Arrays.asList("a", "b"), Arrays.asList("foo", "bar")); + //only whitespace is trimmed in the absence of trailing characters + assertMatch("%{a} %{b->}", "foo bar,,,,,,", Arrays.asList("a", "b"), Arrays.asList("foo", "bar,,,,,,")); + //consecutive delimiters + right padding can be used to skip over the trailing delimiters + assertMatch("%{a} %{b->},", "foo bar,,,,,,", Arrays.asList("a", "b"), Arrays.asList("foo", "bar")); + } + + public void testLeadingDelimiter() { + assertMatch(",,,%{a} %{b}", ",,,foo bar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar")); + assertMatch(",%{a} %{b}", ",,foo bar", Arrays.asList("a", "b"), Arrays.asList(",foo", "bar")); + } + + /** + * Runtime errors + */ + public void testMiss() { + assertMiss("%{a}%{b}", "foo"); + assertMiss("%{a},%{b}", "foo bar"); + assertMiss("%{a}, %{b}", "foo,bar"); + assertMiss("x%{a},%{b}", "foo,bar"); + assertMiss("x%{},%{b}", "foo,bar"); + assertMiss("leading_delimiter_long%{a}", "foo"); + assertMiss("%{a}trailing_delimiter_long", "foo"); + assertMiss("leading_delimiter_long%{a}trailing_delimiter_long", "foo"); + assertMiss("%{a}x", "foo"); + assertMiss("%{a},%{b}x", "foo,bar"); + } + + /** + * Construction errors + */ + public void testBadPatternOrKey() { + assertBadPattern(""); + assertBadPattern("{}"); + assertBadPattern("%{*a} %{&b}"); + assertBadKey("%{*}"); + assertBadKey("%{++}"); + } + + public void testSyslog() { + assertMatch("%{timestamp} %{+timestamp} %{+timestamp} %{logsource} %{program}[%{pid}]: %{message}", + "Mar 16 00:01:25 evita postfix/smtpd[1713]: connect from camomile.cloud9.net[168.100.1.3]", + Arrays.asList("timestamp", "logsource", "program", "pid", "message"), + Arrays.asList("Mar 16 00:01:25", "evita", "postfix/smtpd", "1713", "connect from camomile.cloud9.net[168.100.1.3]"), " "); + } + + public void testApacheLog() { + assertMatch("%{clientip} %{ident} %{auth} [%{timestamp}] \"%{verb} %{request} HTTP/%{httpversion}\" %{response} %{bytes}" + + " \"%{referrer}\" \"%{agent}\" %{->}", + "31.184.238.164 - - [24/Jul/2014:05:35:37 +0530] \"GET /logs/access.log HTTP/1.0\" 200 69849 " + + "\"http://8rursodiol.enjin.com\" \"Mozilla/5.0 (Windows NT 6.1; 
WOW64) AppleWebKit/537.36 (KHTML, like Gecko) " + + "Chrome/30.0.1599.12785 YaBrowser/13.12.1599.12785 Safari/537.36\" \"www.dlwindianrailways.com\"", + Arrays.asList("clientip", "ident", "auth", "timestamp", "verb", "request", "httpversion", "response", "bytes", + "referrer", "agent"), + Arrays.asList("31.184.238.164", "-", "-", "24/Jul/2014:05:35:37 +0530", "GET", "/logs/access.log", "1.0", "200", "69849", + "http://8rursodiol.enjin.com", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36" + + " (KHTML, like Gecko) Chrome/30.0.1599.12785 YaBrowser/13.12.1599.12785 Safari/537.36")); + } + + /** + * Shared specification between Beats, Logstash, and Ingest node + */ + public void testJsonSpecification() throws Exception { + ObjectMapper mapper = new ObjectMapper(); + JsonNode rootNode = mapper.readTree(this.getClass().getResourceAsStream("/specification/tests.json")); + Iterator tests = rootNode.elements(); + while (tests.hasNext()) { + JsonNode test = tests.next(); + boolean skip = test.path("skip").asBoolean(); + if (!skip) { + String name = test.path("name").asText(); + logger.debug("Running Json specification: " + name); + String pattern = test.path("tok").asText(); + String input = test.path("msg").asText(); + String append = test.path("append").asText(); + boolean fail = test.path("fail").asBoolean(); + Iterator> expected = test.path("expected").fields(); + List expectedKeys = new ArrayList<>(); + List expectedValues = new ArrayList<>(); + expected.forEachRemaining(entry -> { + expectedKeys.add(entry.getKey()); + expectedValues.add(entry.getValue().asText()); + }); + if (fail) { + assertFail(pattern, input); + } else { + assertMatch(pattern, input, expectedKeys, expectedValues, append); + } + } + } + } + + private DissectException assertFail(String pattern, String input){ + return expectThrows(DissectException.class, () -> new DissectParser(pattern, null).parse(input)); + } + + private void assertMiss(String pattern, String input) { + DissectException e = assertFail(pattern, input); + assertThat(e.getMessage(), CoreMatchers.containsString("Unable to find match for dissect pattern")); + assertThat(e.getMessage(), CoreMatchers.containsString(pattern)); + assertThat(e.getMessage(), input == null ? 
CoreMatchers.containsString("null") : CoreMatchers.containsString(input)); + } + + private void assertBadPattern(String pattern) { + DissectException e = assertFail(pattern, null); + assertThat(e.getMessage(), CoreMatchers.containsString("Unable to parse pattern")); + assertThat(e.getMessage(), CoreMatchers.containsString(pattern)); + } + + private void assertBadKey(String pattern, String key) { + DissectException e = assertFail(pattern, null); + assertThat(e.getMessage(), CoreMatchers.containsString("Unable to parse key")); + assertThat(e.getMessage(), CoreMatchers.containsString(key)); + } + + private void assertBadKey(String pattern) { + assertBadKey(pattern, pattern.replace("%{", "").replace("}", "")); + } + + private void assertMatch(String pattern, String input, List expectedKeys, List expectedValues) { + assertMatch(pattern, input, expectedKeys, expectedValues, null); + } + + private void assertMatch(String pattern, String input, List expectedKeys, List expectedValues, String appendSeperator) { + Map results = new DissectParser(pattern, appendSeperator).parse(input); + List foundKeys = new ArrayList<>(results.keySet()); + List foundValues = new ArrayList<>(results.values()); + Collections.sort(foundKeys); + Collections.sort(foundValues); + Collections.sort(expectedKeys); + Collections.sort(expectedValues); + assertThat(foundKeys, Matchers.equalTo(expectedKeys)); + assertThat(foundValues, Matchers.equalTo(expectedValues)); + } +} diff --git a/libs/dissect/src/test/resources/specification/tests.json b/libs/dissect/src/test/resources/specification/tests.json new file mode 100644 index 00000000000..1cb85ce6519 --- /dev/null +++ b/libs/dissect/src/test/resources/specification/tests.json @@ -0,0 +1,363 @@ +[ + { + "name": "When all the defined fields are captured by we have remaining data", + "tok": "level=%{level} ts=%{timestamp} caller=%{caller} msg=\"%{message}\"", + "msg": "level=info ts=2018-06-27T17:19:13.036579993Z caller=main.go:222 msg=\"Starting OK\" version=\"(version=2.3.1, branch=HEAD, revision=188ca45bd85ce843071e768d855722a9d9dabe03)\"}", + "expected": { + "caller": "main.go:222", + "level": "info", + "message": "Starting OK", + "timestamp": "2018-06-27T17:19:13.036579993Z" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "Complex stack trace", + "tok": "%{day}-%{month}-%{year} %{hour} %{severity} [%{thread_id}] %{origin} %{message}", + "msg": "18-Apr-2018 06:53:20.411 INFO [http-nio-8080-exec-1] org.apache.coyote.http11.Http11Processor.service Error parsing HTTP request header\n Note: further occurrences of HTTP header parsing errors will be logged at DEBUG level.\n java.lang.IllegalArgumentException: Invalid character found in method name. 
HTTP method names must be tokens\n at org.apache.coyote.http11.Http11InputBuffer.parseRequestLine(Http11InputBuffer.java:426)\n at org.apache.coyote.http11.Http11Processor.service(Http11Processor.java:687)\n at org.apache.coyote.AbstractProcessorLight.process(AbstractProcessorLight.java:66)\n at org.apache.coyote.AbstractProtocol$ConnectionHandler.process(AbstractProtocol.java:790)\n at org.apache.tomcat.util.net.NioEndpoint$SocketProcessor.doRun(NioEndpoint.java:1459)\n at org.apache.tomcat.util.net.SocketProcessorBase.run(SocketProcessorBase.java:49)\n at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n at org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61)\n at java.lang.Thread.run(Thread.java:748)", + "expected": { + "day": "18", + "hour": "06:53:20.411", + "message": "Error parsing HTTP request header\n Note: further occurrences of HTTP header parsing errors will be logged at DEBUG level.\n java.lang.IllegalArgumentException: Invalid character found in method name. HTTP method names must be tokens\n at org.apache.coyote.http11.Http11InputBuffer.parseRequestLine(Http11InputBuffer.java:426)\n at org.apache.coyote.http11.Http11Processor.service(Http11Processor.java:687)\n at org.apache.coyote.AbstractProcessorLight.process(AbstractProcessorLight.java:66)\n at org.apache.coyote.AbstractProtocol$ConnectionHandler.process(AbstractProtocol.java:790)\n at org.apache.tomcat.util.net.NioEndpoint$SocketProcessor.doRun(NioEndpoint.java:1459)\n at org.apache.tomcat.util.net.SocketProcessorBase.run(SocketProcessorBase.java:49)\n at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n at org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61)\n at java.lang.Thread.run(Thread.java:748)", + "month": "Apr", + "origin": "org.apache.coyote.http11.Http11Processor.service", + "severity": "INFO", + "thread_id": "http-nio-8080-exec-1", + "year": "2018" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "success when delimiter found at the beginning and end of the string", + "tok": "/var/log/%{key}.log", + "msg": "/var/log/foobar.log", + "expected": { + "key": "foobar" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "fails when delimiter is not found at the beginning of the string", + "tok": "/var/log/%{key}.log", + "msg": "foobar", + "expected": null, + "skip": false, + "fail": true, + "append": "" + }, + { + "name": "fails when delimiter is not found after the key", + "tok": "/var/log/%{key}.log", + "msg": "/var/log/foobar", + "expected": null, + "skip": false, + "fail": true, + "append": "" + }, + { + "name": "simple dissect", + "tok": "%{key}", + "msg": "foobar", + "expected": { + "key": "foobar" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "dissect two replacement", + "tok": "%{key1} %{key2}", + "msg": "foo bar", + "expected": { + "key1": "foo", + "key2": "bar" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "fail on partial match", + "tok": "%{key1} %{key2} %{key3}", + "msg": "foo bar", + "expected": null, + "skip": false, + "fail": true, + "append": "" + }, + { + "name": "one level dissect not end of string", + "tok": "/var/%{key}/log", + "msg": "/var/foobar/log", + "expected": { + "key": "foobar" + }, + "skip": false, 
+ "fail": false, + "append": "" + }, + { + "name": "one level dissect", + "tok": "/var/%{key}", + "msg": "/var/foobar/log", + "expected": { + "key": "foobar/log" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "multiple keys dissect end of string", + "tok": "/var/%{key}/log/%{key1}", + "msg": "/var/foobar/log/apache", + "expected": { + "key": "foobar", + "key1": "apache" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "multiple keys not end of string", + "tok": "/var/%{key}/log/%{key1}.log", + "msg": "/var/foobar/log/apache.log", + "expected": { + "key": "foobar", + "key1": "apache" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "append with order", + "tok": "%{+key/3} %{+key/1} %{+key/2}", + "msg": "1 2 3", + "expected": { + "key": "231" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "append with order and separator", + "tok": "%{+key/3} %{+key/1} %{+key/2}", + "msg": "1 2 3", + "expected": { + "key": "2::3::1" + }, + "skip": false, + "fail": false, + "append": "::" + }, + { + "name": "append with order and right padding", + "tok": "%{+key/3} %{+key/1-\u003e} %{+key/2}", + "msg": "1 2 3", + "expected": { + "key": "231" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "simple append", + "tok": "%{key}-%{+key}-%{+key}", + "msg": "1-2-3", + "expected": { + "key": "123" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "simple append with separator", + "tok": "%{key}-%{+key}-%{+key}", + "msg": "1-2-3", + "expected": { + "key": "1,2,3" + }, + "skip": false, + "fail": false, + "append": "," + }, + { + "name": "reference field", + "tok": "%{*key} %{\u0026key}", + "msg": "hello world", + "expected": { + "hello": "world" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "reference field alt order", + "tok": "%{\u0026key} %{*key}", + "msg": "hello world", + "expected": { + "world": "hello" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "nameless skip field", + "tok": "%{} %{key}", + "msg": "hello world", + "expected": { + "key": "world" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "named skip field", + "tok": "%{?skipme} %{key}", + "msg": "hello world", + "expected": { + "key": "world" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "reference without pairing", + "tok": "%{key} %{\u0026key}", + "msg": "hello world", + "expected": null, + "skip": false, + "fail": true, + "append": "" + }, + { + "name": "missing fields (consecutive delimiters)", + "tok": "%{name},%{addr1},%{addr2},%{addr3},%{city},%{zip}", + "msg": "Jane Doe,4321 Fifth Avenue,,,New York,87432", + "expected": { + "addr1": "4321 Fifth Avenue", + "addr2": "", + "addr3": "", + "city": "New York", + "name": "Jane Doe", + "zip": "87432" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "missing fields with right padding (consecutive delimiters)", + "tok": "%{name},%{addr1-\u003e},%{city},%{zip}", + "msg": "Jane Doe,4321 Fifth Avenue,,,New York,87432", + "expected": { + "addr1": "4321 Fifth Avenue", + "city": "New York", + "name": "Jane Doe", + "zip": "87432" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "ignore right padding", + "tok": "%{id} %{function-\u003e} %{server}", + "msg": "00000043 ViewReceive machine-321", + "expected": { + "function": "ViewReceive", + "id": "00000043", + "server": "machine-321" + }, + "skip": false, + "fail": false, 
+ "append": "" + }, + { + "name": "padding on the last key need a delimiter", + "tok": "%{id} %{function} %{server-\u003e} ", + "msg": "00000043 ViewReceive machine-321 ", + "expected": { + "function": "ViewReceive", + "id": "00000043", + "server": "machine-321" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "ignore left padding", + "tok": "%{id-\u003e} %{function} %{server}", + "msg": "00000043 ViewReceive machine-321", + "expected": { + "function": "ViewReceive", + "id": "00000043", + "server": "machine-321" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "when the delimiters contains `{` and `}`", + "tok": "{%{a}}{%{b}} %{rest}", + "msg": "{c}{d} anything", + "expected": { + "a": "c", + "b": "d", + "rest": "anything" + }, + "skip": false, + "fail": false, + "append": "" + }, + { + "name": "no keys defined", + "tok": "anything", + "msg": "anything", + "expected": null, + "skip": false, + "fail": true, + "append": "" + }, + { + "name": "invalid key", + "tok": "%{some?thing}", + "msg": "anything", + "expected": null, + "skip": false, + "fail": true, + "append": "" + }, + { + "name": "matches non-ascii", + "tok": "%{a}࿏%{b} %{c}", + "msg": "⟳༒࿏༒⟲ 子", + "expected": { + "a": "⟳༒", + "b": "༒⟲", + "c": "子" + }, + "skip": false, + "fail": false, + "append": "" + } + +] \ No newline at end of file diff --git a/libs/grok/src/main/java/org/elasticsearch/grok/ThreadWatchdog.java b/libs/grok/src/main/java/org/elasticsearch/grok/ThreadWatchdog.java index d0de7637d2c..f3515fcfe83 100644 --- a/libs/grok/src/main/java/org/elasticsearch/grok/ThreadWatchdog.java +++ b/libs/grok/src/main/java/org/elasticsearch/grok/ThreadWatchdog.java @@ -21,6 +21,8 @@ package org.elasticsearch.grok; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiFunction; import java.util.function.LongSupplier; @@ -104,6 +106,8 @@ public interface ThreadWatchdog { private final long maxExecutionTime; private final LongSupplier relativeTimeSupplier; private final BiFunction> scheduler; + private final AtomicInteger registered = new AtomicInteger(0); + private final AtomicBoolean running = new AtomicBoolean(false); final ConcurrentHashMap registry = new ConcurrentHashMap<>(); private Default(long interval, @@ -114,11 +118,14 @@ public interface ThreadWatchdog { this.maxExecutionTime = maxExecutionTime; this.relativeTimeSupplier = relativeTimeSupplier; this.scheduler = scheduler; - scheduler.apply(interval, this::interruptLongRunningExecutions); } public void register() { + registered.getAndIncrement(); Long previousValue = registry.put(Thread.currentThread(), relativeTimeSupplier.getAsLong()); + if (running.compareAndSet(false, true) == true) { + scheduler.apply(interval, this::interruptLongRunningExecutions); + } assert previousValue == null; } @@ -129,6 +136,7 @@ public interface ThreadWatchdog { public void unregister() { Long previousValue = registry.remove(Thread.currentThread()); + registered.decrementAndGet(); assert previousValue != null; } @@ -140,7 +148,11 @@ public interface ThreadWatchdog { // not removing the entry here, this happens in the unregister() method. 
} } - scheduler.apply(interval, this::interruptLongRunningExecutions); + if (registered.get() > 0) { + scheduler.apply(interval, this::interruptLongRunningExecutions); + } else { + running.set(false); + } } } diff --git a/libs/grok/src/test/java/org/elasticsearch/grok/ThreadWatchdogTests.java b/libs/grok/src/test/java/org/elasticsearch/grok/ThreadWatchdogTests.java index 46faa4ae05d..29e2351215f 100644 --- a/libs/grok/src/test/java/org/elasticsearch/grok/ThreadWatchdogTests.java +++ b/libs/grok/src/test/java/org/elasticsearch/grok/ThreadWatchdogTests.java @@ -18,15 +18,25 @@ */ package org.elasticsearch.grok; -import org.elasticsearch.test.ESTestCase; - import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import org.elasticsearch.test.ESTestCase; +import org.mockito.Mockito; import static org.hamcrest.Matchers.is; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.verifyZeroInteractions; public class ThreadWatchdogTests extends ESTestCase { - + public void testInterrupt() throws Exception { AtomicBoolean run = new AtomicBoolean(true); // to avoid a lingering thread when test has completed ThreadWatchdog watchdog = ThreadWatchdog.newInstance(10, 100, System::currentTimeMillis, (delay, command) -> { @@ -43,7 +53,7 @@ public class ThreadWatchdogTests extends ESTestCase { thread.start(); return null; }); - + Map registry = ((ThreadWatchdog.Default) watchdog).registry; assertThat(registry.size(), is(0)); // need to call #register() method on a different thread, assertBusy() fails if current thread gets interrupted @@ -66,5 +76,39 @@ public class ThreadWatchdogTests extends ESTestCase { assertThat(registry.size(), is(0)); }); } - + + public void testIdleIfNothingRegistered() throws Exception { + long interval = 1L; + ScheduledExecutorService threadPool = mock(ScheduledExecutorService.class); + ThreadWatchdog watchdog = ThreadWatchdog.newInstance(interval, Long.MAX_VALUE, System::currentTimeMillis, + (delay, command) -> threadPool.schedule(command, delay, TimeUnit.MILLISECONDS)); + // Periodic action is not scheduled because no thread is registered + verifyZeroInteractions(threadPool); + CompletableFuture commandFuture = new CompletableFuture<>(); + // Periodic action is scheduled because a thread is registered + doAnswer(invocationOnMock -> { + commandFuture.complete((Runnable) invocationOnMock.getArguments()[0]); + return null; + }).when(threadPool).schedule( + any(Runnable.class), eq(interval), eq(TimeUnit.MILLISECONDS) + ); + watchdog.register(); + // Registering the first thread should have caused the command to get scheduled again + Runnable command = commandFuture.get(1L, TimeUnit.MILLISECONDS); + Mockito.reset(threadPool); + watchdog.unregister(); + command.run(); + // Periodic action is not scheduled again because no thread is registered + verifyZeroInteractions(threadPool); + watchdog.register(); + Thread otherThread = new Thread(watchdog::register); + try { + verify(threadPool).schedule(any(Runnable.class), eq(interval), eq(TimeUnit.MILLISECONDS)); + // Registering a second thread does not cause the command to get scheduled twice + verifyNoMoreInteractions(threadPool); + 
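For orientation before the tests below: with this change the periodic interruption check is only scheduled while at least one thread is registered, so an idle watchdog costs nothing. A minimal lifecycle sketch (the pool wiring and the work inside the try block are illustrative, not from this patch):

    ScheduledExecutorService pool = Executors.newScheduledThreadPool(1);
    ThreadWatchdog watchdog = ThreadWatchdog.newInstance(10, 100, System::currentTimeMillis,
        (delay, command) -> pool.schedule(command, delay, TimeUnit.MILLISECONDS));
    watchdog.register();        // the first registration (re)starts the periodic check
    try {
        // ... run a potentially long grok match on this thread ...
    } finally {
        watchdog.unregister();  // once the registry drains, the check stops rescheduling itself
    }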
otherThread.start(); + } finally { + otherThread.join(); + } + } } diff --git a/libs/x-content/licenses/jackson-core-2.8.10.jar.sha1 b/libs/x-content/licenses/jackson-core-2.8.10.jar.sha1 deleted file mode 100644 index a322d371e26..00000000000 --- a/libs/x-content/licenses/jackson-core-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eb21a035c66ad307e66ec8fce37f5d50fd62d039 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-core-2.8.11.jar.sha1 b/libs/x-content/licenses/jackson-core-2.8.11.jar.sha1 new file mode 100644 index 00000000000..e7ad1e74ed6 --- /dev/null +++ b/libs/x-content/licenses/jackson-core-2.8.11.jar.sha1 @@ -0,0 +1 @@ +876ead1db19f0c9e79c9789273a3ef8c6fd6c29b \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.8.10.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.8.10.jar.sha1 deleted file mode 100644 index 1d3e18e21a6..00000000000 --- a/libs/x-content/licenses/jackson-dataformat-cbor-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1c58cc9313ddf19f0900cd61ed044874278ce320 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.8.11.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.8.11.jar.sha1 new file mode 100644 index 00000000000..378ba524422 --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-cbor-2.8.11.jar.sha1 @@ -0,0 +1 @@ +8b9826e16c3366764bfb7ad7362554f0471046c3 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.8.10.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.8.10.jar.sha1 deleted file mode 100644 index 4f4cacde220..00000000000 --- a/libs/x-content/licenses/jackson-dataformat-smile-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e853081fadaad3e98ed801937acc3d8f77580686 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.8.11.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.8.11.jar.sha1 new file mode 100644 index 00000000000..510afb3df53 --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-smile-2.8.11.jar.sha1 @@ -0,0 +1 @@ +d9d1c49c5d9d5e46e2aee55f3cdd119286fe0fc1 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.8.10.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.8.10.jar.sha1 deleted file mode 100644 index 40bcb05f697..00000000000 --- a/libs/x-content/licenses/jackson-dataformat-yaml-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1e08caf1d787c825307d8cc6362452086020d853 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.8.11.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.8.11.jar.sha1 new file mode 100644 index 00000000000..78a68d715ec --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-yaml-2.8.11.jar.sha1 @@ -0,0 +1 @@ +2e77c6ff7342cd61ab1ae7cb14ed16aebfc8a72a \ No newline at end of file diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java index fb871590df7..38bc251be41 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java @@ -35,7 +35,7 @@ import java.io.OutputStream; */ public class XContentFactory { - private static final int GUESS_HEADER_LENGTH = 20; + static final int GUESS_HEADER_LENGTH = 20; /** * Returns a content builder using JSON 
format ({@link org.elasticsearch.common.xcontent.XContentType#JSON}. @@ -153,8 +153,10 @@ public class XContentFactory { return XContentType.JSON; } // Should we throw a failure here? Smile idea is to use it in bytes.... - if (length > 2 && first == SmileConstants.HEADER_BYTE_1 && content.charAt(1) == SmileConstants.HEADER_BYTE_2 && - content.charAt(2) == SmileConstants.HEADER_BYTE_3) { + if (length > 2 + && first == SmileConstants.HEADER_BYTE_1 + && content.charAt(1) == SmileConstants.HEADER_BYTE_2 + && content.charAt(2) == SmileConstants.HEADER_BYTE_3) { return XContentType.SMILE; } if (length > 2 && first == '-' && content.charAt(1) == '-' && content.charAt(2) == '-') { @@ -227,13 +229,29 @@ public class XContentFactory { */ @Deprecated public static XContentType xContentType(InputStream si) throws IOException { + /* + * We need to guess the content type. To do this, we look for the first non-whitespace character and then try to guess the content + * type on the GUESS_HEADER_LENGTH bytes that follow. We do this in a way that does not modify the initial read position in the + * underlying input stream. This is why the input stream must support mark/reset and why we repeatedly mark the read position and + * reset. + */ if (si.markSupported() == false) { throw new IllegalArgumentException("Cannot guess the xcontent type without mark/reset support on " + si.getClass()); } - si.mark(GUESS_HEADER_LENGTH); + si.mark(Integer.MAX_VALUE); try { + // scan until we find the first non-whitespace character or the end of the stream + int current; + do { + current = si.read(); + if (current == -1) { + return null; + } + } while (Character.isWhitespace((char) current)); + // now guess the content type off the next GUESS_HEADER_LENGTH bytes including the current byte final byte[] firstBytes = new byte[GUESS_HEADER_LENGTH]; - int read = 0; + firstBytes[0] = (byte) current; + int read = 1; while (read < GUESS_HEADER_LENGTH) { final int r = si.read(firstBytes, read, GUESS_HEADER_LENGTH - read); if (r == -1) { @@ -245,6 +263,7 @@ public class XContentFactory { } finally { si.reset(); } + } /** @@ -278,15 +297,17 @@ public class XContentFactory { if (first == '{') { return XContentType.JSON; } - if (length > 2 && first == SmileConstants.HEADER_BYTE_1 && bytes[offset + 1] == SmileConstants.HEADER_BYTE_2 && - bytes[offset + 2] == SmileConstants.HEADER_BYTE_3) { + if (length > 2 + && first == SmileConstants.HEADER_BYTE_1 + && bytes[offset + 1] == SmileConstants.HEADER_BYTE_2 + && bytes[offset + 2] == SmileConstants.HEADER_BYTE_3) { return XContentType.SMILE; } if (length > 2 && first == '-' && bytes[offset + 1] == '-' && bytes[offset + 2] == '-') { return XContentType.YAML; } // CBOR logic similar to CBORFactory#hasCBORFormat - if (first == CBORConstants.BYTE_OBJECT_INDEFINITE && length > 1){ + if (first == CBORConstants.BYTE_OBJECT_INDEFINITE && length > 1) { return XContentType.CBOR; } if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_TAG, first) && length > 2) { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java index 39a7bfd9a20..d51cb368e43 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java @@ -100,6 +100,6 @@ public class AppendProcessorFactoryTests extends ESTestCase { 
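The practical effect of the xContentType(InputStream) change above: a mark/reset-capable stream whose body is preceded by whitespace can still be sniffed, and the caller gets the stream back at its original read position. A minimal sketch of calling the patched (deprecated) overload; the class name is hypothetical, and ByteArrayInputStream is chosen because it supports mark/reset:

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    import org.elasticsearch.common.xcontent.XContentFactory;
    import org.elasticsearch.common.xcontent.XContentType;

    public class ContentTypeGuessSketch {
        public static void main(String[] args) throws IOException {
            // leading whitespace no longer defeats the guess; the method skips it
            byte[] payload = " \n\t{\"field\":\"value\"}".getBytes(StandardCharsets.UTF_8);
            InputStream in = new ByteArrayInputStream(payload); // supports mark/reset
            XContentType type = XContentFactory.xContentType(in);
            System.out.println(type); // JSON
            // the stream was reset in the finally block, so consumption restarts at byte 0
            System.out.println(in.read() == ' '); // true
        }
    }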
String processorTag = randomAlphaOfLength(10); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> factory.create(null, processorTag, config)); assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); - assertThat(exception.getHeader("processor_tag").get(0), equalTo(processorTag)); + assertThat(exception.getMetadata("es.processor_tag").get(0), equalTo(processorTag)); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorFactoryTests.java index 9e4acd7b17f..f3396da64eb 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorFactoryTests.java @@ -58,9 +58,9 @@ public class ConvertProcessorFactoryTests extends ESTestCase { fail("factory create should have failed"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), Matchers.equalTo("[type] type [" + type + "] not supported, cannot convert field.")); - assertThat(e.getHeader("processor_type").get(0), equalTo(ConvertProcessor.TYPE)); - assertThat(e.getHeader("property_name").get(0), equalTo("type")); - assertThat(e.getHeader("processor_tag"), nullValue()); + assertThat(e.getMetadata("es.processor_type").get(0), equalTo(ConvertProcessor.TYPE)); + assertThat(e.getMetadata("es.property_name").get(0), equalTo("type")); + assertThat(e.getMetadata("es.processor_tag"), nullValue()); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java index 801441407a7..3c89778f0e8 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java @@ -66,6 +66,6 @@ public class FailProcessorFactoryTests extends ESTestCase { String processorTag = randomAlphaOfLength(10); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> factory.create(null, processorTag, config)); assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); - assertThat(exception.getHeader("processor_tag").get(0), equalTo(processorTag)); + assertThat(exception.getMetadata("es.processor_tag").get(0), equalTo(processorTag)); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java index c439a9662f2..bebe7802762 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java @@ -79,6 +79,6 @@ public class RemoveProcessorFactoryTests extends ESTestCase { String processorTag = randomAlphaOfLength(10); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> factory.create(null, processorTag, config)); assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); - assertThat(exception.getHeader("processor_tag").get(0), equalTo(processorTag)); + 
assertThat(exception.getMetadata("es.processor_tag").get(0), equalTo(processorTag)); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java index 59a99b8f995..9602f34f698 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java @@ -108,7 +108,7 @@ public class SetProcessorFactoryTests extends ESTestCase { String processorTag = randomAlphaOfLength(10); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> factory.create(null, processorTag, config)); assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); - assertThat(exception.getHeader("processor_tag").get(0), equalTo(processorTag)); + assertThat(exception.getMetadata("es.processor_tag").get(0), equalTo(processorTag)); } } diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/200_default_pipeline.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/200_default_pipeline.yml new file mode 100644 index 00000000000..c20d7698131 --- /dev/null +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/200_default_pipeline.yml @@ -0,0 +1,73 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + +--- +"Test index with default pipeline": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "bytes" : { + "field" : "bytes_source_field", + "target_field" : "bytes_target_field" + } + } + ] + } + - match: { acknowledged: true } + + - do: + indices.create: + index: test + body: + settings: + index: + default_pipeline: "my_pipeline" + + - do: + index: + index: test + type: test + id: 1 + body: {bytes_source_field: "1kb"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.bytes_source_field: "1kb" } + - match: { _source.bytes_target_field: 1024 } + + - do: + index: + index: test + type: test + id: 2 + pipeline: "_none" + body: {bytes_source_field: "1kb"} + + - do: + get: + index: test + type: test + id: 2 + - match: { _source.bytes_source_field: "1kb" } + - is_false: _source.bytes_target_field + + - do: + catch: bad_request + index: + index: test + type: test + id: 3 + pipeline: "" + body: {bytes_source_field: "1kb"} diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/20_crud.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/20_crud.yml index 0e348bbd726..bd6a3e6ca14 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/20_crud.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/20_crud.yml @@ -158,9 +158,9 @@ teardown: } - match: { error.root_cause.0.type: "parse_exception" } - match: { error.root_cause.0.reason: "[field] required property is missing" } - - match: { error.root_cause.0.header.processor_tag: "fritag" } - - match: { error.root_cause.0.header.processor_type: "set" } - - match: { error.root_cause.0.header.property_name: "field" } + - match: { error.root_cause.0.processor_tag: "fritag" } + - match: { error.root_cause.0.processor_type: "set" } + - match: { error.root_cause.0.property_name: "field" } --- "Test basic pipeline with on_failure in processor": diff --git 
a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/50_on_failure.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/50_on_failure.yml index 4b40d9f670b..718b91ac1c1 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/50_on_failure.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/50_on_failure.yml @@ -148,9 +148,9 @@ teardown: } - match: { error.root_cause.0.type: "parse_exception" } - match: { error.root_cause.0.reason: "[on_failure] processors list cannot be empty" } - - match: { error.root_cause.0.header.processor_type: "fail" } - - match: { error.root_cause.0.header.processor_tag: "emptyfail" } - - match: { error.root_cause.0.header.property_name: "on_failure" } + - match: { error.root_cause.0.processor_type: "fail" } + - match: { error.root_cause.0.processor_tag: "emptyfail" } + - match: { error.root_cause.0.property_name: "on_failure" } --- "Test pipeline with empty on_failure in pipeline": diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yml index 8b3ed313314..776a8af0c24 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yml @@ -107,9 +107,9 @@ teardown: } - match: { error.root_cause.0.type: "parse_exception" } - match: { error.root_cause.0.reason: "[field] required property is missing" } - - match: { error.root_cause.0.header.processor_tag: "fails" } - - match: { error.root_cause.0.header.processor_type: "set" } - - match: { error.root_cause.0.header.property_name: "field" } + - match: { error.root_cause.0.processor_tag: "fails" } + - match: { error.root_cause.0.processor_type: "set" } + - match: { error.root_cause.0.property_name: "field" } --- "Test simulate without index type and id": @@ -198,9 +198,9 @@ teardown: } ] } - - is_false: error.root_cause.0.header.processor_type - - is_false: error.root_cause.0.header.processor_tag - - match: { error.root_cause.0.header.property_name: "pipeline" } + - is_false: error.root_cause.0.processor_type + - is_false: error.root_cause.0.processor_tag + - match: { error.root_cause.0.property_name: "pipeline" } - match: { error.reason: "[pipeline] required property is missing" } --- @@ -233,9 +233,9 @@ teardown: } - match: { error.root_cause.0.type: "parse_exception" } - match: { error.root_cause.0.reason: "[value] required property is missing" } - - match: { error.root_cause.0.header.processor_type: "set" } - - match: { error.root_cause.0.header.property_name: "value" } - - is_false: error.root_cause.0.header.processor_tag + - match: { error.root_cause.0.processor_type: "set" } + - match: { error.root_cause.0.property_name: "value" } + - is_false: error.root_cause.0.processor_tag --- "Test simulate with verbose flag": diff --git a/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..0ebdddcc5f1 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +fded6bb485b8b01bb2a9280162fd14d4d3ce4510 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-608f0277b0.jar.sha1 
b/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 908f70131b3..00000000000 --- a/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bd7d8078a2d0ad11a24f54156cc015630c96858a \ No newline at end of file diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java index 1cde9c258b4..23dc0fd276c 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java @@ -34,10 +34,12 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; -import org.elasticsearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.script.BucketAggregationScript; +import org.elasticsearch.script.BucketAggregationSelectorScript; import org.elasticsearch.script.ClassPermission; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.FilterScript; @@ -54,6 +56,7 @@ import java.security.AccessController; import java.security.PrivilegedAction; import java.text.ParseException; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -112,6 +115,17 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE } else if (context.instanceClazz.equals(ExecutableScript.class)) { ExecutableScript.Factory factory = (p) -> new ExpressionExecutableScript(expr, p); return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(BucketAggregationScript.class)) { + return context.factoryClazz.cast(newBucketAggregationScriptFactory(expr)); + } else if (context.instanceClazz.equals(BucketAggregationSelectorScript.class)) { + BucketAggregationScript.Factory factory = newBucketAggregationScriptFactory(expr); + BucketAggregationSelectorScript.Factory wrappedFactory = parameters -> new BucketAggregationSelectorScript(parameters) { + @Override + public boolean execute() { + return factory.newInstance(getParams()).execute() == 1.0; + } + }; + return context.factoryClazz.cast(wrappedFactory); } else if (context.instanceClazz.equals(FilterScript.class)) { FilterScript.Factory factory = (p, lookup) -> newFilterScript(expr, lookup, p); return context.factoryClazz.cast(factory); @@ -122,6 +136,37 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE throw new IllegalArgumentException("expression engine does not know how to handle script context [" + context.name + "]"); } + private static BucketAggregationScript.Factory newBucketAggregationScriptFactory(Expression expr) { + return parameters -> { + ReplaceableConstDoubleValues[] functionValuesArray = + new ReplaceableConstDoubleValues[expr.variables.length]; + Map functionValuesMap = new HashMap<>(); + for (int i = 0; i < expr.variables.length; ++i) { + functionValuesArray[i] 
= new ReplaceableConstDoubleValues(); + functionValuesMap.put(expr.variables[i], functionValuesArray[i]); + } + return new BucketAggregationScript(parameters) { + @Override + public Double execute() { + getParams().forEach((name, value) -> { + ReplaceableConstDoubleValues placeholder = functionValuesMap.get(name); + if (placeholder == null) { + throw new IllegalArgumentException("Error using " + expr + ". " + + "The variable [" + name + "] does not exist in the executable expressions script."); + } else if (value instanceof Number == false) { + throw new IllegalArgumentException("Error using " + expr + ". " + + "Executable expressions scripts can only process numbers." + + " The variable [" + name + "] is not a number."); + } else { + placeholder.setValue(((Number) value).doubleValue()); + } + }); + return expr.evaluate(functionValuesArray); + } + }; + }; + } + private SearchScript.LeafFactory newSearchScript(Expression expr, SearchLookup lookup, @Nullable Map vars) { MapperService mapper = lookup.doc().mapperService(); // NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings, @@ -267,7 +312,7 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE }; }; } - + private ScoreScript.LeafFactory newScoreScript(Expression expr, SearchLookup lookup, @Nullable Map vars) { SearchScript.LeafFactory searchLeafFactory = newSearchScript(expr, lookup, vars); return new ScoreScript.LeafFactory() { @@ -284,17 +329,17 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE public double execute() { return script.runAsDouble(); } - + @Override public void setDocument(int docid) { script.setDocument(docid); } - + @Override public void setScorer(Scorer scorer) { script.setScorer(scorer); } - + @Override public double get_score() { return script.getScore(); diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index fb1ea441a9d..ed4b1d631e0 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -17,8 +17,6 @@ * under the License. */ - - esplugin { description 'An easy, safe and fast scripting language for Elasticsearch' classname 'org.elasticsearch.painless.PainlessPlugin' @@ -26,6 +24,8 @@ esplugin { integTestCluster { module project.project(':modules:mapper-extras') + systemProperty 'es.scripting.use_java_time', 'true' + systemProperty 'es.scripting.update.ctx_in_params', 'false' } dependencies { diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java index 55b64b0420d..c38325edd14 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java @@ -28,7 +28,7 @@ import java.util.Objects; * constructors, methods, and fields that can be used within a Painless script at both compile-time * and run-time. * - * A whitelist consists of several pieces with {@link WhitelistClass}s as the top level. Each + * A whitelist consists of several pieces with {@link WhitelistClass}s as the top level. Each * {@link WhitelistClass} will contain zero-to-many {@link WhitelistConstructor}s, {@link WhitelistMethod}s, and * {@link WhitelistField}s which are what will be available with a Painless script. See each individual * whitelist object for more detail. 
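The bucket-selector support added above is an adapter over the numeric factory: Lucene expressions only produce doubles, so the boolean selector is defined as "the expression evaluated to exactly 1.0". A self-contained sketch of that pattern; the two interfaces are simplified stand-ins, not the real BucketAggregationScript/BucketAggregationSelectorScript classes:

    import java.util.Collections;
    import java.util.Map;

    public class SelectorAdapterSketch {
        // simplified stand-ins for the numeric and boolean script factories
        interface NumericScript { double execute(Map<String, Object> params); }
        interface SelectorScript { boolean execute(Map<String, Object> params); }

        // the adapter used by the diff: true exactly when the numeric script yields 1.0
        static SelectorScript asSelector(NumericScript numeric) {
            return params -> numeric.execute(params) == 1.0;
        }

        public static void main(String[] args) {
            NumericScript gt10 = params -> ((Number) params.get("value")).doubleValue() > 10 ? 1.0 : 0.0;
            SelectorScript selector = asSelector(gt10);
            System.out.println(selector.execute(Collections.singletonMap("value", 42))); // true
        }
    }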
@@ -56,14 +56,14 @@ public final class Whitelist { Collections.singletonList(WhitelistLoader.loadFromResourceFiles(Whitelist.class, BASE_WHITELIST_FILES)); /** The {@link ClassLoader} used to look up the whitelisted Java classes, constructors, methods, and fields. */ - public final ClassLoader javaClassLoader; + public final ClassLoader classLoader; /** The {@link List} of all the whitelisted Painless classes. */ - public final List whitelistStructs; + public final List whitelistClasses; /** Standard constructor. All values must be not {@code null}. */ - public Whitelist(ClassLoader javaClassLoader, List whitelistStructs) { - this.javaClassLoader = Objects.requireNonNull(javaClassLoader); - this.whitelistStructs = Collections.unmodifiableList(Objects.requireNonNull(whitelistStructs)); + public Whitelist(ClassLoader classLoader, List whitelistClasses) { + this.classLoader = Objects.requireNonNull(classLoader); + this.whitelistClasses = Collections.unmodifiableList(Objects.requireNonNull(whitelistClasses)); } } diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClass.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClass.java index 12aa5f5bdd6..0b216ae5c29 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClass.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClass.java @@ -30,7 +30,7 @@ import java.util.Objects; * specific context, as long as multiple classes representing the same Java class have the same * class name and have legal constructor/method overloading they can be merged together. * - * Classes in Painless allow for arity overloading for constructors and methods. Arity overloading + * Classes in Painless allow for arity overloading for constructors and methods. Arity overloading * means that multiple constructors are allowed for a single class as long as they have a different * number of parameters, and multiples methods with the same name are allowed for a single class * as long as they have the same return type and a different number of parameters. @@ -40,7 +40,7 @@ import java.util.Objects; */ public final class WhitelistClass { - /** Information about where this class was white-listed from. Can be used for error messages. */ + /** Information about where this class was white-listed from. */ public final String origin; /** The Java class name this class represents. */ @@ -49,7 +49,7 @@ public final class WhitelistClass { /** * Allow the Java class name to only be specified as the fully-qualified name. */ - public final boolean onlyFQNJavaClassName; + public final boolean noImport; /** The {@link List} of whitelisted ({@link WhitelistConstructor}s) available to this class. */ public final List whitelistConstructors; @@ -61,13 +61,14 @@ public final class WhitelistClass { public final List whitelistFields; /** Standard constructor. All values must be not {@code null}. 
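For plugin authors the Whitelist renames above are mechanical; the SPI is consumed the same way and now exposes classLoader and whitelistClasses in place of javaClassLoader and whitelistStructs. A minimal sketch, with a hypothetical holder class and resource file name:

    import org.elasticsearch.painless.spi.Whitelist;
    import org.elasticsearch.painless.spi.WhitelistLoader;

    public class MyPluginWhitelists {
        // resolves the file relative to this class; "my_plugin_whitelist.txt" is hypothetical
        public static final Whitelist WHITELIST =
                WhitelistLoader.loadFromResourceFiles(MyPluginWhitelists.class, "my_plugin_whitelist.txt");

        public static void main(String[] args) {
            System.out.println(WHITELIST.classLoader);             // was: javaClassLoader
            System.out.println(WHITELIST.whitelistClasses.size()); // was: whitelistStructs
        }
    }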
*/ - public WhitelistClass(String origin, String javaClassName, boolean onlyFQNJavaClassName, + public WhitelistClass(String origin, String javaClassName, boolean noImport, List whitelistConstructors, List whitelistMethods, List whitelistFields) { + this.origin = Objects.requireNonNull(origin); this.javaClassName = Objects.requireNonNull(javaClassName); - this.onlyFQNJavaClassName = onlyFQNJavaClassName; + this.noImport = noImport; this.whitelistConstructors = Collections.unmodifiableList(Objects.requireNonNull(whitelistConstructors)); this.whitelistMethods = Collections.unmodifiableList(Objects.requireNonNull(whitelistMethods)); diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistConstructor.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistConstructor.java index 0e705527602..032ef397def 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistConstructor.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistConstructor.java @@ -25,24 +25,24 @@ import java.util.Objects; /** * Constructor represents the equivalent of a Java constructor available as a whitelisted class - * constructor within Painless. Constructors for Painless classes may be accessed exactly as - * constructors for Java classes are using the 'new' keyword. Painless classes may have multiple + * constructor within Painless. Constructors for Painless classes may be accessed exactly as + * constructors for Java classes are using the 'new' keyword. Painless classes may have multiple * constructors as long as they comply with arity overloading described for {@link WhitelistClass}. */ public final class WhitelistConstructor { - /** Information about where this constructor was whitelisted from. Can be used for error messages. */ + /** Information about where this constructor was whitelisted from. */ public final String origin; /** * A {@link List} of {@link String}s that are the Painless type names for the parameters of the * constructor which can be used to look up the Java constructor through reflection. */ - public final List painlessParameterTypeNames; + public final List canonicalTypeNameParameters; /** Standard constructor. All values must be not {@code null}. */ - public WhitelistConstructor(String origin, List painlessParameterTypeNames) { + public WhitelistConstructor(String origin, List canonicalTypeNameParameters) { this.origin = Objects.requireNonNull(origin); - this.painlessParameterTypeNames = Collections.unmodifiableList(Objects.requireNonNull(painlessParameterTypeNames)); + this.canonicalTypeNameParameters = Collections.unmodifiableList(Objects.requireNonNull(canonicalTypeNameParameters)); } } diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistField.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistField.java index 116aea98fcf..44ed31a227e 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistField.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistField.java @@ -23,24 +23,24 @@ import java.util.Objects; /** * Field represents the equivalent of a Java field available as a whitelisted class field - * within Painless. Fields for Painless classes may be accessed exactly as fields for Java classes + * within Painless. Fields for Painless classes may be accessed exactly as fields for Java classes * are using the '.' 
operator on an existing class variable/field. */ public class WhitelistField { - /** Information about where this method was whitelisted from. Can be used for error messages. */ + /** Information about where this method was whitelisted from. */ public final String origin; - /** The Java field name used to look up the Java field through reflection. */ - public final String javaFieldName; + /** The field name used to look up the field reflection object. */ + public final String fieldName; - /** The Painless type name for the field which can be used to look up the Java field through reflection. */ - public final String painlessFieldTypeName; + /** The canonical type name for the field which can be used to look up the Java field through reflection. */ + public final String canonicalTypeNameParameter; /** Standard constructor. All values must be not {@code null}. */ - public WhitelistField(String origin, String javaFieldName, String painlessFieldTypeName) { + public WhitelistField(String origin, String fieldName, String canonicalTypeNameParameter) { this.origin = Objects.requireNonNull(origin); - this.javaFieldName = Objects.requireNonNull(javaFieldName); - this.painlessFieldTypeName = Objects.requireNonNull(painlessFieldTypeName); + this.fieldName = Objects.requireNonNull(fieldName); + this.canonicalTypeNameParameter = Objects.requireNonNull(canonicalTypeNameParameter); } } diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java index b104d03f1ea..a4a0076626a 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java @@ -35,14 +35,14 @@ import java.util.List; public final class WhitelistLoader { /** - * Loads and creates a {@link Whitelist} from one to many text files. The file paths are passed in as an array of + * Loads and creates a {@link Whitelist} from one to many text files. The file paths are passed in as an array of * {@link String}s with a single {@link Class} to be be used to load the resources where each {@link String} - * is the path of a single text file. The {@link Class}'s {@link ClassLoader} will be used to lookup the Java + * is the path of a single text file. The {@link Class}'s {@link ClassLoader} will be used to lookup the Java * reflection objects for each individual {@link Class}, {@link Constructor}, {@link Method}, and {@link Field} * specified as part of the whitelist in the text file. * * A single pass is made through each file to collect all the information about each class, constructor, method, - * and field. Most validation will be done at a later point after all whitelists have been gathered and their + * and field. Most validation will be done at a later point after all whitelists have been gathered and their * merging takes place. * * A painless type name is one of the following: @@ -52,20 +52,20 @@ public final class WhitelistLoader { *
 *     <li> fully-qualified Java type name - Any whitelisted Java class will have the equivalent name as
 *     a Painless type name with the exception that any dollar symbols used as part of inner classes will
 *     be replaced with dot symbols. </li>
- *     <li> short Java type name - The text after the final dot symbol of any specified Java class.  A
- *     short type Java name may be excluded by using the 'only_fqn' token during Painless class parsing
+ *     <li> short Java type name - The text after the final dot symbol of any specified Java class. A
+ *     short type Java name may be excluded by using the 'no_import' token during Painless class parsing
 *     as described later. </li>
 * </ul>
 *
 * The following can be parsed from each whitelist text file:
 * <ul>
 *     <li> Blank lines will be ignored by the parser. </li>
- *     <li> Comments may be created starting with a pound '#' symbol and end with a newline.  These will
+ *     <li> Comments may be created starting with a pound '#' symbol and end with a newline. These will
 *     be ignored by the parser. </li>
 *     <li> Primitive types may be specified starting with 'class' and followed by the Java type name,
 *     an opening bracket, a newline, a closing bracket, and a final newline. </li>
 *     <li> Complex types may be specified starting with 'class' and followed by the fully-qualified Java
- *     class name, optionally followed by an 'only_fqn' token, an opening bracket, a newline,
+ *     class name, optionally followed by a 'no_import' token, an opening bracket, a newline,
 *     constructor/method/field specifications, a closing bracket, and a final newline. Within a complex
 *     type the following may be parsed:
 *     <ul>
        @@ -93,10 +93,10 @@ public final class WhitelistLoader { * * Note there must be a one-to-one correspondence of Painless type names to Java type/class names. * If the same Painless type is defined across multiple files and the Java class is the same, all - * specified constructors, methods, and fields will be merged into a single Painless type. The + * specified constructors, methods, and fields will be merged into a single Painless type. The * Painless dynamic type, 'def', used as part of constructor, method, and field definitions will - * be appropriately parsed and handled. Painless complex types must be specified with the - * fully-qualified Java class name. Method argument types, method return types, and field types + * be appropriately parsed and handled. Painless complex types must be specified with the + * fully-qualified Java class name. Method argument types, method return types, and field types * must be specified with Painless type names (def, fully-qualified, or short) as described earlier. * * The following example is used to create a single whitelist text file: @@ -109,7 +109,7 @@ public final class WhitelistLoader { * * # complex types * - * class my.package.Example only_fqn { + * class my.package.Example no_import { * # constructors * () * (int) @@ -132,7 +132,7 @@ public final class WhitelistLoader { * } */ public static Whitelist loadFromResourceFiles(Class resource, String... filepaths) { - List whitelistStructs = new ArrayList<>(); + List whitelistClasses = new ArrayList<>(); // Execute a single pass through the whitelist text files. This will gather all the // constructors, methods, augmented methods, and fields for each whitelisted class. @@ -143,9 +143,9 @@ public final class WhitelistLoader { try (LineNumberReader reader = new LineNumberReader( new InputStreamReader(resource.getResourceAsStream(filepath), StandardCharsets.UTF_8))) { - String whitelistStructOrigin = null; + String whitelistClassOrigin = null; String javaClassName = null; - boolean onlyFQNJavaClassName = false; + boolean noImport = false; List whitelistConstructors = null; List whitelistMethods = null; List whitelistFields = null; @@ -160,7 +160,7 @@ public final class WhitelistLoader { } // Handle a new class by resetting all the variables necessary to construct a new WhitelistClass for the whitelist. - // Expects the following format: 'class' ID 'only_fqn'? '{' '\n' + // Expects the following format: 'class' ID 'no_import'? '{' '\n' if (line.startsWith("class ")) { // Ensure the final token of the line is '{'. if (line.endsWith("{") == false) { @@ -172,13 +172,13 @@ public final class WhitelistLoader { String[] tokens = line.substring(5, line.length() - 1).trim().split("\\s+"); // Ensure the correct number of tokens. - if (tokens.length == 2 && "only_fqn".equals(tokens[1])) { - onlyFQNJavaClassName = true; + if (tokens.length == 2 && "no_import".equals(tokens[1])) { + noImport = true; } else if (tokens.length != 1) { throw new IllegalArgumentException("invalid class definition: failed to parse class name [" + line + "]"); } - whitelistStructOrigin = "[" + filepath + "]:[" + number + "]"; + whitelistClassOrigin = "[" + filepath + "]:[" + number + "]"; javaClassName = tokens[0]; // Reset all the constructors, methods, and fields to support a new class. 
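The class-header tokenizing renamed in the hunk above fits in a few lines. This standalone sketch mirrors WhitelistLoader's handling of the 'class' ID 'no_import'? '{' format (the class name and main method are illustrative only):

    public class HeaderParseSketch {
        // mirrors WhitelistLoader: 'class' ID 'no_import'? '{' '\n'
        static void parse(String line) {
            if (line.startsWith("class ") == false || line.endsWith("{") == false) {
                throw new IllegalArgumentException("invalid class definition: " + line);
            }
            String[] tokens = line.substring(5, line.length() - 1).trim().split("\\s+");
            boolean noImport = tokens.length == 2 && "no_import".equals(tokens[1]);
            if (tokens.length != 1 && noImport == false) {
                throw new IllegalArgumentException("invalid class definition: failed to parse class name [" + line + "]");
            }
            System.out.println("java class: " + tokens[0] + ", no_import: " + noImport);
        }

        public static void main(String[] args) {
            parse("class my.package.Example no_import {"); // java class: my.package.Example, no_import: true
            parse("class java.lang.String {");             // java class: java.lang.String, no_import: false
        }
    }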
@@ -194,13 +194,13 @@ public final class WhitelistLoader { throw new IllegalArgumentException("invalid class definition: extraneous closing bracket"); } - whitelistStructs.add(new WhitelistClass(whitelistStructOrigin, javaClassName, onlyFQNJavaClassName, + whitelistClasses.add(new WhitelistClass(whitelistClassOrigin, javaClassName, noImport, whitelistConstructors, whitelistMethods, whitelistFields)); // Set all the variables to null to ensure a new class definition is found before other parsable values. - whitelistStructOrigin = null; + whitelistClassOrigin = null; javaClassName = null; - onlyFQNJavaClassName = false; + noImport = false; whitelistConstructors = null; whitelistMethods = null; whitelistFields = null; @@ -300,7 +300,7 @@ public final class WhitelistLoader { } ClassLoader loader = AccessController.doPrivileged((PrivilegedAction)resource::getClassLoader); - return new Whitelist(loader, whitelistStructs); + return new Whitelist(loader, whitelistClasses); } private WhitelistLoader() {} diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java index df86619055b..5cd023a3591 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java @@ -25,52 +25,53 @@ import java.util.Objects; /** * Method represents the equivalent of a Java method available as a whitelisted class method - * within Painless. Methods for Painless classes may be accessed exactly as methods for Java classes - * are using the '.' operator on an existing class variable/field. Painless classes may have multiple - * methods with the same name as long as they comply with arity overloading described for {@link WhitelistMethod}. + * within Painless. Methods for Painless classes may be accessed exactly as methods for Java classes + * are using the '.' operator on an existing class variable/field. Painless classes may have multiple + * methods with the same name as long as they comply with arity overloading described in + * {@link WhitelistClass}. * * Classes may also have additional methods that are not part of the Java class the class represents - - * these are known as augmented methods. An augmented method can be added to a class as a part of any + * these are known as augmented methods. An augmented method can be added to a class as a part of any * Java class as long as the method is static and the first parameter of the method is the Java class - * represented by the class. Note that the augmented method's parent Java class does not need to be + * represented by the class. Note that the augmented method's parent Java class does not need to be * whitelisted. */ public class WhitelistMethod { - /** Information about where this method was whitelisted from. Can be used for error messages. */ + /** Information about where this method was whitelisted from. */ public final String origin; /** - * The Java class name for the owner of an augmented method. If the method is not augmented + * The class name for the owner of an augmented method. If the method is not augmented * this should be {@code null}. */ - public final String javaAugmentedClassName; + public final String augmentedCanonicalClassName; - /** The Java method name used to look up the Java method through reflection. 
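 * (For orientation: an augmented method is declared as a static method whose first parameter
 * is the class being augmented; Painless then exposes it as if it were an instance method on
 * that class, and the static method's declaring class does not itself need to be whitelisted.)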
*/ - public final String javaMethodName; + /** The method name used to look up the method reflection object. */ + public final String methodName; /** - * The Painless type name for the return type of the method which can be used to look up the Java - * method through reflection. + * The canonical type name for the return type. */ - public final String painlessReturnTypeName; + public final String returnCanonicalTypeName; /** - * A {@link List} of {@link String}s that are the Painless type names for the parameters of the - * method which can be used to look up the Java method through reflection. + * A {@link List} of {@link String}s that are the canonical type names for the parameters of the + * method used to look up the method reflection object. */ - public final List painlessParameterTypeNames; + public final List canonicalTypeNameParameters; /** - * Standard constructor. All values must be not {@code null} with the exception of jAugmentedClass; - * jAugmentedClass will be {@code null} unless the method is augmented as described in the class documentation. + * Standard constructor. All values must be not {@code null} with the exception of + * augmentedCanonicalClassName; augmentedCanonicalClassName will be {@code null} unless the method + * is augmented as described in the class documentation. */ - public WhitelistMethod(String origin, String javaAugmentedClassName, String javaMethodName, - String painlessReturnTypeName, List painlessParameterTypeNames) { + public WhitelistMethod(String origin, String augmentedCanonicalClassName, String methodName, + String returnCanonicalTypeName, List canonicalTypeNameParameters) { this.origin = Objects.requireNonNull(origin); - this.javaAugmentedClassName = javaAugmentedClassName; - this.javaMethodName = javaMethodName; - this.painlessReturnTypeName = Objects.requireNonNull(painlessReturnTypeName); - this.painlessParameterTypeNames = Collections.unmodifiableList(Objects.requireNonNull(painlessParameterTypeNames)); + this.augmentedCanonicalClassName = augmentedCanonicalClassName; + this.methodName = methodName; + this.returnCanonicalTypeName = Objects.requireNonNull(returnCanonicalTypeName); + this.canonicalTypeNameParameters = Collections.unmodifiableList(Objects.requireNonNull(canonicalTypeNameParameters)); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java index fe53a3c1100..588fe8ef5f7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java @@ -41,421 +41,421 @@ public final class AnalyzerCaster { if (actual == def.class) { if (expected == boolean.class) { - return PainlessCast.unboxTo(def.class, Boolean.class, explicit, boolean.class); + return PainlessCast.unboxTargetType(def.class, Boolean.class, explicit, boolean.class); } else if (expected == byte.class) { - return PainlessCast.unboxTo(def.class, Byte.class, explicit, byte.class); + return PainlessCast.unboxTargetType(def.class, Byte.class, explicit, byte.class); } else if (expected == short.class) { - return PainlessCast.unboxTo(def.class, Short.class, explicit, short.class); + return PainlessCast.unboxTargetType(def.class, Short.class, explicit, short.class); } else if (expected == char.class) { - return PainlessCast.unboxTo(def.class, Character.class, explicit, char.class); + return PainlessCast.unboxTargetType(def.class, 
Character.class, explicit, char.class); } else if (expected == int.class) { - return PainlessCast.unboxTo(def.class, Integer.class, explicit, int.class); + return PainlessCast.unboxTargetType(def.class, Integer.class, explicit, int.class); } else if (expected == long.class) { - return PainlessCast.unboxTo(def.class, Long.class, explicit, long.class); + return PainlessCast.unboxTargetType(def.class, Long.class, explicit, long.class); } else if (expected == float.class) { - return PainlessCast.unboxTo(def.class, Float.class, explicit, float.class); + return PainlessCast.unboxTargetType(def.class, Float.class, explicit, float.class); } else if (expected == double.class) { - return PainlessCast.unboxTo(def.class, Double.class, explicit, double.class); + return PainlessCast.unboxTargetType(def.class, Double.class, explicit, double.class); } } else if (actual == Object.class) { if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxTo(Object.class, Byte.class, true, byte.class); + return PainlessCast.unboxTargetType(Object.class, Byte.class, true, byte.class); } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxTo(Object.class, Short.class, true, short.class); + return PainlessCast.unboxTargetType(Object.class, Short.class, true, short.class); } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxTo(Object.class, Character.class, true, char.class); + return PainlessCast.unboxTargetType(Object.class, Character.class, true, char.class); } else if (expected == int.class && explicit && internal) { - return PainlessCast.unboxTo(Object.class, Integer.class, true, int.class); + return PainlessCast.unboxTargetType(Object.class, Integer.class, true, int.class); } else if (expected == long.class && explicit && internal) { - return PainlessCast.unboxTo(Object.class, Long.class, true, long.class); + return PainlessCast.unboxTargetType(Object.class, Long.class, true, long.class); } else if (expected == float.class && explicit && internal) { - return PainlessCast.unboxTo(Object.class, Float.class, true, float.class); + return PainlessCast.unboxTargetType(Object.class, Float.class, true, float.class); } else if (expected == double.class && explicit && internal) { - return PainlessCast.unboxTo(Object.class, Double.class, true, double.class); + return PainlessCast.unboxTargetType(Object.class, Double.class, true, double.class); } } else if (actual == Number.class) { if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxTo(Number.class, Byte.class, true, byte.class); + return PainlessCast.unboxTargetType(Number.class, Byte.class, true, byte.class); } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxTo(Number.class, Short.class, true, short.class); + return PainlessCast.unboxTargetType(Number.class, Short.class, true, short.class); } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxTo(Number.class, Character.class, true, char.class); + return PainlessCast.unboxTargetType(Number.class, Character.class, true, char.class); } else if (expected == int.class && explicit && internal) { - return PainlessCast.unboxTo(Number.class, Integer.class, true, int.class); + return PainlessCast.unboxTargetType(Number.class, Integer.class, true, int.class); } else if (expected == long.class && explicit && internal) { - return PainlessCast.unboxTo(Number.class, Long.class, true, long.class); + return 
PainlessCast.unboxTargetType(Number.class, Long.class, true, long.class); } else if (expected == float.class && explicit && internal) { - return PainlessCast.unboxTo(Number.class, Float.class, true, float.class); + return PainlessCast.unboxTargetType(Number.class, Float.class, true, float.class); } else if (expected == double.class && explicit && internal) { - return PainlessCast.unboxTo(Number.class, Double.class, true, double.class); + return PainlessCast.unboxTargetType(Number.class, Double.class, true, double.class); } } else if (actual == String.class) { if (expected == char.class && explicit) { - return PainlessCast.standard(String.class, char.class, true); + return PainlessCast.originalTypetoTargetType(String.class, char.class, true); } } else if (actual == boolean.class) { if (expected == def.class) { - return PainlessCast.boxFrom(Boolean.class, def.class, explicit, boolean.class); + return PainlessCast.boxOriginalType(Boolean.class, def.class, explicit, boolean.class); } else if (expected == Object.class && internal) { - return PainlessCast.boxFrom(Boolean.class, Object.class, explicit, boolean.class); + return PainlessCast.boxOriginalType(Boolean.class, Object.class, explicit, boolean.class); } else if (expected == Boolean.class && internal) { - return PainlessCast.boxTo(boolean.class, boolean.class, explicit, boolean.class); + return PainlessCast.boxTargetType(boolean.class, boolean.class, explicit, boolean.class); } } else if (actual == byte.class) { if (expected == def.class) { - return PainlessCast.boxFrom(Byte.class, def.class, explicit, byte.class); + return PainlessCast.boxOriginalType(Byte.class, def.class, explicit, byte.class); } else if (expected == Object.class && internal) { - return PainlessCast.boxFrom(Byte.class, Object.class, explicit, byte.class); + return PainlessCast.boxOriginalType(Byte.class, Object.class, explicit, byte.class); } else if (expected == Number.class && internal) { - return PainlessCast.boxFrom(Byte.class, Number.class, explicit, byte.class); + return PainlessCast.boxOriginalType(Byte.class, Number.class, explicit, byte.class); } else if (expected == short.class) { - return PainlessCast.standard(byte.class, short.class, explicit); + return PainlessCast.originalTypetoTargetType(byte.class, short.class, explicit); } else if (expected == char.class && explicit) { - return PainlessCast.standard(byte.class, char.class, true); + return PainlessCast.originalTypetoTargetType(byte.class, char.class, true); } else if (expected == int.class) { - return PainlessCast.standard(byte.class, int.class, explicit); + return PainlessCast.originalTypetoTargetType(byte.class, int.class, explicit); } else if (expected == long.class) { - return PainlessCast.standard(byte.class, long.class, explicit); + return PainlessCast.originalTypetoTargetType(byte.class, long.class, explicit); } else if (expected == float.class) { - return PainlessCast.standard(byte.class, float.class, explicit); + return PainlessCast.originalTypetoTargetType(byte.class, float.class, explicit); } else if (expected == double.class) { - return PainlessCast.standard(byte.class, double.class, explicit); + return PainlessCast.originalTypetoTargetType(byte.class, double.class, explicit); } else if (expected == Byte.class && internal) { - return PainlessCast.boxTo(byte.class, byte.class, explicit, byte.class); + return PainlessCast.boxTargetType(byte.class, byte.class, explicit, byte.class); } else if (expected == Short.class && internal) { - return PainlessCast.boxTo(byte.class, short.class, explicit, 
short.class); + return PainlessCast.boxTargetType(byte.class, short.class, explicit, short.class); } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTo(byte.class, char.class, true, char.class); + return PainlessCast.boxTargetType(byte.class, char.class, true, char.class); } else if (expected == Integer.class && internal) { - return PainlessCast.boxTo(byte.class, int.class, explicit, int.class); + return PainlessCast.boxTargetType(byte.class, int.class, explicit, int.class); } else if (expected == Long.class && internal) { - return PainlessCast.boxTo(byte.class, long.class, explicit, long.class); + return PainlessCast.boxTargetType(byte.class, long.class, explicit, long.class); } else if (expected == Float.class && internal) { - return PainlessCast.boxTo(byte.class, float.class, explicit, float.class); + return PainlessCast.boxTargetType(byte.class, float.class, explicit, float.class); } else if (expected == Double.class && internal) { - return PainlessCast.boxTo(byte.class, double.class, explicit, double.class); + return PainlessCast.boxTargetType(byte.class, double.class, explicit, double.class); } } else if (actual == short.class) { if (expected == def.class) { - return PainlessCast.boxFrom(Short.class, def.class, explicit, short.class); + return PainlessCast.boxOriginalType(Short.class, def.class, explicit, short.class); } else if (expected == Object.class && internal) { - return PainlessCast.boxFrom(Short.class, Object.class, explicit, short.class); + return PainlessCast.boxOriginalType(Short.class, Object.class, explicit, short.class); } else if (expected == Number.class && internal) { - return PainlessCast.boxFrom(Short.class, Number.class, explicit, short.class); + return PainlessCast.boxOriginalType(Short.class, Number.class, explicit, short.class); } else if (expected == byte.class && explicit) { - return PainlessCast.standard(short.class, byte.class, true); + return PainlessCast.originalTypetoTargetType(short.class, byte.class, true); } else if (expected == char.class && explicit) { - return PainlessCast.standard(short.class, char.class, true); + return PainlessCast.originalTypetoTargetType(short.class, char.class, true); } else if (expected == int.class) { - return PainlessCast.standard(short.class, int.class, explicit); + return PainlessCast.originalTypetoTargetType(short.class, int.class, explicit); } else if (expected == long.class) { - return PainlessCast.standard(short.class, long.class, explicit); + return PainlessCast.originalTypetoTargetType(short.class, long.class, explicit); } else if (expected == float.class) { - return PainlessCast.standard(short.class, float.class, explicit); + return PainlessCast.originalTypetoTargetType(short.class, float.class, explicit); } else if (expected == double.class) { - return PainlessCast.standard(short.class, double.class, explicit); + return PainlessCast.originalTypetoTargetType(short.class, double.class, explicit); } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTo(short.class, byte.class, true, byte.class); + return PainlessCast.boxTargetType(short.class, byte.class, true, byte.class); } else if (expected == Short.class && internal) { - return PainlessCast.boxTo(short.class, short.class, explicit, short.class); + return PainlessCast.boxTargetType(short.class, short.class, explicit, short.class); } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTo(short.class, char.class, true, char.class); + return 
PainlessCast.boxTargetType(short.class, char.class, true, char.class); } else if (expected == Integer.class && internal) { - return PainlessCast.boxTo(short.class, int.class, explicit, int.class); + return PainlessCast.boxTargetType(short.class, int.class, explicit, int.class); } else if (expected == Long.class && internal) { - return PainlessCast.boxTo(short.class, long.class, explicit, long.class); + return PainlessCast.boxTargetType(short.class, long.class, explicit, long.class); } else if (expected == Float.class && internal) { - return PainlessCast.boxTo(short.class, float.class, explicit, float.class); + return PainlessCast.boxTargetType(short.class, float.class, explicit, float.class); } else if (expected == Double.class && internal) { - return PainlessCast.boxTo(short.class, double.class, explicit, double.class); + return PainlessCast.boxTargetType(short.class, double.class, explicit, double.class); } } else if (actual == char.class) { if (expected == def.class) { - return PainlessCast.boxFrom(Character.class, def.class, explicit, char.class); + return PainlessCast.boxOriginalType(Character.class, def.class, explicit, char.class); } else if (expected == Object.class && internal) { - return PainlessCast.boxFrom(Character.class, Object.class, explicit, char.class); + return PainlessCast.boxOriginalType(Character.class, Object.class, explicit, char.class); } else if (expected == Number.class && internal) { - return PainlessCast.boxFrom(Character.class, Number.class, explicit, char.class); + return PainlessCast.boxOriginalType(Character.class, Number.class, explicit, char.class); } else if (expected == String.class) { - return PainlessCast.standard(char.class, String.class, explicit); + return PainlessCast.originalTypetoTargetType(char.class, String.class, explicit); } else if (expected == byte.class && explicit) { - return PainlessCast.standard(char.class, byte.class, true); + return PainlessCast.originalTypetoTargetType(char.class, byte.class, true); } else if (expected == short.class && explicit) { - return PainlessCast.standard(char.class, short.class, true); + return PainlessCast.originalTypetoTargetType(char.class, short.class, true); } else if (expected == int.class) { - return PainlessCast.standard(char.class, int.class, explicit); + return PainlessCast.originalTypetoTargetType(char.class, int.class, explicit); } else if (expected == long.class) { - return PainlessCast.standard(char.class, long.class, explicit); + return PainlessCast.originalTypetoTargetType(char.class, long.class, explicit); } else if (expected == float.class) { - return PainlessCast.standard(char.class, float.class, explicit); + return PainlessCast.originalTypetoTargetType(char.class, float.class, explicit); } else if (expected == double.class) { - return PainlessCast.standard(char.class, double.class, explicit); + return PainlessCast.originalTypetoTargetType(char.class, double.class, explicit); } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTo(char.class, byte.class, true, byte.class); + return PainlessCast.boxTargetType(char.class, byte.class, true, byte.class); } else if (expected == Short.class && internal) { - return PainlessCast.boxTo(char.class, short.class, explicit, short.class); + return PainlessCast.boxTargetType(char.class, short.class, explicit, short.class); } else if (expected == Character.class && internal) { - return PainlessCast.boxTo(char.class, char.class, true, char.class); + return PainlessCast.boxTargetType(char.class, char.class, true, 
char.class); } else if (expected == Integer.class && internal) { - return PainlessCast.boxTo(char.class, int.class, explicit, int.class); + return PainlessCast.boxTargetType(char.class, int.class, explicit, int.class); } else if (expected == Long.class && internal) { - return PainlessCast.boxTo(char.class, long.class, explicit, long.class); + return PainlessCast.boxTargetType(char.class, long.class, explicit, long.class); } else if (expected == Float.class && internal) { - return PainlessCast.boxTo(char.class, float.class, explicit, float.class); + return PainlessCast.boxTargetType(char.class, float.class, explicit, float.class); } else if (expected == Double.class && internal) { - return PainlessCast.boxTo(char.class, double.class, explicit, double.class); + return PainlessCast.boxTargetType(char.class, double.class, explicit, double.class); } } else if (actual == int.class) { if (expected == def.class) { - return PainlessCast.boxFrom(Integer.class, def.class, explicit, int.class); + return PainlessCast.boxOriginalType(Integer.class, def.class, explicit, int.class); } else if (expected == Object.class && internal) { - return PainlessCast.boxFrom(Integer.class, Object.class, explicit, int.class); + return PainlessCast.boxOriginalType(Integer.class, Object.class, explicit, int.class); } else if (expected == Number.class && internal) { - return PainlessCast.boxFrom(Integer.class, Number.class, explicit, int.class); + return PainlessCast.boxOriginalType(Integer.class, Number.class, explicit, int.class); } else if (expected == byte.class && explicit) { - return PainlessCast.standard(int.class, byte.class, true); + return PainlessCast.originalTypetoTargetType(int.class, byte.class, true); } else if (expected == char.class && explicit) { - return PainlessCast.standard(int.class, char.class, true); + return PainlessCast.originalTypetoTargetType(int.class, char.class, true); } else if (expected == short.class && explicit) { - return PainlessCast.standard(int.class, short.class, true); + return PainlessCast.originalTypetoTargetType(int.class, short.class, true); } else if (expected == long.class) { - return PainlessCast.standard(int.class, long.class, explicit); + return PainlessCast.originalTypetoTargetType(int.class, long.class, explicit); } else if (expected == float.class) { - return PainlessCast.standard(int.class, float.class, explicit); + return PainlessCast.originalTypetoTargetType(int.class, float.class, explicit); } else if (expected == double.class) { - return PainlessCast.standard(int.class, double.class, explicit); + return PainlessCast.originalTypetoTargetType(int.class, double.class, explicit); } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTo(int.class, byte.class, true, byte.class); + return PainlessCast.boxTargetType(int.class, byte.class, true, byte.class); } else if (expected == Short.class && explicit && internal) { - return PainlessCast.boxTo(int.class, short.class, true, short.class); + return PainlessCast.boxTargetType(int.class, short.class, true, short.class); } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTo(int.class, char.class, true, char.class); + return PainlessCast.boxTargetType(int.class, char.class, true, char.class); } else if (expected == Integer.class && internal) { - return PainlessCast.boxTo(int.class, int.class, explicit, int.class); + return PainlessCast.boxTargetType(int.class, int.class, explicit, int.class); } else if (expected == Long.class && internal) { - return 
PainlessCast.boxTo(int.class, long.class, explicit, long.class); + return PainlessCast.boxTargetType(int.class, long.class, explicit, long.class); } else if (expected == Float.class && internal) { - return PainlessCast.boxTo(int.class, float.class, explicit, float.class); + return PainlessCast.boxTargetType(int.class, float.class, explicit, float.class); } else if (expected == Double.class && internal) { - return PainlessCast.boxTo(int.class, double.class, explicit, double.class); + return PainlessCast.boxTargetType(int.class, double.class, explicit, double.class); } } else if (actual == long.class) { if (expected == def.class) { - return PainlessCast.boxFrom(Long.class, def.class, explicit, long.class); + return PainlessCast.boxOriginalType(Long.class, def.class, explicit, long.class); } else if (expected == Object.class && internal) { - return PainlessCast.boxFrom(Long.class, Object.class, explicit, long.class); + return PainlessCast.boxOriginalType(Long.class, Object.class, explicit, long.class); } else if (expected == Number.class && internal) { - return PainlessCast.boxFrom(Long.class, Number.class, explicit, long.class); + return PainlessCast.boxOriginalType(Long.class, Number.class, explicit, long.class); } else if (expected == byte.class && explicit) { - return PainlessCast.standard(long.class, byte.class, true); + return PainlessCast.originalTypetoTargetType(long.class, byte.class, true); } else if (expected == char.class && explicit) { - return PainlessCast.standard(long.class, char.class, true); + return PainlessCast.originalTypetoTargetType(long.class, char.class, true); } else if (expected == short.class && explicit) { - return PainlessCast.standard(long.class, short.class, true); + return PainlessCast.originalTypetoTargetType(long.class, short.class, true); } else if (expected == int.class && explicit) { - return PainlessCast.standard(long.class, int.class, true); + return PainlessCast.originalTypetoTargetType(long.class, int.class, true); } else if (expected == float.class) { - return PainlessCast.standard(long.class, float.class, explicit); + return PainlessCast.originalTypetoTargetType(long.class, float.class, explicit); } else if (expected == double.class) { - return PainlessCast.standard(long.class, double.class, explicit); + return PainlessCast.originalTypetoTargetType(long.class, double.class, explicit); } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTo(long.class, byte.class, true, byte.class); + return PainlessCast.boxTargetType(long.class, byte.class, true, byte.class); } else if (expected == Short.class && explicit && internal) { - return PainlessCast.boxTo(long.class, short.class, true, short.class); + return PainlessCast.boxTargetType(long.class, short.class, true, short.class); } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTo(long.class, char.class, true, char.class); + return PainlessCast.boxTargetType(long.class, char.class, true, char.class); } else if (expected == Integer.class && explicit && internal) { - return PainlessCast.boxTo(long.class, int.class, true, int.class); + return PainlessCast.boxTargetType(long.class, int.class, true, int.class); } else if (expected == Long.class && internal) { - return PainlessCast.boxTo(long.class, long.class, explicit, long.class); + return PainlessCast.boxTargetType(long.class, long.class, explicit, long.class); } else if (expected == Float.class && internal) { - return PainlessCast.boxTo(long.class, float.class, explicit, float.class); 
+ return PainlessCast.boxTargetType(long.class, float.class, explicit, float.class); } else if (expected == Double.class && internal) { - return PainlessCast.boxTo(long.class, double.class, explicit, double.class); + return PainlessCast.boxTargetType(long.class, double.class, explicit, double.class); } } else if (actual == float.class) { if (expected == def.class) { - return PainlessCast.boxFrom(Float.class, def.class, explicit, float.class); + return PainlessCast.boxOriginalType(Float.class, def.class, explicit, float.class); } else if (expected == Object.class && internal) { - return PainlessCast.boxFrom(Float.class, Object.class, explicit, float.class); + return PainlessCast.boxOriginalType(Float.class, Object.class, explicit, float.class); } else if (expected == Number.class && internal) { - return PainlessCast.boxFrom(Float.class, Number.class, explicit, float.class); + return PainlessCast.boxOriginalType(Float.class, Number.class, explicit, float.class); } else if (expected == byte.class && explicit) { - return PainlessCast.standard(float.class, byte.class, true); + return PainlessCast.originalTypetoTargetType(float.class, byte.class, true); } else if (expected == char.class && explicit) { - return PainlessCast.standard(float.class, char.class, true); + return PainlessCast.originalTypetoTargetType(float.class, char.class, true); } else if (expected == short.class && explicit) { - return PainlessCast.standard(float.class, short.class, true); + return PainlessCast.originalTypetoTargetType(float.class, short.class, true); } else if (expected == int.class && explicit) { - return PainlessCast.standard(float.class, int.class, true); + return PainlessCast.originalTypetoTargetType(float.class, int.class, true); } else if (expected == long.class && explicit) { - return PainlessCast.standard(float.class, long.class, true); + return PainlessCast.originalTypetoTargetType(float.class, long.class, true); } else if (expected == double.class) { - return PainlessCast.standard(float.class, double.class, explicit); + return PainlessCast.originalTypetoTargetType(float.class, double.class, explicit); } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTo(float.class, byte.class, true, byte.class); + return PainlessCast.boxTargetType(float.class, byte.class, true, byte.class); } else if (expected == Short.class && explicit && internal) { - return PainlessCast.boxTo(float.class, short.class, true, short.class); + return PainlessCast.boxTargetType(float.class, short.class, true, short.class); } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTo(float.class, char.class, true, char.class); + return PainlessCast.boxTargetType(float.class, char.class, true, char.class); } else if (expected == Integer.class && explicit && internal) { - return PainlessCast.boxTo(float.class, int.class, true, int.class); + return PainlessCast.boxTargetType(float.class, int.class, true, int.class); } else if (expected == Long.class && explicit && internal) { - return PainlessCast.boxTo(float.class, long.class, true, long.class); + return PainlessCast.boxTargetType(float.class, long.class, true, long.class); } else if (expected == Float.class && internal) { - return PainlessCast.boxTo(float.class, float.class, explicit, float.class); + return PainlessCast.boxTargetType(float.class, float.class, explicit, float.class); } else if (expected == Double.class && internal) { - return PainlessCast.boxTo(float.class, double.class, explicit, double.class); + return 
PainlessCast.boxTargetType(float.class, double.class, explicit, double.class); } } else if (actual == double.class) { if (expected == def.class) { - return PainlessCast.boxFrom(Double.class, def.class, explicit, double.class); + return PainlessCast.boxOriginalType(Double.class, def.class, explicit, double.class); } else if (expected == Object.class && internal) { - return PainlessCast.boxFrom(Double.class, Object.class, explicit, double.class); + return PainlessCast.boxOriginalType(Double.class, Object.class, explicit, double.class); } else if (expected == Number.class && internal) { - return PainlessCast.boxFrom(Double.class, Number.class, explicit, double.class); + return PainlessCast.boxOriginalType(Double.class, Number.class, explicit, double.class); } else if (expected == byte.class && explicit) { - return PainlessCast.standard(double.class, byte.class, true); + return PainlessCast.originalTypetoTargetType(double.class, byte.class, true); } else if (expected == char.class && explicit) { - return PainlessCast.standard(double.class, char.class, true); + return PainlessCast.originalTypetoTargetType(double.class, char.class, true); } else if (expected == short.class && explicit) { - return PainlessCast.standard(double.class, short.class, true); + return PainlessCast.originalTypetoTargetType(double.class, short.class, true); } else if (expected == int.class && explicit) { - return PainlessCast.standard(double.class, int.class, true); + return PainlessCast.originalTypetoTargetType(double.class, int.class, true); } else if (expected == long.class && explicit) { - return PainlessCast.standard(double.class, long.class, true); + return PainlessCast.originalTypetoTargetType(double.class, long.class, true); } else if (expected == float.class && explicit) { - return PainlessCast.standard(double.class, float.class, true); + return PainlessCast.originalTypetoTargetType(double.class, float.class, true); } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTo(double.class, byte.class, true, byte.class); + return PainlessCast.boxTargetType(double.class, byte.class, true, byte.class); } else if (expected == Short.class && explicit && internal) { - return PainlessCast.boxTo(double.class, short.class, true, short.class); + return PainlessCast.boxTargetType(double.class, short.class, true, short.class); } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTo(double.class, char.class, true, char.class); + return PainlessCast.boxTargetType(double.class, char.class, true, char.class); } else if (expected == Integer.class && explicit && internal) { - return PainlessCast.boxTo(double.class, int.class, true, int.class); + return PainlessCast.boxTargetType(double.class, int.class, true, int.class); } else if (expected == Long.class && explicit && internal) { - return PainlessCast.boxTo(double.class, long.class, true, long.class); + return PainlessCast.boxTargetType(double.class, long.class, true, long.class); } else if (expected == Float.class && explicit && internal) { - return PainlessCast.boxTo(double.class, float.class, true, float.class); + return PainlessCast.boxTargetType(double.class, float.class, true, float.class); } else if (expected == Double.class && internal) { - return PainlessCast.boxTo(double.class, double.class, explicit, double.class); + return PainlessCast.boxTargetType(double.class, double.class, explicit, double.class); } } else if (actual == Boolean.class) { if (expected == boolean.class && internal) { - return 
PainlessCast.unboxFrom(boolean.class, boolean.class, explicit, boolean.class); + return PainlessCast.unboxOriginalType(boolean.class, boolean.class, explicit, boolean.class); } } else if (actual == Byte.class) { if (expected == byte.class && internal) { - return PainlessCast.unboxFrom(byte.class, byte.class, explicit, byte.class); + return PainlessCast.unboxOriginalType(byte.class, byte.class, explicit, byte.class); } else if (expected == short.class && internal) { - return PainlessCast.unboxFrom(byte.class, short.class, explicit, byte.class); + return PainlessCast.unboxOriginalType(byte.class, short.class, explicit, byte.class); } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxFrom(byte.class, char.class, true, byte.class); + return PainlessCast.unboxOriginalType(byte.class, char.class, true, byte.class); } else if (expected == int.class && internal) { - return PainlessCast.unboxFrom(byte.class, int.class, explicit, byte.class); + return PainlessCast.unboxOriginalType(byte.class, int.class, explicit, byte.class); } else if (expected == long.class && internal) { - return PainlessCast.unboxFrom(byte.class, long.class, explicit, byte.class); + return PainlessCast.unboxOriginalType(byte.class, long.class, explicit, byte.class); } else if (expected == float.class && internal) { - return PainlessCast.unboxFrom(byte.class, float.class, explicit, byte.class); + return PainlessCast.unboxOriginalType(byte.class, float.class, explicit, byte.class); } else if (expected == double.class && internal) { - return PainlessCast.unboxFrom(byte.class, double.class, explicit, byte.class); + return PainlessCast.unboxOriginalType(byte.class, double.class, explicit, byte.class); } } else if (actual == Short.class) { if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxFrom(short.class, byte.class, true, short.class); + return PainlessCast.unboxOriginalType(short.class, byte.class, true, short.class); } else if (expected == short.class && internal) { - return PainlessCast.unboxFrom(short.class, short.class, explicit, short.class); + return PainlessCast.unboxOriginalType(short.class, short.class, explicit, short.class); } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxFrom(short.class, char.class, true, short.class); + return PainlessCast.unboxOriginalType(short.class, char.class, true, short.class); } else if (expected == int.class && internal) { - return PainlessCast.unboxFrom(short.class, int.class, explicit, short.class); + return PainlessCast.unboxOriginalType(short.class, int.class, explicit, short.class); } else if (expected == long.class && internal) { - return PainlessCast.unboxFrom(short.class, long.class, explicit, short.class); + return PainlessCast.unboxOriginalType(short.class, long.class, explicit, short.class); } else if (expected == float.class && internal) { - return PainlessCast.unboxFrom(short.class, float.class, explicit, short.class); + return PainlessCast.unboxOriginalType(short.class, float.class, explicit, short.class); } else if (expected == double.class && internal) { - return PainlessCast.unboxFrom(short.class, double.class, explicit, short.class); + return PainlessCast.unboxOriginalType(short.class, double.class, explicit, short.class); } } else if (actual == Character.class) { if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxFrom(char.class, byte.class, true, char.class); + return PainlessCast.unboxOriginalType(char.class, byte.class, true, 
char.class); } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxFrom(char.class, short.class, true, char.class); + return PainlessCast.unboxOriginalType(char.class, short.class, true, char.class); } else if (expected == char.class && internal) { - return PainlessCast.unboxFrom(char.class, char.class, explicit, char.class); + return PainlessCast.unboxOriginalType(char.class, char.class, explicit, char.class); } else if (expected == int.class && internal) { - return PainlessCast.unboxFrom(char.class, int.class, explicit, char.class); + return PainlessCast.unboxOriginalType(char.class, int.class, explicit, char.class); } else if (expected == long.class && internal) { - return PainlessCast.unboxFrom(char.class, long.class, explicit, char.class); + return PainlessCast.unboxOriginalType(char.class, long.class, explicit, char.class); } else if (expected == float.class && internal) { - return PainlessCast.unboxFrom(char.class, float.class, explicit, char.class); + return PainlessCast.unboxOriginalType(char.class, float.class, explicit, char.class); } else if (expected == double.class && internal) { - return PainlessCast.unboxFrom(char.class, double.class, explicit, char.class); + return PainlessCast.unboxOriginalType(char.class, double.class, explicit, char.class); } } else if (actual == Integer.class) { if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxFrom(int.class, byte.class, true, int.class); + return PainlessCast.unboxOriginalType(int.class, byte.class, true, int.class); } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxFrom(int.class, short.class, true, int.class); + return PainlessCast.unboxOriginalType(int.class, short.class, true, int.class); } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxFrom(int.class, char.class, true, int.class); + return PainlessCast.unboxOriginalType(int.class, char.class, true, int.class); } else if (expected == int.class && internal) { - return PainlessCast.unboxFrom(int.class, int.class, explicit, int.class); + return PainlessCast.unboxOriginalType(int.class, int.class, explicit, int.class); } else if (expected == long.class && internal) { - return PainlessCast.unboxFrom(int.class, long.class, explicit, int.class); + return PainlessCast.unboxOriginalType(int.class, long.class, explicit, int.class); } else if (expected == float.class && internal) { - return PainlessCast.unboxFrom(int.class, float.class, explicit, int.class); + return PainlessCast.unboxOriginalType(int.class, float.class, explicit, int.class); } else if (expected == double.class && internal) { - return PainlessCast.unboxFrom(int.class, double.class, explicit, int.class); + return PainlessCast.unboxOriginalType(int.class, double.class, explicit, int.class); } } else if (actual == Long.class) { if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxFrom(long.class, byte.class, true, long.class); + return PainlessCast.unboxOriginalType(long.class, byte.class, true, long.class); } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxFrom(long.class, short.class, true, long.class); + return PainlessCast.unboxOriginalType(long.class, short.class, true, long.class); } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxFrom(long.class, char.class, true, long.class); + return PainlessCast.unboxOriginalType(long.class, char.class, true, long.class); } else if 
(expected == int.class && explicit && internal) { - return PainlessCast.unboxFrom(long.class, int.class, true, long.class); + return PainlessCast.unboxOriginalType(long.class, int.class, true, long.class); } else if (expected == long.class && internal) { - return PainlessCast.unboxFrom(long.class, long.class, explicit, long.class); + return PainlessCast.unboxOriginalType(long.class, long.class, explicit, long.class); } else if (expected == float.class && internal) { - return PainlessCast.unboxFrom(long.class, float.class, explicit, long.class); + return PainlessCast.unboxOriginalType(long.class, float.class, explicit, long.class); } else if (expected == double.class && internal) { - return PainlessCast.unboxFrom(long.class, double.class, explicit, long.class); + return PainlessCast.unboxOriginalType(long.class, double.class, explicit, long.class); } } else if (actual == Float.class) { if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxFrom(float.class, byte.class, true, float.class); + return PainlessCast.unboxOriginalType(float.class, byte.class, true, float.class); } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxFrom(float.class, short.class, true, float.class); + return PainlessCast.unboxOriginalType(float.class, short.class, true, float.class); } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxFrom(float.class, char.class, true, float.class); + return PainlessCast.unboxOriginalType(float.class, char.class, true, float.class); } else if (expected == int.class && explicit && internal) { - return PainlessCast.unboxFrom(float.class, int.class, true, float.class); + return PainlessCast.unboxOriginalType(float.class, int.class, true, float.class); } else if (expected == long.class && explicit && internal) { - return PainlessCast.unboxFrom(float.class, long.class, true, float.class); + return PainlessCast.unboxOriginalType(float.class, long.class, true, float.class); } else if (expected == float.class && internal) { - return PainlessCast.unboxFrom(float.class, float.class, explicit, float.class); + return PainlessCast.unboxOriginalType(float.class, float.class, explicit, float.class); } else if (expected == double.class && internal) { - return PainlessCast.unboxFrom(float.class, double.class, explicit, float.class); + return PainlessCast.unboxOriginalType(float.class, double.class, explicit, float.class); } } else if (actual == Double.class) { if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxFrom(double.class, byte.class, true, double.class); + return PainlessCast.unboxOriginalType(double.class, byte.class, true, double.class); } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxFrom(double.class, short.class, true, double.class); + return PainlessCast.unboxOriginalType(double.class, short.class, true, double.class); } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxFrom(double.class, char.class, true, double.class); + return PainlessCast.unboxOriginalType(double.class, char.class, true, double.class); } else if (expected == int.class && explicit && internal) { - return PainlessCast.unboxFrom(double.class, int.class, true, double.class); + return PainlessCast.unboxOriginalType(double.class, int.class, true, double.class); } else if (expected == long.class && explicit && internal) { - return PainlessCast.unboxFrom(double.class, long.class, true, double.class); + return 
PainlessCast.unboxOriginalType(double.class, long.class, true, double.class); } else if (expected == float.class && explicit && internal) { - return PainlessCast.unboxFrom(double.class, float.class, true, double.class); + return PainlessCast.unboxOriginalType(double.class, float.class, true, double.class); } else if (expected == double.class && internal) { - return PainlessCast.unboxFrom(double.class, double.class, explicit, double.class); + return PainlessCast.unboxOriginalType(double.class, double.class, explicit, double.class); } } @@ -463,7 +463,7 @@ public final class AnalyzerCaster { (actual != void.class && expected == def.class) || expected.isAssignableFrom(actual) || (actual.isAssignableFrom(expected) && explicit)) { - return PainlessCast.standard(actual, expected, explicit); + return PainlessCast.originalTypetoTargetType(actual, expected, explicit); } else { throw location.createError(new ClassCastException("Cannot cast from " + "[" + PainlessLookupUtility.typeToCanonicalTypeName(actual) + "] to " + @@ -472,8 +472,8 @@ public final class AnalyzerCaster { } public static Object constCast(Location location, Object constant, PainlessCast cast) { - Class fsort = cast.from; - Class tsort = cast.to; + Class fsort = cast.originalType; + Class tsort = cast.targetType; if (fsort == tsort) { return constant; @@ -499,11 +499,11 @@ public final class AnalyzerCaster { else if (tsort == double.class) return number.doubleValue(); else { throw location.createError(new IllegalStateException("Cannot cast from " + - "[" + cast.from.getCanonicalName() + "] to [" + cast.to.getCanonicalName() + "].")); + "[" + cast.originalType.getCanonicalName() + "] to [" + cast.targetType.getCanonicalName() + "].")); } } else { throw location.createError(new IllegalStateException("Cannot cast from " + - "[" + cast.from.getCanonicalName() + "] to [" + cast.to.getCanonicalName() + "].")); + "[" + cast.originalType.getCanonicalName() + "] to [" + cast.targetType.getCanonicalName() + "].")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java index 807b4409d7a..97dddbdfe52 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java @@ -20,6 +20,7 @@ package org.elasticsearch.painless; import org.elasticsearch.bootstrap.BootstrapInfo; +import org.elasticsearch.painless.Locals.LocalMethod; import org.elasticsearch.painless.antlr.Walker; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.node.SSource; @@ -32,6 +33,7 @@ import java.net.URL; import java.security.CodeSource; import java.security.SecureClassLoader; import java.security.cert.Certificate; +import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.painless.WriterConstants.CLASS_NAME; @@ -96,7 +98,7 @@ final class Compiler { if (statefulFactoryClass != null && statefulFactoryClass.getName().equals(name)) { return statefulFactoryClass; } - Class found = painlessLookup.getClassFromBinaryName(name); + Class found = painlessLookup.canonicalTypeNameToType(name.replace('$', '.')); return found != null ? 
found : super.findClass(name); } @@ -200,7 +202,7 @@ final class Compiler { ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, scriptClass); SSource root = Walker.buildPainlessTree(scriptClassInfo, reserved, name, source, settings, painlessLookup, null); - root.analyze(painlessLookup); + Map localMethods = root.analyze(painlessLookup); root.write(); try { @@ -209,6 +211,7 @@ final class Compiler { clazz.getField("$SOURCE").set(null, source); clazz.getField("$STATEMENTS").set(null, root.getStatements()); clazz.getField("$DEFINITION").set(null, painlessLookup); + clazz.getField("$LOCALS").set(null, localMethods); return clazz.getConstructors()[0]; } catch (Exception exception) { // Catch everything to let the user know this is something caused internally. diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index 91c25b7cfec..1e17d6024d4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -19,7 +19,7 @@ package org.elasticsearch.painless; -import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.Locals.LocalMethod; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; @@ -37,6 +37,8 @@ import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; + /** * Support for dynamic type (def). *
<p>
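The hunk just below deletes Def's hand-rolled walk over the receiver's class hierarchy (lookupMethodInternal) in favor of PainlessLookup.lookupRuntimePainlessMethod. The essential idea is that a def call site resolves its method handle from the receiver's runtime class, not its static type. A minimal, self-contained sketch of that pattern; the String receiver and the length call are illustrative only and not taken from this patch:

    import java.lang.invoke.MethodHandle;
    import java.lang.invoke.MethodHandles;
    import java.lang.invoke.MethodType;

    public class DefDispatchSketch {
        public static void main(String[] args) throws Throwable {
            Object receiver = "painless"; // static type Object, runtime type String
            // Resolve against receiver.getClass(), as a def call site must do at runtime.
            MethodHandle length = MethodHandles.publicLookup().findVirtual(
                    receiver.getClass(), "length", MethodType.methodType(int.class));
            System.out.println((int) length.invoke(receiver)); // prints 8
        }
    }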
        @@ -166,52 +168,6 @@ public final class Def { } } - /** - * Looks up method entry for a dynamic method call. - *
<p>
        - * A dynamic method call for variable {@code x} of type {@code def} looks like: - * {@code x.method(args...)} - *
<p>
        - * This method traverses {@code recieverClass}'s class hierarchy (including interfaces) - * until it finds a matching whitelisted method. If one is not found, it throws an exception. - * Otherwise it returns the matching method. - *
<p>
        - * @params painlessLookup the whitelist - * @param receiverClass Class of the object to invoke the method on. - * @param name Name of the method. - * @param arity arity of method - * @return matching method to invoke. never returns null. - * @throws IllegalArgumentException if no matching whitelisted method was found. - */ - static PainlessMethod lookupMethodInternal(PainlessLookup painlessLookup, Class receiverClass, String name, int arity) { - String key = PainlessLookupUtility.buildPainlessMethodKey(name, arity); - // check whitelist for matching method - for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(clazz); - - if (struct != null) { - PainlessMethod method = struct.methods.get(key); - if (method != null) { - return method; - } - } - - for (Class iface : clazz.getInterfaces()) { - struct = painlessLookup.getPainlessStructFromJavaClass(iface); - - if (struct != null) { - PainlessMethod method = struct.methods.get(key); - if (method != null) { - return method; - } - } - } - } - - throw new IllegalArgumentException("Unable to find dynamic method [" + name + "] with [" + arity + "] arguments " + - "for class [" + receiverClass.getCanonicalName() + "]."); - } - /** * Looks up handle for a dynamic method call, with lambda replacement *
<p>
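The hunk below rewires lookupMethod to accept the compiled script's localMethods so that deferred method references can resolve user-defined functions, throwing an IllegalArgumentException when lookupRuntimePainlessMethod returns null. The surrounding comments compare this machinery to the JDK's LambdaMetafactory; for orientation, here is a minimal sketch of that static pattern (MetafactorySketch and the String::toUpperCase delegate are hypothetical examples, not code from this patch):

    import java.lang.invoke.CallSite;
    import java.lang.invoke.LambdaMetafactory;
    import java.lang.invoke.MethodHandle;
    import java.lang.invoke.MethodHandles;
    import java.lang.invoke.MethodType;
    import java.util.function.Function;

    public class MetafactorySketch {
        public static void main(String[] args) throws Throwable {
            MethodHandles.Lookup lookup = MethodHandles.lookup();
            // The delegate method the generated functional interface forwards to.
            MethodHandle delegate = lookup.findVirtual(
                    String.class, "toUpperCase", MethodType.methodType(String.class));
            CallSite site = LambdaMetafactory.metafactory(
                    lookup,
                    "apply",                                            // interface method name
                    MethodType.methodType(Function.class),              // factory type: () -> Function
                    MethodType.methodType(Object.class, Object.class),  // erased SAM signature
                    delegate,
                    MethodType.methodType(String.class, String.class)); // instantiated signature
            @SuppressWarnings("unchecked")
            Function<String, String> toUpper = (Function<String, String>) site.getTarget().invokeExact();
            System.out.println(toUpper.apply("def")); // prints DEF
        }
    }

Painless cannot hand the job to LambdaMetafactory directly because, as the comment below puts it, the delegate may only be known from the receiver's dynamic type, which is why the patch threads localMethods and the PainlessLookup through to LambdaBootstrap instead.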
        @@ -232,13 +188,22 @@ public final class Def { * @throws IllegalArgumentException if no matching whitelisted method was found. * @throws Throwable if a method reference cannot be converted to an functional interface */ - static MethodHandle lookupMethod(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, MethodType callSiteType, - Class receiverClass, String name, Object args[]) throws Throwable { + static MethodHandle lookupMethod(PainlessLookup painlessLookup, Map localMethods, + MethodHandles.Lookup methodHandlesLookup, MethodType callSiteType, Class receiverClass, String name, Object args[]) + throws Throwable { + String recipeString = (String) args[0]; int numArguments = callSiteType.parameterCount(); // simple case: no lambdas if (recipeString.isEmpty()) { - return lookupMethodInternal(painlessLookup, receiverClass, name, numArguments - 1).methodHandle; + PainlessMethod painlessMethod = painlessLookup.lookupRuntimePainlessMethod(receiverClass, name, numArguments - 1); + + if (painlessMethod == null) { + throw new IllegalArgumentException("dynamic method " + + "[" + typeToCanonicalTypeName(receiverClass) + ", " + name + "/" + (numArguments - 1) + "] not found"); + } + + return painlessMethod.methodHandle; } // convert recipe string to a bitset for convenience (the code below should be refactored...) @@ -261,7 +226,13 @@ public final class Def { // lookup the method with the proper arity, then we know everything (e.g. interface types of parameters). // based on these we can finally link any remaining lambdas that were deferred. - PainlessMethod method = lookupMethodInternal(painlessLookup, receiverClass, name, arity); + PainlessMethod method = painlessLookup.lookupRuntimePainlessMethod(receiverClass, name, arity); + + if (method == null) { + throw new IllegalArgumentException( + "dynamic method [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "/" + arity + "] not found"); + } + MethodHandle handle = method.methodHandle; int replaced = 0; @@ -276,27 +247,29 @@ public final class Def { String type = signature.substring(1, separator); String call = signature.substring(separator+1, separator2); int numCaptures = Integer.parseInt(signature.substring(separator2+1)); - Class captures[] = new Class[numCaptures]; - for (int capture = 0; capture < captures.length; capture++) { - captures[capture] = callSiteType.parameterType(i + 1 + capture); - } MethodHandle filter; Class interfaceType = method.typeParameters.get(i - 1 - replaced); if (signature.charAt(0) == 'S') { // the implementation is strongly typed, now that we know the interface type, // we have everything. filter = lookupReferenceInternal(painlessLookup, + localMethods, methodHandlesLookup, interfaceType, type, call, - captures); + numCaptures); } else if (signature.charAt(0) == 'D') { // the interface type is now known, but we need to get the implementation. // this is dynamically based on the receiver type (and cached separately, underneath // this cache). It won't blow up since we never nest here (just references) + Class captures[] = new Class[numCaptures]; + for (int capture = 0; capture < captures.length; capture++) { + captures[capture] = callSiteType.parameterType(i + 1 + capture); + } MethodType nestedType = MethodType.methodType(interfaceType, captures); CallSite nested = DefBootstrap.bootstrap(painlessLookup, + localMethods, methodHandlesLookup, call, nestedType, @@ -324,70 +297,44 @@ public final class Def { * This is just like LambdaMetaFactory, only with a dynamic type. 
The interface type is known, * so we simply need to lookup the matching implementation method based on receiver type. */ - static MethodHandle lookupReference(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, String interfaceClass, - Class receiverClass, String name) throws Throwable { - Class interfaceType = painlessLookup.getJavaClassFromPainlessType(interfaceClass); - PainlessMethod interfaceMethod = painlessLookup.getPainlessStructFromJavaClass(interfaceType).functionalMethod; - if (interfaceMethod == null) { - throw new IllegalArgumentException("Class [" + interfaceClass + "] is not a functional interface"); - } - int arity = interfaceMethod.typeParameters.size(); - PainlessMethod implMethod = lookupMethodInternal(painlessLookup, receiverClass, name, arity); - return lookupReferenceInternal(painlessLookup, methodHandlesLookup, interfaceType, - PainlessLookupUtility.typeToCanonicalTypeName(implMethod.targetClass), - implMethod.javaMethod.getName(), receiverClass); + static MethodHandle lookupReference(PainlessLookup painlessLookup, Map localMethods, + MethodHandles.Lookup methodHandlesLookup, String interfaceClass, Class receiverClass, String name) throws Throwable { + Class interfaceType = painlessLookup.canonicalTypeNameToType(interfaceClass); + if (interfaceType == null) { + throw new IllegalArgumentException("type [" + interfaceClass + "] not found"); + } + PainlessMethod interfaceMethod = painlessLookup.lookupFunctionalInterfacePainlessMethod(interfaceType); + if (interfaceMethod == null) { + throw new IllegalArgumentException("Class [" + interfaceClass + "] is not a functional interface"); + } + int arity = interfaceMethod.typeParameters.size(); + PainlessMethod implMethod = painlessLookup.lookupRuntimePainlessMethod(receiverClass, name, arity); + if (implMethod == null) { + throw new IllegalArgumentException( + "dynamic method [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "/" + arity + "] not found"); + } + + return lookupReferenceInternal(painlessLookup, localMethods, methodHandlesLookup, + interfaceType, PainlessLookupUtility.typeToCanonicalTypeName(implMethod.targetClass), + implMethod.javaMethod.getName(), 1); } /** Returns a method handle to an implementation of clazz, given method reference signature. */ - private static MethodHandle lookupReferenceInternal(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, - Class clazz, String type, String call, Class... captures) - throws Throwable { - final FunctionRef ref; - if ("this".equals(type)) { - // user written method - PainlessMethod interfaceMethod = painlessLookup.getPainlessStructFromJavaClass(clazz).functionalMethod; - if (interfaceMethod == null) { - throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.typeToCanonicalTypeName(clazz) + "], not a functional interface"); - } - int arity = interfaceMethod.typeParameters.size() + captures.length; - final MethodHandle handle; - try { - MethodHandle accessor = methodHandlesLookup.findStaticGetter(methodHandlesLookup.lookupClass(), - getUserFunctionHandleFieldName(call, arity), - MethodHandle.class); - handle = (MethodHandle)accessor.invokeExact(); - } catch (NoSuchFieldException | IllegalAccessException e) { - // is it a synthetic method? If we generated the method ourselves, be more helpful. It can only fail - // because the arity does not match the expected interface type. 
- if (call.contains("$")) { - throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.javaMethod.getName() + - "] in [" + clazz + "]"); - } - throw new IllegalArgumentException("Unknown call [" + call + "] with [" + arity + "] arguments."); - } - ref = new FunctionRef(clazz, interfaceMethod, call, handle.type(), captures.length); - } else { - // whitelist lookup - ref = FunctionRef.resolveFromLookup(painlessLookup, clazz, type, call, captures.length); - } - final CallSite callSite = LambdaBootstrap.lambdaBootstrap( - methodHandlesLookup, - ref.interfaceMethodName, - ref.factoryMethodType, - ref.interfaceMethodType, - ref.delegateClassName, - ref.delegateInvokeType, - ref.delegateMethodName, - ref.delegateMethodType, - ref.isDelegateInterface ? 1 : 0 - ); - return callSite.dynamicInvoker().asType(MethodType.methodType(clazz, captures)); - } - - /** gets the field name used to lookup up the MethodHandle for a function. */ - public static String getUserFunctionHandleFieldName(String name, int arity) { - return "handle$" + name + "$" + arity; + private static MethodHandle lookupReferenceInternal(PainlessLookup painlessLookup, Map localMethods, + MethodHandles.Lookup methodHandlesLookup, Class clazz, String type, String call, int captures) throws Throwable { + final FunctionRef ref = FunctionRef.create(painlessLookup, localMethods, null, clazz, type, call, captures); + final CallSite callSite = LambdaBootstrap.lambdaBootstrap( + methodHandlesLookup, + ref.interfaceMethodName, + ref.factoryMethodType, + ref.interfaceMethodType, + ref.delegateClassName, + ref.delegateInvokeType, + ref.delegateMethodName, + ref.delegateMethodType, + ref.isDelegateInterface ? 1 : 0 + ); + return callSite.dynamicInvoker().asType(MethodType.methodType(clazz, ref.factoryMethodType.parameterArray())); } /** @@ -418,27 +365,12 @@ public final class Def { */ static MethodHandle lookupGetter(PainlessLookup painlessLookup, Class receiverClass, String name) { // first try whitelist - for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(clazz); + MethodHandle getter = painlessLookup.lookupRuntimeGetterMethodHandle(receiverClass, name); - if (struct != null) { - MethodHandle handle = struct.getterMethodHandles.get(name); - if (handle != null) { - return handle; - } - } - - for (final Class iface : clazz.getInterfaces()) { - struct = painlessLookup.getPainlessStructFromJavaClass(iface); - - if (struct != null) { - MethodHandle handle = struct.getterMethodHandles.get(name); - if (handle != null) { - return handle; - } - } - } + if (getter != null) { + return getter; } + // special case: arrays, maps, and lists if (receiverClass.isArray() && "length".equals(name)) { // arrays expose .length as a read-only getter @@ -455,12 +387,12 @@ public final class Def { int index = Integer.parseInt(name); return MethodHandles.insertArguments(LIST_GET, 1, index); } catch (NumberFormatException exception) { - throw new IllegalArgumentException( "Illegal list shortcut value [" + name + "]."); + throw new IllegalArgumentException("Illegal list shortcut value [" + name + "]."); } } - throw new IllegalArgumentException("Unable to find dynamic field [" + name + "] " + - "for class [" + receiverClass.getCanonicalName() + "]."); + throw new IllegalArgumentException( + "dynamic getter [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "] not found"); } /** @@ -489,27 +421,12 @@ public final class Def { */ 
static MethodHandle lookupSetter(PainlessLookup painlessLookup, Class receiverClass, String name) { // first try whitelist - for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(clazz); + MethodHandle setter = painlessLookup.lookupRuntimeSetterMethodHandle(receiverClass, name); - if (struct != null) { - MethodHandle handle = struct.setterMethodHandles.get(name); - if (handle != null) { - return handle; - } - } - - for (final Class iface : clazz.getInterfaces()) { - struct = painlessLookup.getPainlessStructFromJavaClass(iface); - - if (struct != null) { - MethodHandle handle = struct.setterMethodHandles.get(name); - if (handle != null) { - return handle; - } - } - } + if (setter != null) { + return setter; } + // special case: maps, and lists if (Map.class.isAssignableFrom(receiverClass)) { // maps allow access like mymap.key @@ -523,12 +440,12 @@ int index = Integer.parseInt(name); return MethodHandles.insertArguments(LIST_SET, 1, index); } catch (final NumberFormatException exception) { - throw new IllegalArgumentException( "Illegal list shortcut value [" + name + "]."); + throw new IllegalArgumentException("Illegal list shortcut value [" + name + "]."); } - throw new IllegalArgumentException("Unable to find dynamic field [" + name + "] " + - "for class [" + receiverClass.getCanonicalName() + "]."); + throw new IllegalArgumentException( + "dynamic setter [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "] not found"); } /** diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java index 2fadaf30964..2488b6f218a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java @@ -20,6 +20,7 @@ package org.elasticsearch.painless; import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.painless.Locals.LocalMethod; import org.elasticsearch.painless.lookup.PainlessLookup; import java.lang.invoke.CallSite; @@ -28,6 +29,7 @@ import java.lang.invoke.MethodHandles; import java.lang.invoke.MethodType; import java.lang.invoke.MutableCallSite; import java.lang.invoke.WrongMethodTypeException; +import java.util.Map; /** * Painless invokedynamic bootstrap for the call site.
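Before the PIC changes below, a quick illustration of the caching idea involved: an invokedynamic call site starts out pointing at a slow lookup, and on first use installs a guard on the observed receiver class in front of the resolved handle. This is a monomorphic toy sketch (all names are hypothetical, and it resolves toString purely for demonstration), whereas the PIC in this file chains guards up to MAX_DEPTH:

    import java.lang.invoke.MethodHandle;
    import java.lang.invoke.MethodHandles;
    import java.lang.invoke.MethodType;
    import java.lang.invoke.MutableCallSite;

    public class InlineCacheSketch {
        static final MethodHandles.Lookup LOOKUP = MethodHandles.lookup();
        static final MethodHandle FALLBACK;
        static final MethodHandle CHECK_CLASS;

        static {
            try {
                FALLBACK = LOOKUP.findStatic(InlineCacheSketch.class, "fallback",
                        MethodType.methodType(Object.class, MutableCallSite.class, Object.class));
                CHECK_CLASS = LOOKUP.findStatic(InlineCacheSketch.class, "checkClass",
                        MethodType.methodType(boolean.class, Class.class, Object.class));
            } catch (ReflectiveOperationException e) {
                throw new ExceptionInInitializerError(e);
            }
        }

        static boolean checkClass(Class<?> clazz, Object receiver) {
            return receiver.getClass() == clazz;
        }

        // Slow path: resolve from the runtime class, then cache behind a class guard.
        static Object fallback(MutableCallSite site, Object receiver) throws Throwable {
            Class<?> clazz = receiver.getClass();
            MethodHandle target = LOOKUP.findVirtual(
                            clazz, "toString", MethodType.methodType(String.class))
                    .asType(MethodType.methodType(Object.class, Object.class));
            site.setTarget(MethodHandles.guardWithTest(
                    CHECK_CLASS.bindTo(clazz),   // receiver.getClass() == clazz?
                    target,                      // cached fast path
                    FALLBACK.bindTo(site)));     // cache miss: resolve again
            return target.invoke(receiver);
        }

        public static void main(String[] args) throws Throwable {
            MutableCallSite site = new MutableCallSite(
                    MethodType.methodType(Object.class, Object.class));
            site.setTarget(FALLBACK.bindTo(site));
            System.out.println(site.dynamicInvoker().invoke(42)); // miss: installs Integer guard
            System.out.println(site.dynamicInvoker().invoke(7));  // hit: guarded fast path
        }
    }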
@@ -105,19 +107,21 @@ public final class DefBootstrap { static final int MAX_DEPTH = 5; private final PainlessLookup painlessLookup; + private final Map localMethods; private final MethodHandles.Lookup methodHandlesLookup; private final String name; private final int flavor; private final Object[] args; int depth; // pkg-protected for testing - PIC(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, - String name, MethodType type, int initialDepth, int flavor, Object[] args) { + PIC(PainlessLookup painlessLookup, Map localMethods, + MethodHandles.Lookup methodHandlesLookup, String name, MethodType type, int initialDepth, int flavor, Object[] args) { super(type); if (type.parameterType(0) != Object.class) { throw new BootstrapMethodError("The receiver type (1st arg) of invokedynamic descriptor must be Object."); } this.painlessLookup = painlessLookup; + this.localMethods = localMethods; this.methodHandlesLookup = methodHandlesLookup; this.name = name; this.flavor = flavor; @@ -145,7 +149,7 @@ public final class DefBootstrap { private MethodHandle lookup(int flavor, String name, Class receiver) throws Throwable { switch(flavor) { case METHOD_CALL: - return Def.lookupMethod(painlessLookup, methodHandlesLookup, type(), receiver, name, args); + return Def.lookupMethod(painlessLookup, localMethods, methodHandlesLookup, type(), receiver, name, args); case LOAD: return Def.lookupGetter(painlessLookup, receiver, name); case STORE: @@ -157,7 +161,7 @@ public final class DefBootstrap { case ITERATOR: return Def.lookupIterator(receiver); case REFERENCE: - return Def.lookupReference(painlessLookup, methodHandlesLookup, (String) args[0], receiver, name); + return Def.lookupReference(painlessLookup, localMethods, methodHandlesLookup, (String) args[0], receiver, name); case INDEX_NORMALIZE: return Def.lookupIndexNormalize(receiver); default: throw new AssertionError(); @@ -432,8 +436,9 @@ public final class DefBootstrap { *
<p>
        * see https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.invokedynamic */ - public static CallSite bootstrap(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, String name, - MethodType type, int initialDepth, int flavor, Object... args) { + @SuppressWarnings("unchecked") + public static CallSite bootstrap(PainlessLookup painlessLookup, Map localMethods, + MethodHandles.Lookup methodHandlesLookup, String name, MethodType type, int initialDepth, int flavor, Object... args) { // validate arguments switch(flavor) { // "function-call" like things get a polymorphic cache @@ -452,7 +457,7 @@ public final class DefBootstrap { if (args.length != numLambdas + 1) { throw new BootstrapMethodError("Illegal number of parameters: expected " + numLambdas + " references"); } - return new PIC(painlessLookup, methodHandlesLookup, name, type, initialDepth, flavor, args); + return new PIC(painlessLookup, localMethods, methodHandlesLookup, name, type, initialDepth, flavor, args); case LOAD: case STORE: case ARRAY_LOAD: @@ -462,7 +467,7 @@ public final class DefBootstrap { if (args.length > 0) { throw new BootstrapMethodError("Illegal static bootstrap parameters for flavor: " + flavor); } - return new PIC(painlessLookup, methodHandlesLookup, name, type, initialDepth, flavor, args); + return new PIC(painlessLookup, localMethods, methodHandlesLookup, name, type, initialDepth, flavor, args); case REFERENCE: if (args.length != 1) { throw new BootstrapMethodError("Invalid number of parameters for reference call"); @@ -470,7 +475,7 @@ public final class DefBootstrap { if (args[0] instanceof String == false) { throw new BootstrapMethodError("Illegal parameter for reference call: " + args[0]); } - return new PIC(painlessLookup, methodHandlesLookup, name, type, initialDepth, flavor, args); + return new PIC(painlessLookup, localMethods, methodHandlesLookup, name, type, initialDepth, flavor, args); // operators get monomorphic cache, with a generic impl for a fallback case UNARY_OPERATOR: diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java index d4671f05b6c..2580d7da3e8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java @@ -20,17 +20,17 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.Locals.LocalMethod; -import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessConstructor; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.objectweb.asm.Type; import java.lang.invoke.MethodType; -import java.lang.reflect.Constructor; import java.lang.reflect.Modifier; +import java.util.ArrayList; import java.util.List; +import java.util.Map; +import java.util.Objects; import static org.elasticsearch.painless.WriterConstants.CLASS_NAME; import static org.objectweb.asm.Opcodes.H_INVOKEINTERFACE; @@ -39,251 +39,205 @@ import static org.objectweb.asm.Opcodes.H_INVOKEVIRTUAL; import static org.objectweb.asm.Opcodes.H_NEWINVOKESPECIAL; /** - * Reference to a function or lambda. - *
<p>
        - * Once you have created one of these, you have "everything you need" to call {@link LambdaBootstrap} - * either statically from bytecode with invokedynamic, or at runtime from Java. + * Contains all the values necessary to write the instruction to initiate a + * {@link LambdaBootstrap} for either a function reference or a user-defined + * lambda function. */ public class FunctionRef { + /** + * Creates a new FunctionRef which will resolve {@code type::call} from the whitelist. + * @param painlessLookup the whitelist against which this script is being compiled + * @param localMethods user-defined and synthetic methods generated directly on the script class + * @param location the character number within the script at compile-time + * @param targetClass functional interface type to implement. + * @param typeName the left hand side of a method reference expression + * @param methodName the right hand side of a method reference expression + * @param numberOfCaptures number of captured arguments + */ + public static FunctionRef create(PainlessLookup painlessLookup, Map localMethods, Location location, + Class targetClass, String typeName, String methodName, int numberOfCaptures) { + + Objects.requireNonNull(painlessLookup); + Objects.requireNonNull(targetClass); + Objects.requireNonNull(typeName); + Objects.requireNonNull(methodName); + + String targetClassName = PainlessLookupUtility.typeToCanonicalTypeName(targetClass); + PainlessMethod interfaceMethod; + + try { + interfaceMethod = painlessLookup.lookupFunctionalInterfacePainlessMethod(targetClass); + + if (interfaceMethod == null) { + throw new IllegalArgumentException("cannot convert function reference [" + typeName + "::" + methodName + "] " + + "to a non-functional interface [" + targetClassName + "]"); + } + + String interfaceMethodName = interfaceMethod.javaMethod.getName(); + MethodType interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); + String delegateClassName; + boolean isDelegateInterface; + int delegateInvokeType; + String delegateMethodName; + MethodType delegateMethodType; + + Class delegateMethodReturnType; + List> delegateMethodParameters; + int interfaceTypeParametersSize = interfaceMethod.typeParameters.size(); + + if ("this".equals(typeName)) { + Objects.requireNonNull(localMethods); + + if (numberOfCaptures < 0) { + throw new IllegalStateException("internal error"); + } + + String localMethodKey = Locals.buildLocalMethodKey(methodName, numberOfCaptures + interfaceTypeParametersSize); + LocalMethod localMethod = localMethods.get(localMethodKey); + + if (localMethod == null) { + throw new IllegalArgumentException("function reference [this::" + localMethodKey + "] " + + "matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " + + "not found" + (localMethodKey.contains("$") ? 
" due to an incorrect number of arguments" : "") + ); + } + + delegateClassName = CLASS_NAME; + isDelegateInterface = false; + delegateInvokeType = H_INVOKESTATIC; + delegateMethodName = localMethod.name; + delegateMethodType = localMethod.methodType; + + delegateMethodReturnType = localMethod.returnType; + delegateMethodParameters = localMethod.typeParameters; + } else if ("new".equals(methodName)) { + if (numberOfCaptures != 0) { + throw new IllegalStateException("internal error"); + } + + PainlessConstructor painlessConstructor = painlessLookup.lookupPainlessConstructor(typeName, interfaceTypeParametersSize); + + if (painlessConstructor == null) { + throw new IllegalArgumentException("function reference [" + typeName + "::new/" + interfaceTypeParametersSize + "] " + + "matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " + + "not found"); + } + + delegateClassName = painlessConstructor.javaConstructor.getDeclaringClass().getName(); + isDelegateInterface = false; + delegateInvokeType = H_NEWINVOKESPECIAL; + delegateMethodName = PainlessLookupUtility.CONSTRUCTOR_NAME; + delegateMethodType = painlessConstructor.methodType; + + delegateMethodReturnType = painlessConstructor.javaConstructor.getDeclaringClass(); + delegateMethodParameters = painlessConstructor.typeParameters; + } else { + if (numberOfCaptures != 0 && numberOfCaptures != 1) { + throw new IllegalStateException("internal error"); + } + + boolean captured = numberOfCaptures == 1; + PainlessMethod painlessMethod = + painlessLookup.lookupPainlessMethod(typeName, true, methodName, interfaceTypeParametersSize); + + if (painlessMethod == null) { + painlessMethod = painlessLookup.lookupPainlessMethod(typeName, false, methodName, + captured ? interfaceTypeParametersSize : interfaceTypeParametersSize - 1); + + if (painlessMethod == null) { + throw new IllegalArgumentException( + "function reference " + "[" + typeName + "::" + methodName + "/" + interfaceTypeParametersSize + "] " + + "matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " + + "not found"); + } + } else if (captured) { + throw new IllegalStateException("internal error"); + } + + delegateClassName = painlessMethod.javaMethod.getDeclaringClass().getName(); + isDelegateInterface = painlessMethod.javaMethod.getDeclaringClass().isInterface(); + + if (Modifier.isStatic(painlessMethod.javaMethod.getModifiers())) { + delegateInvokeType = H_INVOKESTATIC; + } else if (isDelegateInterface) { + delegateInvokeType = H_INVOKEINTERFACE; + } else { + delegateInvokeType = H_INVOKEVIRTUAL; + } + + delegateMethodName = painlessMethod.javaMethod.getName(); + delegateMethodType = painlessMethod.methodType; + + delegateMethodReturnType = painlessMethod.returnType; + + if (delegateMethodType.parameterList().size() > painlessMethod.typeParameters.size()) { + delegateMethodParameters = new ArrayList<>(painlessMethod.typeParameters); + delegateMethodParameters.add(0, delegateMethodType.parameterType(0)); + } else { + delegateMethodParameters = painlessMethod.typeParameters; + } + } + + if (location != null) { + for (int typeParameter = 0; typeParameter < interfaceTypeParametersSize; ++typeParameter) { + Class from = interfaceMethod.typeParameters.get(typeParameter); + Class to = delegateMethodParameters.get(numberOfCaptures + typeParameter); + AnalyzerCaster.getLegalCast(location, from, to, false, true); + } + + if (interfaceMethod.returnType != void.class) { + AnalyzerCaster.getLegalCast(location, 
delegateMethodReturnType, interfaceMethod.returnType, false, true); + } + } + + MethodType factoryMethodType = MethodType.methodType(targetClass, + delegateMethodType.dropParameterTypes(numberOfCaptures, delegateMethodType.parameterCount())); + delegateMethodType = delegateMethodType.dropParameterTypes(0, numberOfCaptures); + + return new FunctionRef(interfaceMethodName, interfaceMethodType, + delegateClassName, isDelegateInterface, delegateInvokeType, delegateMethodName, delegateMethodType, + factoryMethodType + ); + } catch (IllegalArgumentException iae) { + if (location != null) { + throw location.createError(iae); + } + + throw iae; + } + } + /** functional interface method name */ public final String interfaceMethodName; - /** factory (CallSite) method signature */ - public final MethodType factoryMethodType; /** functional interface method signature */ public final MethodType interfaceMethodType; /** class of the delegate method to be called */ public final String delegateClassName; + /** whether a call is made on a delegate interface */ + public final boolean isDelegateInterface; /** the invocation type of the delegate method */ public final int delegateInvokeType; /** the name of the delegate method */ public final String delegateMethodName; /** delegate method signature */ public final MethodType delegateMethodType; + /** factory (CallSite) method signature */ + public final MethodType factoryMethodType; - /** interface method */ - public final PainlessMethod interfaceMethod; - /** delegate method type parameters */ - public final List> delegateTypeParameters; - /** delegate method return type */ - public final Class delegateReturnType; + private FunctionRef( + String interfaceMethodName, MethodType interfaceMethodType, + String delegateClassName, boolean isDelegateInterface, + int delegateInvokeType, String delegateMethodName, MethodType delegateMethodType, + MethodType factoryMethodType) { - /** factory method type descriptor */ - public final String factoryDescriptor; - /** functional interface method as type */ - public final Type interfaceType; - /** delegate method type method as type */ - public final Type delegateType; - - /** whether a call is made on a delegate interface */ - public final boolean isDelegateInterface; - - /** - * Creates a new FunctionRef, which will resolve {@code type::call} from the whitelist. - * @param painlessLookup the whitelist against which this script is being compiled - * @param expected functional interface type to implement. 
- * @param type the left hand side of a method reference expression - * @param call the right hand side of a method reference expression - * @param numCaptures number of captured arguments - */ - public static FunctionRef resolveFromLookup( - PainlessLookup painlessLookup, Class expected, String type, String call, int numCaptures) { - - if ("new".equals(call)) { - return new FunctionRef(expected, painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod, - lookup(painlessLookup, expected, type), numCaptures); - } else { - return new FunctionRef(expected, painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod, - lookup(painlessLookup, expected, type, call, numCaptures > 0), numCaptures); - } - } - - /** - * Creates a new FunctionRef (already resolved) - * @param expected functional interface type to implement - * @param interfaceMethod functional interface method - * @param delegateConstructor implementation constructor - * @param numCaptures number of captured arguments - */ - public FunctionRef(Class expected, PainlessMethod interfaceMethod, PainlessConstructor delegateConstructor, int numCaptures) { - Constructor javaConstructor = delegateConstructor.javaConstructor; - MethodType delegateMethodType = delegateConstructor.methodType; - - this.interfaceMethodName = interfaceMethod.javaMethod.getName(); - this.factoryMethodType = MethodType.methodType(expected, - delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount())); - this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); - - this.delegateClassName = javaConstructor.getDeclaringClass().getName(); - this.isDelegateInterface = false; - this.delegateInvokeType = H_NEWINVOKESPECIAL; - this.delegateMethodName = PainlessLookupUtility.CONSTRUCTOR_NAME; - this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures); - - this.interfaceMethod = interfaceMethod; - this.delegateTypeParameters = delegateConstructor.typeParameters; - this.delegateReturnType = void.class; - - this.factoryDescriptor = factoryMethodType.toMethodDescriptorString(); - this.interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString()); - this.delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString()); - } - - /** - * Creates a new FunctionRef (already resolved) - * @param expected functional interface type to implement - * @param interfaceMethod functional interface method - * @param delegateMethod implementation method - * @param numCaptures number of captured arguments - */ - public FunctionRef(Class expected, PainlessMethod interfaceMethod, PainlessMethod delegateMethod, int numCaptures) { - MethodType delegateMethodType = delegateMethod.methodType; - - this.interfaceMethodName = interfaceMethod.javaMethod.getName(); - this.factoryMethodType = MethodType.methodType(expected, - delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount())); - this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); - - this.delegateClassName = delegateMethod.javaMethod.getDeclaringClass().getName(); - this.isDelegateInterface = delegateMethod.javaMethod.getDeclaringClass().isInterface(); - - if (Modifier.isStatic(delegateMethod.javaMethod.getModifiers())) { - this.delegateInvokeType = H_INVOKESTATIC; - } else if (delegateMethod.javaMethod.getDeclaringClass().isInterface()) { - this.delegateInvokeType = H_INVOKEINTERFACE; - } else { - this.delegateInvokeType = H_INVOKEVIRTUAL; - } - - 
this.delegateMethodName = delegateMethod.javaMethod.getName(); - this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures); - - this.interfaceMethod = interfaceMethod; - this.delegateTypeParameters = delegateMethod.typeParameters; - this.delegateReturnType = delegateMethod.returnType; - - this.factoryDescriptor = factoryMethodType.toMethodDescriptorString(); - this.interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString()); - this.delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString()); - } - - /** - * Creates a new FunctionRef (already resolved) - * @param expected functional interface type to implement - * @param interfaceMethod functional interface method - * @param delegateMethod implementation method - * @param numCaptures number of captured arguments - */ - public FunctionRef(Class expected, PainlessMethod interfaceMethod, LocalMethod delegateMethod, int numCaptures) { - MethodType delegateMethodType = delegateMethod.methodType; - - this.interfaceMethodName = interfaceMethod.javaMethod.getName(); - this.factoryMethodType = MethodType.methodType(expected, - delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount())); - this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); - - this.delegateClassName = CLASS_NAME; - this.isDelegateInterface = false; - this.delegateInvokeType = H_INVOKESTATIC; - - this.delegateMethodName = delegateMethod.name; - this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures); - - this.interfaceMethod = interfaceMethod; - this.delegateTypeParameters = delegateMethod.typeParameters; - this.delegateReturnType = delegateMethod.returnType; - - this.factoryDescriptor = factoryMethodType.toMethodDescriptorString(); - this.interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString()); - this.delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString()); - } - - /** - * Creates a new FunctionRef (low level). - * It is for runtime use only. - */ - public FunctionRef(Class expected, - PainlessMethod interfaceMethod, String delegateMethodName, MethodType delegateMethodType, int numCaptures) { - this.interfaceMethodName = interfaceMethod.javaMethod.getName(); - this.factoryMethodType = MethodType.methodType(expected, - delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount())); - this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); - - this.delegateClassName = CLASS_NAME; - this.delegateInvokeType = H_INVOKESTATIC; + this.interfaceMethodName = interfaceMethodName; + this.interfaceMethodType = interfaceMethodType; + this.delegateClassName = delegateClassName; + this.isDelegateInterface = isDelegateInterface; + this.delegateInvokeType = delegateInvokeType; this.delegateMethodName = delegateMethodName; - this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures); - this.isDelegateInterface = false; - - this.interfaceMethod = null; - this.delegateTypeParameters = null; - this.delegateReturnType = null; - - this.factoryDescriptor = null; - this.interfaceType = null; - this.delegateType = null; - } - - /** - * Looks up {@code type} from the whitelist, and returns a matching constructor. - */ - private static PainlessConstructor lookup(PainlessLookup painlessLookup, Class expected, String type) { - // check its really a functional interface - // for e.g. 
Comparable - PainlessMethod method = painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod; - if (method == null) { - throw new IllegalArgumentException("Cannot convert function reference [" + type + "::new] " + - "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"); - } - - // lookup requested constructor - PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(painlessLookup.getJavaClassFromPainlessType(type)); - PainlessConstructor impl = struct.constructors.get(PainlessLookupUtility.buildPainlessConstructorKey(method.typeParameters.size())); - - if (impl == null) { - throw new IllegalArgumentException("Unknown reference [" + type + "::new] matching [" + expected + "]"); - } - - return impl; - } - - /** - * Looks up {@code type::call} from the whitelist, and returns a matching method. - */ - private static PainlessMethod lookup(PainlessLookup painlessLookup, Class expected, - String type, String call, boolean receiverCaptured) { - // check its really a functional interface - // for e.g. Comparable - PainlessMethod method = painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod; - if (method == null) { - throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"); - } - - // lookup requested method - PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(painlessLookup.getJavaClassFromPainlessType(type)); - final PainlessMethod impl; - // look for a static impl first - PainlessMethod staticImpl = - struct.staticMethods.get(PainlessLookupUtility.buildPainlessMethodKey(call, method.typeParameters.size())); - if (staticImpl == null) { - // otherwise a virtual impl - final int arity; - if (receiverCaptured) { - // receiver captured - arity = method.typeParameters.size(); - } else { - // receiver passed - arity = method.typeParameters.size() - 1; - } - impl = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey(call, arity)); - } else { - impl = staticImpl; - } - if (impl == null) { - throw new IllegalArgumentException("Unknown reference [" + type + "::" + call + "] matching " + - "[" + expected + "]"); - } - return impl; + this.delegateMethodType = delegateMethodType; + this.factoryMethodType = factoryMethodType; } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java index f2c7e02c637..e07c016ddd0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java @@ -32,6 +32,9 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; + +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToJavaType; /** * Tracks user defined methods and variables across compilation phases. @@ -74,7 +77,10 @@ public final class Locals { /** Creates a new local variable scope (e.g. loop) inside the current scope */ public static Locals newLocalScope(Locals currentScope) { - return new Locals(currentScope); + Locals locals = new Locals(currentScope); + locals.methods = currentScope.methods; + + return locals; } /** @@ -82,9 +88,13 @@ public final class Locals { *

        * This is just like {@link #newFunctionScope}, except the captured parameters are made read-only. */
-    public static Locals newLambdaScope(Locals programScope, Class<?> returnType, List<Parameter> parameters,
+    public static Locals newLambdaScope(Locals programScope, String name, Class<?> returnType, List<Parameter> parameters,
                                         int captureCount, int maxLoopCounter) {
         Locals locals = new Locals(programScope, programScope.painlessLookup, returnType, KEYWORDS);
+        locals.methods = programScope.methods;
+        List<Class<?>> typeParameters = parameters.stream().map(parameter -> typeToJavaType(parameter.clazz)).collect(Collectors.toList());
+        locals.methods.put(buildLocalMethodKey(name, parameters.size()), new LocalMethod(name, returnType, typeParameters,
+                MethodType.methodType(typeToJavaType(returnType), typeParameters)));
         for (int i = 0; i < parameters.size(); i++) {
             Parameter parameter = parameters.get(i);
             // TODO: allow non-captures to be r/w:
@@ -104,6 +114,7 @@ public final class Locals {
     /** Creates a new function scope inside the current scope */
     public static Locals newFunctionScope(Locals programScope, Class<?> returnType, List<Parameter> parameters, int maxLoopCounter) {
         Locals locals = new Locals(programScope, programScope.painlessLookup, returnType, KEYWORDS);
+        locals.methods = programScope.methods;
         for (Parameter parameter : parameters) {
             locals.addVariable(parameter.location, parameter.clazz, parameter.name, false);
         }
@@ -118,6 +129,7 @@ public final class Locals {
     public static Locals newMainMethodScope(ScriptClassInfo scriptClassInfo, Locals programScope, int maxLoopCounter) {
         Locals locals = new Locals(
             programScope, programScope.painlessLookup, scriptClassInfo.getExecuteMethodReturnType(), KEYWORDS);
+        locals.methods = programScope.methods;
         // This reference. Internal use only.
         locals.defineVariable(null, Object.class, THIS, true);
@@ -136,6 +148,7 @@ public final class Locals {
     /** Creates a new program scope: the list of methods. It is the parent for all methods */
     public static Locals newProgramScope(PainlessLookup painlessLookup, Collection<LocalMethod> methods) {
         Locals locals = new Locals(null, painlessLookup, null, null);
+        locals.methods = new HashMap<>();
         for (LocalMethod method : methods) {
             locals.addMethod(method);
         }
@@ -167,15 +180,8 @@
     }
     /** Looks up a method. Returns null if the method does not exist. */
-    public LocalMethod getMethod(String key) {
-        LocalMethod method = lookupMethod(key);
-        if (method != null) {
-            return method;
-        }
-        if (parent != null) {
-            return parent.getMethod(key);
-        }
-        return null;
+    public LocalMethod getMethod(String methodName, int methodArity) {
+        return methods.get(buildLocalMethodKey(methodName, methodArity));
     }
     /** Creates a new variable. Throws IAE if the variable has already been defined (even in a parent) or reserved. */
@@ -260,15 +266,10 @@
         return variables.get(name);
     }
-    /** Looks up a method at this scope only. Returns null if the method does not exist. */
-    private LocalMethod lookupMethod(String key) {
-        if (methods == null) {
-            return null;
-        }
-        return methods.get(key);
+    public Map<String, LocalMethod> getMethods() {
+        return Collections.unmodifiableMap(methods);
     }
-    /** Defines a variable at this scope internally.
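
Because the method table is now created once in newProgramScope and shared by every child scope (see newLocalScope, newFunctionScope, and newMainMethodScope above), a lambda can register itself as a LocalMethod and later be resolved by name and arity exactly like a user-written function. A toy model of that shared table; the "name/arity" key shape is our assumption standing in for buildLocalMethodKey:

    import java.util.HashMap;
    import java.util.Map;

    public class MethodTableSketch {
        // assumed key shape; the real key comes from Locals.buildLocalMethodKey
        static String key(String name, int arity) {
            return name + "/" + arity;
        }

        public static void main(String[] args) {
            Map<String, String> methods = new HashMap<>();
            methods.put(key("lambda$0", 1), "synthetic method holding a lambda body");
            // any nested scope sharing the same map resolves it like a function
            System.out.println(methods.get(key("lambda$0", 1)));
        }
    }
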
*/ private Variable defineVariable(Location location, Class type, String name, boolean readonly) { if (variables == null) { @@ -281,14 +282,9 @@ public final class Locals { } private void addMethod(LocalMethod method) { - if (methods == null) { - methods = new HashMap<>(); - } methods.put(buildLocalMethodKey(method.name, method.typeParameters.size()), method); - // TODO: check result } - private int getNextSlot() { return nextSlotNumber; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java index 72435562a3b..dca638b3ddd 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java @@ -56,6 +56,7 @@ import static org.elasticsearch.painless.WriterConstants.DEF_TO_SHORT_EXPLICIT; import static org.elasticsearch.painless.WriterConstants.DEF_TO_SHORT_IMPLICIT; import static org.elasticsearch.painless.WriterConstants.DEF_UTIL_TYPE; import static org.elasticsearch.painless.WriterConstants.INDY_STRING_CONCAT_BOOTSTRAP_HANDLE; +import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE; import static org.elasticsearch.painless.WriterConstants.MAX_INDY_STRING_CONCAT_ARGS; import static org.elasticsearch.painless.WriterConstants.PAINLESS_ERROR_TYPE; import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_BOOLEAN; @@ -134,52 +135,52 @@ public final class MethodWriter extends GeneratorAdapter { public void writeCast(PainlessCast cast) { if (cast != null) { - if (cast.from == char.class && cast.to == String.class) { + if (cast.originalType == char.class && cast.targetType == String.class) { invokeStatic(UTILITY_TYPE, CHAR_TO_STRING); - } else if (cast.from == String.class && cast.to == char.class) { + } else if (cast.originalType == String.class && cast.targetType == char.class) { invokeStatic(UTILITY_TYPE, STRING_TO_CHAR); - } else if (cast.unboxFrom != null) { - unbox(getType(cast.unboxFrom)); - writeCast(cast.from, cast.to); - } else if (cast.unboxTo != null) { - if (cast.from == def.class) { - if (cast.explicit) { - if (cast.to == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BOOLEAN); - else if (cast.to == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BYTE_EXPLICIT); - else if (cast.to == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_SHORT_EXPLICIT); - else if (cast.to == Character.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_CHAR_EXPLICIT); - else if (cast.to == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_INT_EXPLICIT); - else if (cast.to == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_LONG_EXPLICIT); - else if (cast.to == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_FLOAT_EXPLICIT); - else if (cast.to == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_DOUBLE_EXPLICIT); + } else if (cast.unboxOriginalType != null) { + unbox(getType(cast.unboxOriginalType)); + writeCast(cast.originalType, cast.targetType); + } else if (cast.unboxTargetType != null) { + if (cast.originalType == def.class) { + if (cast.explicitCast) { + if (cast.targetType == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BOOLEAN); + else if (cast.targetType == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BYTE_EXPLICIT); + else if (cast.targetType == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_SHORT_EXPLICIT); + else if (cast.targetType == Character.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_CHAR_EXPLICIT); + else if 
(cast.targetType == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_INT_EXPLICIT); + else if (cast.targetType == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_LONG_EXPLICIT); + else if (cast.targetType == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_FLOAT_EXPLICIT); + else if (cast.targetType == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_DOUBLE_EXPLICIT); else { throw new IllegalStateException("Illegal tree structure."); } } else { - if (cast.to == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BOOLEAN); - else if (cast.to == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BYTE_IMPLICIT); - else if (cast.to == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_SHORT_IMPLICIT); - else if (cast.to == Character.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_CHAR_IMPLICIT); - else if (cast.to == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_INT_IMPLICIT); - else if (cast.to == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_LONG_IMPLICIT); - else if (cast.to == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_FLOAT_IMPLICIT); - else if (cast.to == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_DOUBLE_IMPLICIT); + if (cast.targetType == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BOOLEAN); + else if (cast.targetType == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BYTE_IMPLICIT); + else if (cast.targetType == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_SHORT_IMPLICIT); + else if (cast.targetType == Character.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_CHAR_IMPLICIT); + else if (cast.targetType == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_INT_IMPLICIT); + else if (cast.targetType == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_LONG_IMPLICIT); + else if (cast.targetType == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_FLOAT_IMPLICIT); + else if (cast.targetType == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_DOUBLE_IMPLICIT); else { throw new IllegalStateException("Illegal tree structure."); } } } else { - writeCast(cast.from, cast.to); - unbox(getType(cast.unboxTo)); + writeCast(cast.originalType, cast.targetType); + unbox(getType(cast.unboxTargetType)); } - } else if (cast.boxFrom != null) { - box(getType(cast.boxFrom)); - writeCast(cast.from, cast.to); - } else if (cast.boxTo != null) { - writeCast(cast.from, cast.to); - box(getType(cast.boxTo)); + } else if (cast.boxOriginalType != null) { + box(getType(cast.boxOriginalType)); + writeCast(cast.originalType, cast.targetType); + } else if (cast.boxTargetType != null) { + writeCast(cast.originalType, cast.targetType); + box(getType(cast.boxTargetType)); } else { - writeCast(cast.from, cast.to); + writeCast(cast.originalType, cast.targetType); } } } @@ -439,4 +440,18 @@ public final class MethodWriter extends GeneratorAdapter { invokeVirtual(type, method); } } + + public void invokeLambdaCall(FunctionRef functionRef) { + invokeDynamic( + functionRef.interfaceMethodName, + functionRef.factoryMethodType.toMethodDescriptorString(), + LAMBDA_BOOTSTRAP_HANDLE, + Type.getMethodType(functionRef.interfaceMethodType.toMethodDescriptorString()), + functionRef.delegateClassName, + functionRef.delegateInvokeType, + functionRef.delegateMethodName, + Type.getMethodType(functionRef.delegateMethodType.toMethodDescriptorString()), + functionRef.isDelegateInterface ? 
1 : 0 + ); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java index 7bef028c7d1..e4988103bc6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java @@ -57,7 +57,7 @@ public class PainlessExplainError extends Error { if (objectToExplain != null) { toString = objectToExplain.toString(); javaClassName = objectToExplain.getClass().getName(); - PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(objectToExplain.getClass()); + PainlessClass struct = painlessLookup.lookupPainlessClass(objectToExplain.getClass()); if (struct != null) { painlessClassName = PainlessLookupUtility.typeToCanonicalTypeName(objectToExplain.getClass()); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java index 6d4b4552696..7de8353194d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java @@ -21,6 +21,7 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.def; import java.lang.invoke.MethodType; import java.lang.reflect.Field; @@ -190,7 +191,7 @@ public class ScriptClassInfo { componentType = componentType.getComponentType(); } - if (painlessLookup.getPainlessStructFromJavaClass(componentType) == null) { + if (componentType != def.class && painlessLookup.lookupPainlessClass(componentType) == null) { throw new IllegalArgumentException(unknownErrorMessageSource.apply(componentType)); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java index db3aeff0483..9c3d991080d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java @@ -120,7 +120,7 @@ public final class WriterConstants { DEF_BOOTSTRAP_METHOD.getDescriptor(), false); public static final Type DEF_BOOTSTRAP_DELEGATE_TYPE = Type.getType(DefBootstrap.class); public static final Method DEF_BOOTSTRAP_DELEGATE_METHOD = getAsmMethod(CallSite.class, "bootstrap", PainlessLookup.class, - MethodHandles.Lookup.class, String.class, MethodType.class, int.class, int.class, Object[].class); + Map.class, MethodHandles.Lookup.class, String.class, MethodType.class, int.class, int.class, Object[].class); public static final Type DEF_UTIL_TYPE = Type.getType(Def.class); public static final Method DEF_TO_BOOLEAN = getAsmMethod(boolean.class, "DefToboolean" , Object.class); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java index f1db35636b4..9279093cf31 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java @@ -75,7 
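
invokeLambdaCall above packs the resolved FunctionRef into the bootstrap arguments of a single invokedynamic instruction handled by LAMBDA_BOOTSTRAP_HANDLE. The JDK's LambdaMetafactory plays the analogous bootstrap role for javac-compiled lambdas, which makes a convenient runnable illustration of what such a call site manufactures (the String::valueOf example is ours, not part of the patch):

    import java.lang.invoke.CallSite;
    import java.lang.invoke.LambdaMetafactory;
    import java.lang.invoke.MethodHandle;
    import java.lang.invoke.MethodHandles;
    import java.lang.invoke.MethodType;
    import java.util.function.Function;

    public class BootstrapAnalogy {
        public static void main(String[] args) throws Throwable {
            MethodHandles.Lookup lookup = MethodHandles.lookup();
            MethodHandle delegate = lookup.findStatic(String.class, "valueOf",
                    MethodType.methodType(String.class, Object.class));
            CallSite site = LambdaMetafactory.metafactory(lookup,
                    "apply",                                            // interface method name
                    MethodType.methodType(Function.class),              // factory (CallSite) type
                    MethodType.methodType(Object.class, Object.class),  // erased interface method type
                    delegate,                                           // delegate method
                    MethodType.methodType(String.class, Object.class)); // instantiated method type
            @SuppressWarnings("unchecked")
            Function<Object, String> valueOf = (Function<Object, String>) site.getTarget().invoke();
            System.out.println(valueOf.apply(42)); // prints 42
        }
    }
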
+75,7 @@ final class EnhancedPainlessLexer extends PainlessLexer { @Override protected boolean isType(String name) { - return painlessLookup.isSimplePainlessType(name); + return painlessLookup.isValidCanonicalClassName(name); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java index 2440fb45d4d..f87f8a134b8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java @@ -22,46 +22,55 @@ package org.elasticsearch.painless.lookup; public class PainlessCast { /** Create a standard cast with no boxing/unboxing. */ - public static PainlessCast standard(Class from, Class to, boolean explicit) { - return new PainlessCast(from, to, explicit, null, null, null, null); + public static PainlessCast originalTypetoTargetType(Class originalType, Class targetType, boolean explicitCast) { + return new PainlessCast(originalType, targetType, explicitCast, null, null, null, null); } - /** Create a cast where the from type will be unboxed, and then the cast will be performed. */ - public static PainlessCast unboxFrom(Class from, Class to, boolean explicit, Class unboxFrom) { - return new PainlessCast(from, to, explicit, unboxFrom, null, null, null); + /** Create a cast where the original type will be unboxed, and then the cast will be performed. */ + public static PainlessCast unboxOriginalType( + Class originalType, Class targetType, boolean explicitCast, Class unboxOriginalType) { + + return new PainlessCast(originalType, targetType, explicitCast, unboxOriginalType, null, null, null); } - /** Create a cast where the to type will be unboxed, and then the cast will be performed. */ - public static PainlessCast unboxTo(Class from, Class to, boolean explicit, Class unboxTo) { - return new PainlessCast(from, to, explicit, null, unboxTo, null, null); + /** Create a cast where the target type will be unboxed, and then the cast will be performed. */ + public static PainlessCast unboxTargetType( + Class originalType, Class targetType, boolean explicitCast, Class unboxTargetType) { + + return new PainlessCast(originalType, targetType, explicitCast, null, unboxTargetType, null, null); } - /** Create a cast where the from type will be boxed, and then the cast will be performed. */ - public static PainlessCast boxFrom(Class from, Class to, boolean explicit, Class boxFrom) { - return new PainlessCast(from, to, explicit, null, null, boxFrom, null); + /** Create a cast where the original type will be boxed, and then the cast will be performed. */ + public static PainlessCast boxOriginalType( + Class originalType, Class targetType, boolean explicitCast, Class boxOriginalType) { + + return new PainlessCast(originalType, targetType, explicitCast, null, null, boxOriginalType, null); } - /** Create a cast where the to type will be boxed, and then the cast will be performed. */ - public static PainlessCast boxTo(Class from, Class to, boolean explicit, Class boxTo) { - return new PainlessCast(from, to, explicit, null, null, null, boxTo); + /** Create a cast where the target type will be boxed, and then the cast will be performed. 
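
The renamed PainlessCast factories spell out which side of the conversion each extra class argument describes. For instance, a plain implicit int-to-long widening needs no boxing on either side (an illustrative pair of types, not taken from AnalyzerCaster):

    import org.elasticsearch.painless.lookup.PainlessCast;

    final class CastFactorySketch {
        static PainlessCast implicitIntToLong() {
            // no boxing/unboxing: only original type, target type, and explicitness
            return PainlessCast.originalTypetoTargetType(int.class, long.class, false);
        }
    }
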
*/ + public static PainlessCast boxTargetType( + Class originalType, Class targetType, boolean explicitCast, Class boxTargetType) { + + return new PainlessCast(originalType, targetType, explicitCast, null, null, null, boxTargetType); } - public final Class from; - public final Class to; - public final boolean explicit; - public final Class unboxFrom; - public final Class unboxTo; - public final Class boxFrom; - public final Class boxTo; + public final Class originalType; + public final Class targetType; + public final boolean explicitCast; + public final Class unboxOriginalType; + public final Class unboxTargetType; + public final Class boxOriginalType; + public final Class boxTargetType; - private PainlessCast(Class from, Class to, boolean explicit, - Class unboxFrom, Class unboxTo, Class boxFrom, Class boxTo) { - this.from = from; - this.to = to; - this.explicit = explicit; - this.unboxFrom = unboxFrom; - this.unboxTo = unboxTo; - this.boxFrom = boxFrom; - this.boxTo = boxTo; + private PainlessCast(Class originalType, Class targetType, boolean explicitCast, + Class unboxOriginalType, Class unboxTargetType, Class boxOriginalType, Class boxTargetType) { + + this.originalType = originalType; + this.targetType = targetType; + this.explicitCast = explicitCast; + this.unboxOriginalType = unboxOriginalType; + this.unboxTargetType = unboxTargetType; + this.boxOriginalType = boxOriginalType; + this.boxTargetType = boxTargetType; } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java index 835bfb5c505..50bb79dcfbd 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java @@ -35,13 +35,13 @@ public final class PainlessClass { public final Map getterMethodHandles; public final Map setterMethodHandles; - public final PainlessMethod functionalMethod; + public final PainlessMethod functionalInterfaceMethod; PainlessClass(Map constructors, Map staticMethods, Map methods, Map staticFields, Map fields, Map getterMethodHandles, Map setterMethodHandles, - PainlessMethod functionalMethod) { + PainlessMethod functionalInterfaceMethod) { this.constructors = Collections.unmodifiableMap(constructors); @@ -54,6 +54,6 @@ public final class PainlessClass { this.getterMethodHandles = Collections.unmodifiableMap(getterMethodHandles); this.setterMethodHandles = Collections.unmodifiableMap(setterMethodHandles); - this.functionalMethod = functionalMethod; + this.functionalInterfaceMethod = functionalInterfaceMethod; } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java index 866f711ba0f..a61215e9ed7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java @@ -35,7 +35,7 @@ final class PainlessClassBuilder { final Map getterMethodHandles; final Map setterMethodHandles; - PainlessMethod functionalMethod; + PainlessMethod functionalInterfaceMethod; PainlessClassBuilder() { constructors = new HashMap<>(); @@ -49,11 +49,11 @@ final class PainlessClassBuilder { getterMethodHandles = new HashMap<>(); setterMethodHandles = new HashMap<>(); - 
functionalMethod = null; + functionalInterfaceMethod = null; } PainlessClass build() { return new PainlessClass(constructors, staticMethods, methods, staticFields, fields, - getterMethodHandles, setterMethodHandles, functionalMethod); + getterMethodHandles, setterMethodHandles, functionalInterfaceMethod); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java index f316e1438ec..a55d6c3730e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java @@ -20,24 +20,20 @@ package org.elasticsearch.painless.lookup; import java.lang.invoke.MethodHandle; +import java.lang.reflect.Field; public final class PainlessField { - public final String name; - public final Class target; - public final Class clazz; - public final String javaName; - public final int modifiers; - public final MethodHandle getter; - public final MethodHandle setter; + public final Field javaField; + public final Class typeParameter; - PainlessField(String name, String javaName, Class target, Class clazz, int modifiers, - MethodHandle getter, MethodHandle setter) { - this.name = name; - this.javaName = javaName; - this.target = target; - this.clazz = clazz; - this.modifiers = modifiers; - this.getter = getter; - this.setter = setter; + public final MethodHandle getterMethodHandle; + public final MethodHandle setterMethodHandle; + + PainlessField(Field javaField, Class typeParameter, MethodHandle getterMethodHandle, MethodHandle setterMethodHandle) { + this.javaField = javaField; + this.typeParameter = typeParameter; + + this.getterMethodHandle = getterMethodHandle; + this.setterMethodHandle = setterMethodHandle; } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java index 67c04498a58..55855a3cb1e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java @@ -19,41 +19,222 @@ package org.elasticsearch.painless.lookup; -import java.util.Collection; +import java.lang.invoke.MethodHandle; import java.util.Collections; import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Function; + +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.DEF_CLASS_NAME; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessConstructorKey; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessFieldKey; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessMethodKey; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToBoxedType; -/** - * The entire API for Painless. Also used as a whitelist for checking for legal - * methods and fields during at both compile-time and runtime. 
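
Collapsing PainlessField onto the reflective java.lang.reflect.Field removes the duplicated name/javaName/target/modifiers bookkeeping; anything the old members carried can be derived from javaField on demand. A small sketch of that derivation (the describe helper is ours):

    import java.lang.reflect.Field;
    import java.lang.reflect.Modifier;

    final class FieldInfoSketch {
        static String describe(Field javaField) {
            // declaring class, name, and modifiers all come from the one Field
            return javaField.getDeclaringClass().getName() + "." + javaField.getName()
                    + (Modifier.isStatic(javaField.getModifiers()) ? " (static)" : "");
        }
    }
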
- */
 public final class PainlessLookup {
-    public Collection<Class<?>> getStructs() {
-        return classesToPainlessClasses.keySet();
-    }
-
     private final Map<String, Class<?>> canonicalClassNamesToClasses;
     private final Map<Class<?>, PainlessClass> classesToPainlessClasses;
     PainlessLookup(Map<String, Class<?>> canonicalClassNamesToClasses, Map<Class<?>, PainlessClass> classesToPainlessClasses) {
+        Objects.requireNonNull(canonicalClassNamesToClasses);
+        Objects.requireNonNull(classesToPainlessClasses);
+
         this.canonicalClassNamesToClasses = Collections.unmodifiableMap(canonicalClassNamesToClasses);
         this.classesToPainlessClasses = Collections.unmodifiableMap(classesToPainlessClasses);
     }
-    public Class<?> getClassFromBinaryName(String painlessType) {
-        return canonicalClassNamesToClasses.get(painlessType.replace('$', '.'));
+    public boolean isValidCanonicalClassName(String canonicalClassName) {
+        Objects.requireNonNull(canonicalClassName);
+
+        return DEF_CLASS_NAME.equals(canonicalClassName) || canonicalClassNamesToClasses.containsKey(canonicalClassName);
     }
-    public boolean isSimplePainlessType(String painlessType) {
-        return canonicalClassNamesToClasses.containsKey(painlessType);
+    public Class<?> canonicalTypeNameToType(String canonicalTypeName) {
+        Objects.requireNonNull(canonicalTypeName);
+
+        return PainlessLookupUtility.canonicalTypeNameToType(canonicalTypeName, canonicalClassNamesToClasses);
     }
-    public PainlessClass getPainlessStructFromJavaClass(Class<?> clazz) {
-        return classesToPainlessClasses.get(clazz);
+    public Set<Class<?>> getClasses() {
+        return classesToPainlessClasses.keySet();
     }
-    public Class<?> getJavaClassFromPainlessType(String painlessType) {
-        return PainlessLookupUtility.canonicalTypeNameToType(painlessType, canonicalClassNamesToClasses);
+    public PainlessClass lookupPainlessClass(Class<?> targetClass) {
+        return classesToPainlessClasses.get(targetClass);
+    }
+
+    public PainlessConstructor lookupPainlessConstructor(String targetCanonicalClassName, int constructorArity) {
+        Objects.requireNonNull(targetCanonicalClassName);
+
+        Class<?> targetClass = canonicalTypeNameToType(targetCanonicalClassName);
+
+        if (targetClass == null) {
+            return null;
+        }
+
+        return lookupPainlessConstructor(targetClass, constructorArity);
+    }
+
+    public PainlessConstructor lookupPainlessConstructor(Class<?> targetClass, int constructorArity) {
+        Objects.requireNonNull(targetClass);
+
+        PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetClass);
+        String painlessConstructorKey = buildPainlessConstructorKey(constructorArity);
+
+        if (targetPainlessClass == null) {
+            return null;
+        }
+
+        PainlessConstructor painlessConstructor = targetPainlessClass.constructors.get(painlessConstructorKey);
+
+        if (painlessConstructor == null) {
+            return null;
+        }
+
+        return painlessConstructor;
+    }
+
+    public PainlessMethod lookupPainlessMethod(String targetCanonicalClassName, boolean isStatic, String methodName, int methodArity) {
+        Objects.requireNonNull(targetCanonicalClassName);
+
+        Class<?> targetClass = canonicalTypeNameToType(targetCanonicalClassName);
+
+        if (targetClass == null) {
+            return null;
+        }
+
+        return lookupPainlessMethod(targetClass, isStatic, methodName, methodArity);
+    }
+
+    public PainlessMethod lookupPainlessMethod(Class<?> targetClass, boolean isStatic, String methodName, int methodArity) {
+        Objects.requireNonNull(targetClass);
+        Objects.requireNonNull(methodName);
+
+        if (targetClass.isPrimitive()) {
+            targetClass = typeToBoxedType(targetClass);
+        }
+
+        PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetClass);
+        String painlessMethodKey = buildPainlessMethodKey(methodName, methodArity);
+
+        if (targetPainlessClass == null) {
+            return null;
+        }
+
+        return isStatic ?
+                targetPainlessClass.staticMethods.get(painlessMethodKey) :
+                targetPainlessClass.methods.get(painlessMethodKey);
+    }
+
+    public PainlessField lookupPainlessField(String targetCanonicalClassName, boolean isStatic, String fieldName) {
+        Objects.requireNonNull(targetCanonicalClassName);
+
+        Class<?> targetClass = canonicalTypeNameToType(targetCanonicalClassName);
+
+        if (targetClass == null) {
+            return null;
+        }
+
+        return lookupPainlessField(targetClass, isStatic, fieldName);
+    }
+
+    public PainlessField lookupPainlessField(Class<?> targetClass, boolean isStatic, String fieldName) {
+        Objects.requireNonNull(targetClass);
+        Objects.requireNonNull(fieldName);
+
+        PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetClass);
+        String painlessFieldKey = buildPainlessFieldKey(fieldName);
+
+        if (targetPainlessClass == null) {
+            return null;
+        }
+
+        PainlessField painlessField = isStatic ?
+                targetPainlessClass.staticFields.get(painlessFieldKey) :
+                targetPainlessClass.fields.get(painlessFieldKey);
+
+        if (painlessField == null) {
+            return null;
+        }
+
+        return painlessField;
+    }
+
+    public PainlessMethod lookupFunctionalInterfacePainlessMethod(Class<?> targetClass) {
+        PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetClass);
+
+        if (targetPainlessClass == null) {
+            return null;
+        }
+
+        return targetPainlessClass.functionalInterfaceMethod;
+    }
+
+    public PainlessMethod lookupRuntimePainlessMethod(Class<?> originalTargetClass, String methodName, int methodArity) {
+        Objects.requireNonNull(originalTargetClass);
+        Objects.requireNonNull(methodName);
+
+        String painlessMethodKey = buildPainlessMethodKey(methodName, methodArity);
+        Function<PainlessClass, PainlessMethod> objectLookup = targetPainlessClass -> targetPainlessClass.methods.get(painlessMethodKey);
+
+        return lookupRuntimePainlessObject(originalTargetClass, objectLookup);
+    }
+
+    public MethodHandle lookupRuntimeGetterMethodHandle(Class<?> originalTargetClass, String getterName) {
+        Objects.requireNonNull(originalTargetClass);
+        Objects.requireNonNull(getterName);
+
+        Function<PainlessClass, MethodHandle> objectLookup = targetPainlessClass -> targetPainlessClass.getterMethodHandles.get(getterName);
+
+        return lookupRuntimePainlessObject(originalTargetClass, objectLookup);
+    }
+
+    public MethodHandle lookupRuntimeSetterMethodHandle(Class<?> originalTargetClass, String setterName) {
+        Objects.requireNonNull(originalTargetClass);
+        Objects.requireNonNull(setterName);
+
+        Function<PainlessClass, MethodHandle> objectLookup = targetPainlessClass -> targetPainlessClass.setterMethodHandles.get(setterName);
+
+        return lookupRuntimePainlessObject(originalTargetClass, objectLookup);
+    }
+
+    private <T> T lookupRuntimePainlessObject(Class<?> originalTargetClass, Function<PainlessClass, T> objectLookup) {
+        Class<?> currentTargetClass = originalTargetClass;
+
+        while (currentTargetClass != null) {
+            PainlessClass targetPainlessClass = classesToPainlessClasses.get(currentTargetClass);
+
+            if (targetPainlessClass != null) {
+                T painlessObject =
objectLookup.apply(targetPainlessClass); + + if (painlessObject != null) { + return painlessObject; + } + } + } + + currentTargetClass = currentTargetClass.getSuperclass(); + } + + return null; } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index a4dbe1006d6..c8353b54c9f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -166,35 +166,35 @@ public final class PainlessLookupBuilder { try { for (Whitelist whitelist : whitelists) { - for (WhitelistClass whitelistClass : whitelist.whitelistStructs) { + for (WhitelistClass whitelistClass : whitelist.whitelistClasses) { origin = whitelistClass.origin; painlessLookupBuilder.addPainlessClass( - whitelist.javaClassLoader, whitelistClass.javaClassName, whitelistClass.onlyFQNJavaClassName == false); + whitelist.classLoader, whitelistClass.javaClassName, whitelistClass.noImport == false); } } for (Whitelist whitelist : whitelists) { - for (WhitelistClass whitelistClass : whitelist.whitelistStructs) { + for (WhitelistClass whitelistClass : whitelist.whitelistClasses) { String targetCanonicalClassName = whitelistClass.javaClassName.replace('$', '.'); for (WhitelistConstructor whitelistConstructor : whitelistClass.whitelistConstructors) { origin = whitelistConstructor.origin; painlessLookupBuilder.addPainlessConstructor( - targetCanonicalClassName, whitelistConstructor.painlessParameterTypeNames); + targetCanonicalClassName, whitelistConstructor.canonicalTypeNameParameters); } for (WhitelistMethod whitelistMethod : whitelistClass.whitelistMethods) { origin = whitelistMethod.origin; painlessLookupBuilder.addPainlessMethod( - whitelist.javaClassLoader, targetCanonicalClassName, whitelistMethod.javaAugmentedClassName, - whitelistMethod.javaMethodName, whitelistMethod.painlessReturnTypeName, - whitelistMethod.painlessParameterTypeNames); + whitelist.classLoader, targetCanonicalClassName, whitelistMethod.augmentedCanonicalClassName, + whitelistMethod.methodName, whitelistMethod.returnCanonicalTypeName, + whitelistMethod.canonicalTypeNameParameters); } for (WhitelistField whitelistField : whitelistClass.whitelistFields) { origin = whitelistField.origin; painlessLookupBuilder.addPainlessField( - targetCanonicalClassName, whitelistField.javaFieldName, whitelistField.painlessFieldTypeName); + targetCanonicalClassName, whitelistField.fieldName, whitelistField.canonicalTypeNameParameter); } } } @@ -211,17 +211,18 @@ public final class PainlessLookupBuilder { public PainlessLookupBuilder() { canonicalClassNamesToClasses = new HashMap<>(); classesToPainlessClassBuilders = new HashMap<>(); - - canonicalClassNamesToClasses.put(DEF_CLASS_NAME, def.class); - classesToPainlessClassBuilders.put(def.class, new PainlessClassBuilder()); } private Class canonicalTypeNameToType(String canonicalTypeName) { return PainlessLookupUtility.canonicalTypeNameToType(canonicalTypeName, canonicalClassNamesToClasses); } - private void validateType(Class type) { - PainlessLookupUtility.validateType(type, classesToPainlessClassBuilders.keySet()); + private boolean isValidType(Class type) { + while (type.getComponentType() != null) { + type = type.getComponentType(); + } + + return type == def.class || classesToPainlessClassBuilders.containsKey(type); } public void 
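
The lookup methods above deliberately return null on a miss instead of throwing, leaving error reporting and its context to the caller. A sketch of how a compiler phase might drive the new API (the error text is ours):

    import org.elasticsearch.painless.lookup.PainlessLookup;
    import org.elasticsearch.painless.lookup.PainlessMethod;

    final class LookupUsageSketch {
        static PainlessMethod requireListAdd(PainlessLookup painlessLookup) {
            // resolve the non-static method List.add with one argument
            PainlessMethod add = painlessLookup.lookupPainlessMethod("List", false, "add", 1);
            if (add == null) {
                throw new IllegalArgumentException("method [List.add/1] is not whitelisted");
            }
            return add;
        }
    }
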
addPainlessClass(ClassLoader classLoader, String javaClassName, boolean importClassName) { @@ -289,7 +290,7 @@ public final class PainlessLookupBuilder { if (canonicalClassName.equals(importedCanonicalClassName)) { if (importClassName == true) { - throw new IllegalArgumentException("must use only_fqn parameter on class [" + canonicalClassName + "] with no package"); + throw new IllegalArgumentException("must use no_import parameter on class [" + canonicalClassName + "] with no package"); } } else { Class importedPainlessClass = canonicalClassNamesToClasses.get(importedCanonicalClassName); @@ -297,7 +298,8 @@ public final class PainlessLookupBuilder { if (importedPainlessClass == null) { if (importClassName) { if (existingPainlessClassBuilder != null) { - throw new IllegalArgumentException("inconsistent only_fqn parameters found for class [" + canonicalClassName + "]"); + throw new IllegalArgumentException( + "inconsistent no_import parameters found for class [" + canonicalClassName + "]"); } canonicalClassNamesToClasses.put(importedCanonicalClassName, clazz); @@ -306,32 +308,33 @@ public final class PainlessLookupBuilder { throw new IllegalArgumentException("imported class [" + importedCanonicalClassName + "] cannot represent multiple " + "classes [" + canonicalClassName + "] and [" + typeToCanonicalTypeName(importedPainlessClass) + "]"); } else if (importClassName == false) { - throw new IllegalArgumentException("inconsistent only_fqn parameters found for class [" + canonicalClassName + "]"); + throw new IllegalArgumentException("inconsistent no_import parameters found for class [" + canonicalClassName + "]"); } } } - public void addPainlessConstructor(String targetCanonicalClassName, List typeNameParameters) { + public void addPainlessConstructor(String targetCanonicalClassName, List canonicalTypeNameParameters) { Objects.requireNonNull(targetCanonicalClassName); - Objects.requireNonNull(typeNameParameters); + Objects.requireNonNull(canonicalTypeNameParameters); Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); if (targetClass == null) { throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found" + - "for constructor [[" + targetCanonicalClassName + "], " + typeNameParameters + "]"); + "for constructor [[" + targetCanonicalClassName + "], " + canonicalTypeNameParameters + "]"); } - List> typeParameters = new ArrayList<>(typeNameParameters.size()); + List> typeParameters = new ArrayList<>(canonicalTypeNameParameters.size()); - for (String typeNameParameter : typeNameParameters) { - try { - Class typeParameter = canonicalTypeNameToType(typeNameParameter); - typeParameters.add(typeParameter); - } catch (IllegalArgumentException iae) { - throw new IllegalArgumentException("type parameter [" + typeNameParameter + "] not found " + - "for constructor [[" + targetCanonicalClassName + "], " + typeNameParameters + "]", iae); + for (String canonicalTypeNameParameter : canonicalTypeNameParameters) { + Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); + + if (typeParameter == null) { + throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found " + + "for constructor [[" + targetCanonicalClassName + "], " + canonicalTypeNameParameters + "]"); } + + typeParameters.add(typeParameter); } addPainlessConstructor(targetClass, typeParameters); @@ -357,11 +360,9 @@ public final class PainlessLookupBuilder { List> javaTypeParameters = new ArrayList<>(typeParametersSize); for (Class 
typeParameter : typeParameters) { - try { - validateType(typeParameter); - } catch (IllegalArgumentException iae) { + if (isValidType(typeParameter) == false) { throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " + - "for constructor [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae); + "for constructor [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); } javaTypeParameters.add(typeToJavaType(typeParameter)); @@ -406,19 +407,19 @@ public final class PainlessLookupBuilder { } public void addPainlessMethod(ClassLoader classLoader, String targetCanonicalClassName, String augmentedCanonicalClassName, - String methodName, String returnCanonicalTypeName, List typeNameParameters) { + String methodName, String returnCanonicalTypeName, List canonicalTypeNameParameters) { Objects.requireNonNull(classLoader); Objects.requireNonNull(targetCanonicalClassName); Objects.requireNonNull(methodName); Objects.requireNonNull(returnCanonicalTypeName); - Objects.requireNonNull(typeNameParameters); + Objects.requireNonNull(canonicalTypeNameParameters); Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); if (targetClass == null) { throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]"); + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); } Class augmentedClass = null; @@ -428,29 +429,28 @@ public final class PainlessLookupBuilder { augmentedClass = Class.forName(augmentedCanonicalClassName, true, classLoader); } catch (ClassNotFoundException cnfe) { throw new IllegalArgumentException("augmented class [" + augmentedCanonicalClassName + "] not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]", cnfe); + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]", cnfe); } } - List> typeParameters = new ArrayList<>(typeNameParameters.size()); + List> typeParameters = new ArrayList<>(canonicalTypeNameParameters.size()); - for (String typeNameParameter : typeNameParameters) { - try { - Class typeParameter = canonicalTypeNameToType(typeNameParameter); - typeParameters.add(typeParameter); - } catch (IllegalArgumentException iae) { - throw new IllegalArgumentException("parameter type [" + typeNameParameter + "] not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]", iae); + for (String canonicalTypeNameParameter : canonicalTypeNameParameters) { + Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); + + if (typeParameter == null) { + throw new IllegalArgumentException("parameter type [" + canonicalTypeNameParameter + "] not found for method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); } + + typeParameters.add(typeParameter); } - Class returnType; + Class returnType = canonicalTypeNameToType(returnCanonicalTypeName); - try { - returnType = canonicalTypeNameToType(returnCanonicalTypeName); - } catch (IllegalArgumentException iae) { + if (returnType == null) { throw new IllegalArgumentException("parameter type [" + returnCanonicalTypeName + "] not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + 
"], " + typeNameParameters + "]", iae); + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); } addPainlessMethod(targetClass, augmentedClass, methodName, returnType, typeParameters); @@ -490,22 +490,18 @@ public final class PainlessLookupBuilder { } for (Class typeParameter : typeParameters) { - try { - validateType(typeParameter); - } catch (IllegalArgumentException iae) { + if (isValidType(typeParameter) == false) { throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " + "not found for method [[" + targetCanonicalClassName + "], [" + methodName + "], " + - typesToCanonicalTypeNames(typeParameters) + "]", iae); + typesToCanonicalTypeNames(typeParameters) + "]"); } javaTypeParameters.add(typeToJavaType(typeParameter)); } - try { - validateType(returnType); - } catch (IllegalArgumentException iae) { + if (isValidType(returnType) == false) { throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(returnType) + "] not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae); + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); } Method javaMethod; @@ -570,7 +566,6 @@ public final class PainlessLookupBuilder { PainlessMethod painlessMethod = painlessClassBuilder.staticMethods.get(painlessMethodKey); if (painlessMethod == null) { - org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod); MethodHandle methodHandle; if (augmentedClass == null) { @@ -610,10 +605,10 @@ public final class PainlessLookupBuilder { } } - public void addPainlessField(String targetCanonicalClassName, String fieldName, String typeNameParameter) { + public void addPainlessField(String targetCanonicalClassName, String fieldName, String canonicalTypeNameParameter) { Objects.requireNonNull(targetCanonicalClassName); Objects.requireNonNull(fieldName); - Objects.requireNonNull(typeNameParameter); + Objects.requireNonNull(canonicalTypeNameParameter); Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); @@ -621,12 +616,10 @@ public final class PainlessLookupBuilder { throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found"); } - Class typeParameter; + Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); - try { - typeParameter = canonicalTypeNameToType(typeNameParameter); - } catch (IllegalArgumentException iae) { - throw new IllegalArgumentException("type parameter [" + typeNameParameter + "] not found " + + if (typeParameter == null) { + throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found " + "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]"); } @@ -657,11 +650,9 @@ public final class PainlessLookupBuilder { throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found"); } - try { - validateType(typeParameter); - } catch (IllegalArgumentException iae) { + if (isValidType(typeParameter) == false) { throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " + - "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]", iae); + "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]"); } Field javaField; @@ -679,11 +670,20 @@ public final class PainlessLookupBuilder { "for field [[" + 
targetCanonicalClassName + "], [" + fieldName + "]"); } + MethodHandle methodHandleGetter; + + try { + methodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException( + "getter method handle not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"); + } + String painlessFieldKey = buildPainlessFieldKey(fieldName); if (Modifier.isStatic(javaField.getModifiers())) { if (Modifier.isFinal(javaField.getModifiers()) == false) { - throw new IllegalArgumentException("static field [[" + targetCanonicalClassName + "]. [" + fieldName + "]] must be final"); + throw new IllegalArgumentException("static field [[" + targetCanonicalClassName + "], [" + fieldName + "]] must be final"); } PainlessField painlessField = painlessClassBuilder.staticFields.get(painlessFieldKey); @@ -691,28 +691,18 @@ public final class PainlessLookupBuilder { if (painlessField == null) { painlessField = painlessFieldCache.computeIfAbsent( new PainlessFieldCacheKey(targetClass, fieldName, typeParameter), - key -> new PainlessField(fieldName, javaField.getName(), targetClass, - typeParameter, javaField.getModifiers(), null, null)); + key -> new PainlessField(javaField, typeParameter, methodHandleGetter, null)); painlessClassBuilder.staticFields.put(painlessFieldKey, painlessField); - } else if (painlessField.clazz != typeParameter) { + } else if (painlessField.typeParameter != typeParameter) { throw new IllegalArgumentException("cannot have static fields " + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + typeToCanonicalTypeName(typeParameter) + "] and " + - "[[" + targetCanonicalClassName + "], [" + painlessField.name + "], " + - typeToCanonicalTypeName(painlessField.clazz) + "] " + - "with the same and different type parameters"); + "[[" + targetCanonicalClassName + "], [" + painlessField.javaField.getName() + "], " + + typeToCanonicalTypeName(painlessField.typeParameter) + "] " + + "with the same name and different type parameters"); } } else { - MethodHandle methodHandleGetter; - - try { - methodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField); - } catch (IllegalAccessException iae) { - throw new IllegalArgumentException( - "getter method handle not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"); - } - MethodHandle methodHandleSetter; try { @@ -727,17 +717,16 @@ public final class PainlessLookupBuilder { if (painlessField == null) { painlessField = painlessFieldCache.computeIfAbsent( new PainlessFieldCacheKey(targetClass, painlessFieldKey, typeParameter), - key -> new PainlessField(fieldName, javaField.getName(), targetClass, - typeParameter, javaField.getModifiers(), methodHandleGetter, methodHandleSetter)); + key -> new PainlessField(javaField, typeParameter, methodHandleGetter, methodHandleSetter)); painlessClassBuilder.fields.put(fieldName, painlessField); - } else if (painlessField.clazz != typeParameter) { + } else if (painlessField.typeParameter != typeParameter) { throw new IllegalArgumentException("cannot have fields " + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + typeToCanonicalTypeName(typeParameter) + "] and " + - "[[" + targetCanonicalClassName + "], [" + painlessField.name + "], " + - typeToCanonicalTypeName(painlessField.clazz) + "] " + - "with the same and different type parameters"); + "[[" + targetCanonicalClassName + "], [" + painlessField.javaField.getName() + "], " + + 
typeToCanonicalTypeName(painlessField.typeParameter) + "] " + + "with the same name and different type parameters"); } } } @@ -812,8 +801,9 @@ public final class PainlessLookupBuilder { PainlessField newPainlessField = painlessFieldEntry.getValue(); PainlessField existingPainlessField = targetPainlessClassBuilder.fields.get(painlessFieldKey); - if (existingPainlessField == null || existingPainlessField.target != newPainlessField.target && - existingPainlessField.target.isAssignableFrom(newPainlessField.target)) { + if (existingPainlessField == null || + existingPainlessField.javaField.getDeclaringClass() != newPainlessField.javaField.getDeclaringClass() && + existingPainlessField.javaField.getDeclaringClass().isAssignableFrom(newPainlessField.javaField.getDeclaringClass())) { targetPainlessClassBuilder.fields.put(painlessFieldKey, newPainlessField); } } @@ -846,8 +836,8 @@ public final class PainlessLookupBuilder { } for (PainlessField painlessField : painlessClassBuilder.fields.values()) { - painlessClassBuilder.getterMethodHandles.put(painlessField.name, painlessField.getter); - painlessClassBuilder.setterMethodHandles.put(painlessField.name, painlessField.setter); + painlessClassBuilder.getterMethodHandles.put(painlessField.javaField.getName(), painlessField.getterMethodHandle); + painlessClassBuilder.setterMethodHandles.put(painlessField.javaField.getName(), painlessField.setterMethodHandle); } } @@ -877,7 +867,7 @@ public final class PainlessLookupBuilder { } else if (javaMethods.size() == 1) { java.lang.reflect.Method javaMethod = javaMethods.get(0); String painlessMethodKey = buildPainlessMethodKey(javaMethod.getName(), javaMethod.getParameterCount()); - painlessClassBuilder.functionalMethod = painlessClassBuilder.methods.get(painlessMethodKey); + painlessClassBuilder.functionalInterfaceMethod = painlessClassBuilder.methods.get(painlessMethodKey); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java index 0a181c5f1b0..71cacab9eba 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.lookup; import java.util.Arrays; -import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Objects; @@ -83,7 +82,7 @@ public final class PainlessLookupUtility { Objects.requireNonNull(canonicalTypeName); Objects.requireNonNull(canonicalClassNamesToClasses); - Class type = canonicalClassNamesToClasses.get(canonicalTypeName); + Class type = DEF_CLASS_NAME.equals(canonicalTypeName) ? def.class : canonicalClassNamesToClasses.get(canonicalTypeName); if (type != null) { return type; @@ -101,45 +100,47 @@ public final class PainlessLookupUtility { canonicalTypeName.charAt(arrayIndex++) == ']') { ++arrayDimensions; } else { - throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found"); + return null; } } canonicalTypeName = canonicalTypeName.substring(0, canonicalTypeName.indexOf('[')); - type = canonicalClassNamesToClasses.get(canonicalTypeName); + type = DEF_CLASS_NAME.equals(canonicalTypeName) ? 
def.class : canonicalClassNamesToClasses.get(canonicalTypeName); - char arrayBraces[] = new char[arrayDimensions]; - Arrays.fill(arrayBraces, '['); - String javaTypeName = new String(arrayBraces); + if (type != null) { + char arrayBraces[] = new char[arrayDimensions]; + Arrays.fill(arrayBraces, '['); + String javaTypeName = new String(arrayBraces); - if (type == boolean.class) { - javaTypeName += "Z"; - } else if (type == byte.class) { - javaTypeName += "B"; - } else if (type == short.class) { - javaTypeName += "S"; - } else if (type == char.class) { - javaTypeName += "C"; - } else if (type == int.class) { - javaTypeName += "I"; - } else if (type == long.class) { - javaTypeName += "J"; - } else if (type == float.class) { - javaTypeName += "F"; - } else if (type == double.class) { - javaTypeName += "D"; - } else { - javaTypeName += "L" + type.getName() + ";"; - } + if (type == boolean.class) { + javaTypeName += "Z"; + } else if (type == byte.class) { + javaTypeName += "B"; + } else if (type == short.class) { + javaTypeName += "S"; + } else if (type == char.class) { + javaTypeName += "C"; + } else if (type == int.class) { + javaTypeName += "I"; + } else if (type == long.class) { + javaTypeName += "J"; + } else if (type == float.class) { + javaTypeName += "F"; + } else if (type == double.class) { + javaTypeName += "D"; + } else { + javaTypeName += "L" + type.getName() + ";"; + } - try { - return Class.forName(javaTypeName); - } catch (ClassNotFoundException cnfe) { - throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found", cnfe); + try { + return Class.forName(javaTypeName); + } catch (ClassNotFoundException cnfe) { + throw new IllegalStateException("internal error", cnfe); + } } } - throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found"); + return null; } /** @@ -152,7 +153,9 @@ public final class PainlessLookupUtility { String canonicalTypeName = type.getCanonicalName(); - if (canonicalTypeName.startsWith(def.class.getCanonicalName())) { + if (canonicalTypeName == null) { + canonicalTypeName = ANONYMOUS_CLASS_NAME; + } else if (canonicalTypeName.startsWith(def.class.getCanonicalName())) { canonicalTypeName = canonicalTypeName.replace(def.class.getCanonicalName(), DEF_CLASS_NAME); } @@ -252,22 +255,6 @@ public final class PainlessLookupUtility { return type; } - /** - * Ensures a type exists based on the terminology specified as part of {@link PainlessLookupUtility}. Throws an - * {@link IllegalArgumentException} if the type does not exist. - */ - public static void validateType(Class<?> type, Collection<Class<?>> classes) { - String canonicalTypeName = typeToCanonicalTypeName(type); - - while (type.getComponentType() != null) { - type = type.getComponentType(); - } - - if (classes.contains(type) == false) { - throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found"); - } - } - /** * Converts a type to its boxed type equivalent if one exists based on the terminology specified as part of * {@link PainlessLookupUtility}. Otherwise, this behaves as an identity function. @@ -357,6 +344,11 @@ public final class PainlessLookupUtility { return fieldName; } + /** + * The name for an anonymous class. + */ + public static final String ANONYMOUS_CLASS_NAME = "$anonymous"; + /** * The def type name as specified in the source for a script. 
*/ diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java index 7605a0c9f7f..1f9973df192 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java @@ -24,7 +24,6 @@ import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.LocalMethod; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; -import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.objectweb.asm.commons.Method; import java.util.List; @@ -59,8 +58,7 @@ public final class ECallLocal extends AExpression { @Override void analyze(Locals locals) { - String methodKey = PainlessLookupUtility.buildPainlessMethodKey(name, arguments.size()); - method = locals.getMethod(methodKey); + method = locals.getMethod(name, arguments.size()); if (method == null) { throw createError(new IllegalArgumentException("Unknown call [" + name + "] with [" + arguments.size() + "] arguments.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java index e78b3c67210..a649fa7611c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; @@ -35,8 +34,6 @@ import org.objectweb.asm.Type; import java.util.Objects; import java.util.Set; -import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE; - /** * Represents a capturing function reference. 
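In ECallLocal above, Locals.getMethod now takes the method name and arity directly instead of a prebuilt key; a before/after sketch of the call site (the key building presumably moves inside Locals, whose new overload is not shown in this diff):

    // before
    method = locals.getMethod(PainlessLookupUtility.buildPainlessMethodKey(name, arguments.size()));
    // after
    method = locals.getMethod(name, arguments.size());
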
*/ @@ -76,23 +73,8 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda defPointer = null; // static case if (captured.clazz != def.class) { - try { - ref = FunctionRef.resolveFromLookup(locals.getPainlessLookup(), expected, - PainlessLookupUtility.typeToCanonicalTypeName(captured.clazz), call, 1); - - // check casts between the interface method and the delegate method are legal - for (int i = 0; i < ref.interfaceMethod.typeParameters.size(); ++i) { - Class from = ref.interfaceMethod.typeParameters.get(i); - Class to = ref.delegateTypeParameters.get(i); - AnalyzerCaster.getLegalCast(location, from, to, false, true); - } - - if (ref.interfaceMethod.returnType != void.class) { - AnalyzerCaster.getLegalCast(location, ref.delegateReturnType, ref.interfaceMethod.returnType, false, true); - } - } catch (IllegalArgumentException e) { - throw createError(e); - } + ref = FunctionRef.create(locals.getPainlessLookup(), locals.getMethods(), location, + expected, PainlessLookupUtility.typeToCanonicalTypeName(captured.clazz), call, 1); } actual = expected; } @@ -114,17 +96,7 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda } else { // typed interface, typed implementation writer.visitVarInsn(MethodWriter.getType(captured.clazz).getOpcode(Opcodes.ILOAD), captured.getSlot()); - writer.invokeDynamic( - ref.interfaceMethodName, - ref.factoryDescriptor, - LAMBDA_BOOTSTRAP_HANDLE, - ref.interfaceType, - ref.delegateClassName, - ref.delegateInvokeType, - ref.delegateMethodName, - ref.delegateType, - ref.isDelegateInterface ? 1 : 0 - ); + writer.invokeLambdaCall(ref); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java index b07613714b8..08236a965fe 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java @@ -63,6 +63,6 @@ final class ECast extends AExpression { @Override public String toString() { - return singleLineToString(PainlessLookupUtility.typeToCanonicalTypeName(cast.to), child); + return singleLineToString(PainlessLookupUtility.typeToCanonicalTypeName(cast.targetType), child); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java index d19068f8fa6..3ad3018c61e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java @@ -49,9 +49,9 @@ public final class EExplicit extends AExpression { @Override void analyze(Locals locals) { - try { - actual = locals.getPainlessLookup().getJavaClassFromPainlessType(type); - } catch (IllegalArgumentException exception) { + actual = locals.getPainlessLookup().canonicalTypeNameToType(type); + + if (actual == null) { throw createError(new IllegalArgumentException("Not a type [" + type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index 782991e2958..c97cc66c7c7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -19,22 +19,16 @@ 
package org.elasticsearch.painless.node; -import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; -import org.elasticsearch.painless.Locals.LocalMethod; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; -import org.elasticsearch.painless.lookup.PainlessLookupUtility; -import org.elasticsearch.painless.lookup.PainlessMethod; import org.objectweb.asm.Type; import java.util.Objects; import java.util.Set; -import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE; - /** * Represents a function reference. */ @@ -63,39 +57,7 @@ public final class EFunctionRef extends AExpression implements ILambda { defPointer = "S" + type + "." + call + ",0"; } else { defPointer = null; - try { - if ("this".equals(type)) { - // user's own function - PainlessMethod interfaceMethod = locals.getPainlessLookup().getPainlessStructFromJavaClass(expected).functionalMethod; - if (interfaceMethod == null) { - throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"); - } - LocalMethod delegateMethod = locals.getMethod(Locals.buildLocalMethodKey(call, interfaceMethod.typeParameters.size())); - if (delegateMethod == null) { - throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], function not found"); - } - ref = new FunctionRef(expected, interfaceMethod, delegateMethod, 0); - - // check casts between the interface method and the delegate method are legal - for (int i = 0; i < interfaceMethod.typeParameters.size(); ++i) { - Class from = interfaceMethod.typeParameters.get(i); - Class to = delegateMethod.typeParameters.get(i); - AnalyzerCaster.getLegalCast(location, from, to, false, true); - } - - if (interfaceMethod.returnType != void.class) { - AnalyzerCaster.getLegalCast(location, delegateMethod.returnType, interfaceMethod.returnType, false, true); - } - } else { - // whitelist lookup - ref = FunctionRef.resolveFromLookup(locals.getPainlessLookup(), expected, type, call, 0); - } - - } catch (IllegalArgumentException e) { - throw createError(e); - } + ref = FunctionRef.create(locals.getPainlessLookup(), locals.getMethods(), location, expected, type, call, 0); actual = expected; } } @@ -104,17 +66,7 @@ public final class EFunctionRef extends AExpression implements ILambda { void write(MethodWriter writer, Globals globals) { if (ref != null) { writer.writeDebugInfo(location); - writer.invokeDynamic( - ref.interfaceMethodName, - ref.factoryDescriptor, - LAMBDA_BOOTSTRAP_HANDLE, - ref.interfaceType, - ref.delegateClassName, - ref.delegateInvokeType, - ref.delegateMethodName, - ref.delegateType, - ref.isDelegateInterface ? 
1 : 0 - ); + writer.invokeLambdaCall(ref); } else { // TODO: don't do this: its just to cutover :) writer.push((String)null); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java index 2fa8ca8ca95..73e4f176ea1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java @@ -54,12 +54,11 @@ public final class EInstanceof extends AExpression { @Override void analyze(Locals locals) { - Class clazz; // ensure the specified type is part of the definition - try { - clazz = locals.getPainlessLookup().getJavaClassFromPainlessType(this.type); - } catch (IllegalArgumentException exception) { + Class clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type); + + if (clazz == null) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java index 6fc4a3a6480..af906416ca7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java @@ -19,11 +19,9 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; -import org.elasticsearch.painless.Locals.LocalMethod; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; @@ -40,8 +38,6 @@ import java.util.List; import java.util.Objects; import java.util.Set; -import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE; - /** * Lambda expression node. *

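ECapturingFunctionRef and EFunctionRef above, and ELambda below, now emit their lambda call sites through a single MethodWriter.invokeLambdaCall(ref) helper. Its body is not part of this diff; a minimal sketch, assuming it only centralizes the invokedynamic boilerplate being deleted (FunctionRef field names taken from the removed code and possibly renamed since):

    // in MethodWriter (extends GeneratorAdapter); LAMBDA_BOOTSTRAP_HANDLE from WriterConstants
    public void invokeLambdaCall(FunctionRef functionRef) {
        // the same nine-argument indy call the three nodes previously spelled out inline
        invokeDynamic(
            functionRef.interfaceMethodName,
            functionRef.factoryDescriptor,
            LAMBDA_BOOTSTRAP_HANDLE,
            functionRef.interfaceType,
            functionRef.delegateClassName,
            functionRef.delegateInvokeType,
            functionRef.delegateMethodName,
            functionRef.delegateType,
            functionRef.isDelegateInterface ? 1 : 0);
    }
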
        @@ -119,9 +115,10 @@ public final class ELambda extends AExpression implements ILambda { actualParamTypeStrs.add(type); } } + } else { // we know the method statically, infer return type and any unknown/def types - interfaceMethod = locals.getPainlessLookup().getPainlessStructFromJavaClass(expected).functionalMethod; + interfaceMethod = locals.getPainlessLookup().lookupFunctionalInterfacePainlessMethod(expected); if (interfaceMethod == null) { throw createError(new IllegalArgumentException("Cannot pass lambda to " + "[" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface")); @@ -173,7 +170,7 @@ public final class ELambda extends AExpression implements ILambda { desugared = new SFunction(reserved, location, PainlessLookupUtility.typeToCanonicalTypeName(returnType), name, paramTypes, paramNames, statements, true); desugared.generateSignature(locals.getPainlessLookup()); - desugared.analyze(Locals.newLambdaScope(locals.getProgramScope(), returnType, + desugared.analyze(Locals.newLambdaScope(locals.getProgramScope(), desugared.name, returnType, desugared.parameters, captures.size(), reserved.getMaxLoopCounter())); // setup method reference to synthetic method @@ -183,25 +180,8 @@ public final class ELambda extends AExpression implements ILambda { defPointer = "Sthis." + name + "," + captures.size(); } else { defPointer = null; - try { - LocalMethod localMethod = - new LocalMethod(desugared.name, desugared.returnType, desugared.typeParameters, desugared.methodType); - ref = new FunctionRef(expected, interfaceMethod, localMethod, captures.size()); - } catch (IllegalArgumentException e) { - throw createError(e); - } - - // check casts between the interface method and the delegate method are legal - for (int i = 0; i < interfaceMethod.typeParameters.size(); ++i) { - Class from = interfaceMethod.typeParameters.get(i); - Class to = desugared.parameters.get(i + captures.size()).clazz; - AnalyzerCaster.getLegalCast(location, from, to, false, true); - } - - if (interfaceMethod.returnType != void.class) { - AnalyzerCaster.getLegalCast(location, desugared.returnType, interfaceMethod.returnType, false, true); - } - + ref = FunctionRef.create( + locals.getPainlessLookup(), locals.getMethods(), location, expected, "this", desugared.name, captures.size()); actual = expected; } } @@ -217,17 +197,7 @@ public final class ELambda extends AExpression implements ILambda { writer.visitVarInsn(MethodWriter.getType(capture.clazz).getOpcode(Opcodes.ILOAD), capture.getSlot()); } - writer.invokeDynamic( - ref.interfaceMethodName, - ref.factoryDescriptor, - LAMBDA_BOOTSTRAP_HANDLE, - ref.interfaceType, - ref.delegateClassName, - ref.delegateInvokeType, - ref.delegateMethodName, - ref.delegateType, - ref.isDelegateInterface ? 
1 : 0 - ); + writer.invokeLambdaCall(ref); } else { // placeholder writer.push((String)null); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java index 01a4878266e..8c9154aaaf3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java @@ -24,7 +24,6 @@ import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessConstructor; -import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Type; @@ -34,6 +33,8 @@ import java.util.ArrayList; import java.util.List; import java.util.Set; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; + /** * Represents a list initialization shortcut. */ @@ -64,18 +65,17 @@ public final class EListInit extends AExpression { actual = ArrayList.class; - constructor = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors.get( - PainlessLookupUtility.buildPainlessConstructorKey(0)); + constructor = locals.getPainlessLookup().lookupPainlessConstructor(actual, 0); if (constructor == null) { - throw createError(new IllegalStateException("Illegal tree structure.")); + throw createError(new IllegalArgumentException( + "constructor [" + typeToCanonicalTypeName(actual) + ", <init>/0] not found")); } - method = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).methods - .get(PainlessLookupUtility.buildPainlessMethodKey("add", 1)); + method = locals.getPainlessLookup().lookupPainlessMethod(actual, false, "add", 1); if (method == null) { - throw createError(new IllegalStateException("Illegal tree structure.")); + throw createError(new IllegalArgumentException("method [" + typeToCanonicalTypeName(actual) + ", add/1] not found")); } for (int index = 0; index < values.size(); ++index) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java index 73afe7f0dc5..11c12b2cd0a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java @@ -24,7 +24,6 @@ import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessConstructor; -import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Type; @@ -34,6 +33,8 @@ import java.util.HashMap; import java.util.List; import java.util.Set; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; + /** * Represents a map initialization shortcut. 
*/ @@ -70,18 +71,17 @@ public final class EMapInit extends AExpression { actual = HashMap.class; - constructor = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors.get( - PainlessLookupUtility.buildPainlessConstructorKey(0)); + constructor = locals.getPainlessLookup().lookupPainlessConstructor(actual, 0); if (constructor == null) { - throw createError(new IllegalStateException("Illegal tree structure.")); + throw createError(new IllegalArgumentException( + "constructor [" + typeToCanonicalTypeName(actual) + ", <init>/0] not found")); } - method = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).methods - .get(PainlessLookupUtility.buildPainlessMethodKey("put", 2)); + method = locals.getPainlessLookup().lookupPainlessMethod(actual, false, "put", 2); if (method == null) { - throw createError(new IllegalStateException("Illegal tree structure.")); + throw createError(new IllegalArgumentException("method [" + typeToCanonicalTypeName(actual) + ", put/2] not found")); } if (keys.size() != values.size()) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java index f9bd4cebc3f..cef005de9c3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java @@ -54,15 +54,13 @@ public final class ENewArray extends AExpression { @Override void analyze(Locals locals) { - if (!read) { - throw createError(new IllegalArgumentException("A newly created array must be read from.")); + if (!read) { + throw createError(new IllegalArgumentException("A newly created array must be read from.")); } - Class clazz; + Class clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type); - try { - clazz = locals.getPainlessLookup().getJavaClassFromPainlessType(this.type); - } catch (IllegalArgumentException exception) { + if (clazz == null) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index 4e08f257386..9423ed5d109 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -23,7 +23,6 @@ import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; -import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessConstructor; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.objectweb.asm.Type; @@ -33,6 +32,8 @@ import java.util.List; import java.util.Objects; import java.util.Set; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; + /** * Represents an object instantiation. */
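EListInit and EMapInit above (and ENewObj below) now resolve members through the new lookup helpers and fail with a descriptive "not found" message keyed by owner type and arity, instead of the old IllegalStateException("Illegal tree structure."). The calls in isolation, for illustration (painlessLookup standing in for locals.getPainlessLookup()):

    PainlessConstructor constructor = painlessLookup.lookupPainlessConstructor(ArrayList.class, 0);
    PainlessMethod add = painlessLookup.lookupPainlessMethod(ArrayList.class, false, "add", 1);
    // on failure the nodes now report, e.g.: constructor [java.util.ArrayList, <init>/0] not found
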
@@ -59,39 +60,38 @@ public final class ENewObj extends AExpression { @Override void analyze(Locals locals) { - try { - actual = locals.getPainlessLookup().getJavaClassFromPainlessType(this.type); - } catch (IllegalArgumentException exception) { + actual = locals.getPainlessLookup().canonicalTypeNameToType(this.type); + + if (actual == null) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } - PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual); - constructor = struct.constructors.get(PainlessLookupUtility.buildPainlessConstructorKey(arguments.size())); + constructor = locals.getPainlessLookup().lookupPainlessConstructor(actual, arguments.size()); - if (constructor != null) { - Class[] types = new Class[constructor.typeParameters.size()]; - constructor.typeParameters.toArray(types); - - if (constructor.typeParameters.size() != arguments.size()) { - throw createError(new IllegalArgumentException( - "When calling constructor on type [" + PainlessLookupUtility.typeToCanonicalTypeName(actual) + "] " + - "expected [" + constructor.typeParameters.size() + "] arguments, but found [" + arguments.size() + "].")); - } - - for (int argument = 0; argument < arguments.size(); ++argument) { - AExpression expression = arguments.get(argument); - - expression.expected = types[argument]; - expression.internal = true; - expression.analyze(locals); - arguments.set(argument, expression.cast(locals)); - } - - statement = true; - } else { + if (constructor == null) { throw createError(new IllegalArgumentException( - "Unknown new call on type [" + PainlessLookupUtility.typeToCanonicalTypeName(actual) + "].")); + "constructor [" + typeToCanonicalTypeName(actual) + ", <init>/" + arguments.size() + "] not found")); } + + Class[] types = new Class[constructor.typeParameters.size()]; + constructor.typeParameters.toArray(types); + + if (constructor.typeParameters.size() != arguments.size()) { + throw createError(new IllegalArgumentException( + "When calling constructor on type [" + PainlessLookupUtility.typeToCanonicalTypeName(actual) + "] " + + "expected [" + constructor.typeParameters.size() + "] arguments, but found [" + arguments.size() + "].")); + } + + for (int argument = 0; argument < arguments.size(); ++argument) { + AExpression expression = arguments.get(argument); + + expression.expected = types[argument]; + expression.internal = true; + expression.analyze(locals); + arguments.set(argument, expression.cast(locals)); + } + + statement = true; } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java index a556b3ad315..0d8c94db0f1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java @@ -47,9 +47,9 @@ public final class EStatic extends AExpression { @Override void analyze(Locals locals) { - try { - actual = locals.getPainlessLookup().getJavaClassFromPainlessType(type); - } catch (IllegalArgumentException exception) { + actual = locals.getPainlessLookup().canonicalTypeNameToType(type); + + if (actual == null) { throw createError(new IllegalArgumentException("Not a type [" + type + "].")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java index 
56bc18eadbd..25ae1ed9774 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java @@ -23,8 +23,6 @@ import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; -import org.elasticsearch.painless.lookup.PainlessClass; -import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.def; @@ -32,6 +30,8 @@ import java.util.List; import java.util.Objects; import java.util.Set; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; + /** * Represents a method call and defers to a child subnode. */ @@ -66,26 +66,18 @@ public final class PCallInvoke extends AExpression { prefix.expected = prefix.actual; prefix = prefix.cast(locals); - if (prefix.actual.isArray()) { - throw createError(new IllegalArgumentException("Illegal call [" + name + "] on array type.")); - } - - PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual); - - if (prefix.actual.isPrimitive()) { - struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(PainlessLookupUtility.typeToBoxedType(prefix.actual)); - } - - String methodKey = PainlessLookupUtility.buildPainlessMethodKey(name, arguments.size()); - PainlessMethod method = prefix instanceof EStatic ? struct.staticMethods.get(methodKey) : struct.methods.get(methodKey); - - if (method != null) { - sub = new PSubCallInvoke(location, method, prefix.actual, arguments); - } else if (prefix.actual == def.class) { + if (prefix.actual == def.class) { sub = new PSubDefCall(location, name, arguments); } else { - throw createError(new IllegalArgumentException("Unknown call [" + name + "] with [" + arguments.size() + "] arguments " + - "on type [" + PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual) + "].")); + PainlessMethod method = + locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, prefix instanceof EStatic, name, arguments.size()); + + if (method == null) { + throw createError(new IllegalArgumentException( + "method [" + typeToCanonicalTypeName(prefix.actual) + ", " + name + "/" + arguments.size() + "] not found")); + } + + sub = new PSubCallInvoke(location, method, prefix.actual, arguments); } if (nullSafe) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java index b322d5b1f28..7efd6a29899 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java @@ -23,7 +23,6 @@ import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; -import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; @@ -34,6 +33,8 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; + /** * Represents a field load/store and 
defers to a child subnode. */ @@ -67,26 +68,25 @@ public final class PField extends AStoreable { } else if (prefix.actual == def.class) { sub = new PSubDefField(location, value); } else { - PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual); - PainlessField field = prefix instanceof EStatic ? struct.staticFields.get(value) : struct.fields.get(value); + PainlessField field = locals.getPainlessLookup().lookupPainlessField(prefix.actual, prefix instanceof EStatic, value); - if (field != null) { - sub = new PSubField(location, field); - } else { - PainlessMethod getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey( - "get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); + if (field == null) { + PainlessMethod getter; + PainlessMethod setter; + + getter = locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, false, + "get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0); if (getter == null) { - getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey( - "is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); + getter = locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, false, + "is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0); } - PainlessMethod setter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey( - "set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); + setter = locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, false, + "set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 1); if (getter != null || setter != null) { - sub = new PSubShortcut( - location, value, PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual), getter, setter); + sub = new PSubShortcut(location, value, PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual), getter, setter); } else { EConstant index = new EConstant(location, value); index.analyze(locals); @@ -99,12 +99,14 @@ public final class PField extends AStoreable { sub = new PSubListShortcut(location, prefix.actual, index); } } - } - } - if (sub == null) { - throw createError(new IllegalArgumentException( - "Unknown field [" + value + "] for type [" + PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual) + "].")); + if (sub == null) { + throw createError(new IllegalArgumentException( + "field [" + typeToCanonicalTypeName(prefix.actual) + ", " + value + "] not found")); + } + } else { + sub = new PSubField(location, field); + } } if (nullSafe) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java index 007a599e9f8..9e09f810250 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java @@ -51,22 +51,24 @@ final class PSubField extends AStoreable { @Override void analyze(Locals locals) { - if (write && Modifier.isFinal(field.modifiers)) { - throw createError(new IllegalArgumentException("Cannot write to read-only field [" + field.name + "] for type " + - "[" + PainlessLookupUtility.typeToCanonicalTypeName(field.clazz) + "].")); + if (write && Modifier.isFinal(field.javaField.getModifiers())) { + throw createError(new IllegalArgumentException("Cannot write to read-only field [" + field.javaField.getName() + "] " + + "for type [" + 
PainlessLookupUtility.typeToCanonicalTypeName(field.javaField.getDeclaringClass()) + "].")); } - actual = field.clazz; + actual = field.typeParameter; } @Override void write(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); - if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { - writer.getStatic(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); + if (java.lang.reflect.Modifier.isStatic(field.javaField.getModifiers())) { + writer.getStatic(Type.getType( + field.javaField.getDeclaringClass()), field.javaField.getName(), MethodWriter.getType(field.typeParameter)); } else { - writer.getField(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); + writer.getField(Type.getType( + field.javaField.getDeclaringClass()), field.javaField.getName(), MethodWriter.getType(field.typeParameter)); } } @@ -94,10 +96,12 @@ final class PSubField extends AStoreable { void load(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); - if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { - writer.getStatic(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); + if (java.lang.reflect.Modifier.isStatic(field.javaField.getModifiers())) { + writer.getStatic(Type.getType( + field.javaField.getDeclaringClass()), field.javaField.getName(), MethodWriter.getType(field.typeParameter)); } else { - writer.getField(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); + writer.getField(Type.getType( + field.javaField.getDeclaringClass()), field.javaField.getName(), MethodWriter.getType(field.typeParameter)); } } @@ -105,15 +109,17 @@ final class PSubField extends AStoreable { void store(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); - if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { - writer.putStatic(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); + if (java.lang.reflect.Modifier.isStatic(field.javaField.getModifiers())) { + writer.putStatic(Type.getType( + field.javaField.getDeclaringClass()), field.javaField.getName(), MethodWriter.getType(field.typeParameter)); } else { - writer.putField(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); + writer.putField(Type.getType( + field.javaField.getDeclaringClass()), field.javaField.getName(), MethodWriter.getType(field.typeParameter)); } } @Override public String toString() { - return singleLineToString(prefix, field.name); + return singleLineToString(prefix, field.javaField.getName()); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java index 0738f55c2cf..3bc4913fde9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java @@ -24,7 +24,6 @@ import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.WriterConstants; -import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; @@ -56,11 +55,10 @@ final class PSubListShortcut extends AStoreable { @Override void analyze(Locals locals) { - PainlessClass struct = 
locals.getPainlessLookup().getPainlessStructFromJavaClass(targetClass); String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(targetClass); - getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("get", 1)); - setter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("set", 2)); + getter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "get", 1); + setter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "set", 2); if (getter != null && (getter.returnType == void.class || getter.typeParameters.size() != 1 || getter.typeParameters.get(0) != int.class)) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java index 04ccbc9f534..0a0f099bd68 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java @@ -23,7 +23,6 @@ import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; -import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; @@ -55,11 +54,10 @@ final class PSubMapShortcut extends AStoreable { @Override void analyze(Locals locals) { - PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(targetClass); String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(targetClass); - getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("get", 1)); - setter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("put", 2)); + getter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "get", 1); + setter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "put", 2); if (getter != null && (getter.returnType == void.class || getter.typeParameters.size() != 1)) { throw createError(new IllegalArgumentException("Illegal map get shortcut for type [" + canonicalClassName + "].")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java index 8a703c80cba..0c8ba5de6b2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java @@ -64,11 +64,9 @@ public final class SCatch extends AStatement { @Override void analyze(Locals locals) { - Class clazz; + Class clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type); - try { - clazz = locals.getPainlessLookup().getJavaClassFromPainlessType(this.type); - } catch (IllegalArgumentException exception) { + if (clazz == null) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java index fb92c20e89e..7ead673c70b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java @@ 
-59,11 +59,9 @@ public final class SDeclaration extends AStatement { @Override void analyze(Locals locals) { - Class clazz; + Class clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type); - try { - clazz = locals.getPainlessLookup().getJavaClassFromPainlessType(this.type); - } catch (IllegalArgumentException exception) { + if (clazz == null) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java index 9ff57e6b913..cf41105c4fe 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java @@ -68,11 +68,9 @@ public class SEach extends AStatement { expression.expected = expression.actual; expression = expression.cast(locals); - Class clazz; + Class clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type); - try { - clazz = locals.getPainlessLookup().getJavaClassFromPainlessType(this.type); - } catch (IllegalArgumentException exception) { + if (clazz == null) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java index d61a424f83d..6fe09627f9d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java @@ -20,20 +20,16 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Constant; -import org.elasticsearch.painless.Def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Parameter; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; -import org.elasticsearch.painless.WriterConstants; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.node.SSource.Reserved; import org.objectweb.asm.ClassVisitor; -import org.objectweb.asm.Handle; import org.objectweb.asm.Opcodes; import java.lang.invoke.MethodType; @@ -46,7 +42,6 @@ import java.util.Set; import static java.util.Collections.emptyList; import static java.util.Collections.unmodifiableSet; -import static org.elasticsearch.painless.WriterConstants.CLASS_TYPE; /** * Represents a user-defined function. 
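The same conversion repeats across EExplicit, EInstanceof, SCatch, SDeclaration, and SEach above, and in SFunction below: the lookup no longer throws IllegalArgumentException for an unknown type, it returns null, and each caller raises its own context-specific error. The caller-side contract, extracted for illustration (typeName standing in for the node's type string):

    Class clazz = locals.getPainlessLookup().canonicalTypeNameToType(typeName);

    if (clazz == null) {
        throw createError(new IllegalArgumentException("Not a type [" + typeName + "]."));
    }
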
@@ -120,9 +115,9 @@ public final class SFunction extends AStatement { } void generateSignature(PainlessLookup painlessLookup) { - try { - returnType = painlessLookup.getJavaClassFromPainlessType(rtnTypeStr); - } catch (IllegalArgumentException exception) { + returnType = painlessLookup.canonicalTypeNameToType(rtnTypeStr); + + if (returnType == null) { throw createError(new IllegalArgumentException("Illegal return type [" + rtnTypeStr + "] for function [" + name + "].")); } @@ -134,16 +129,16 @@ public final class SFunction extends AStatement { List<Class<?>> paramTypes = new ArrayList<>(); for (int param = 0; param < this.paramTypeStrs.size(); ++param) { - try { - Class paramType = painlessLookup.getJavaClassFromPainlessType(this.paramTypeStrs.get(param)); + Class paramType = painlessLookup.canonicalTypeNameToType(this.paramTypeStrs.get(param)); - paramClasses[param] = PainlessLookupUtility.typeToJavaType(paramType); - paramTypes.add(paramType); - parameters.add(new Parameter(location, paramNameStrs.get(param), paramType)); - } catch (IllegalArgumentException exception) { + if (paramType == null) { throw createError(new IllegalArgumentException( "Illegal parameter type [" + this.paramTypeStrs.get(param) + "] for function [" + name + "].")); } + + paramClasses[param] = PainlessLookupUtility.typeToJavaType(paramType); + paramTypes.add(paramType); + parameters.add(new Parameter(location, paramNameStrs.get(param), paramType)); } typeParameters = paramTypes; @@ -218,15 +213,6 @@ public final class SFunction extends AStatement { throw createError(new IllegalStateException("Illegal tree structure.")); } } - - String staticHandleFieldName = Def.getUserFunctionHandleFieldName(name, parameters.size()); - globals.addConstantInitializer(new Constant(location, WriterConstants.METHOD_HANDLE_TYPE, - staticHandleFieldName, this::initializeConstant)); - } - - private void initializeConstant(MethodWriter writer) { - final Handle handle = new Handle(Opcodes.H_INVOKESTATIC, CLASS_TYPE.getInternalName(), name, method.getDescriptor(), false); - writer.push(handle); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java index fe735c0db31..0f7445a38c4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java @@ -69,6 +69,7 @@ import static org.elasticsearch.painless.WriterConstants.EXCEPTION_TYPE; import static org.elasticsearch.painless.WriterConstants.GET_NAME_METHOD; import static org.elasticsearch.painless.WriterConstants.GET_SOURCE_METHOD; import static org.elasticsearch.painless.WriterConstants.GET_STATEMENTS_METHOD; +import static org.elasticsearch.painless.WriterConstants.MAP_TYPE; import static org.elasticsearch.painless.WriterConstants.OUT_OF_MEMORY_ERROR_TYPE; import static org.elasticsearch.painless.WriterConstants.PAINLESS_ERROR_TYPE; import static org.elasticsearch.painless.WriterConstants.PAINLESS_EXPLAIN_ERROR_GET_HEADERS_METHOD; @@ -163,7 +164,7 @@ public final class SSource extends AStatement { throw new IllegalStateException("Illegal tree structure."); } - public void analyze(PainlessLookup painlessLookup) { + public Map<String, LocalMethod> analyze(PainlessLookup painlessLookup) { Map<String, LocalMethod> methods = new HashMap<>(); for (SFunction function : functions) { @@ -177,7 +178,10 @@ public final class SSource extends AStatement { } } - analyze(Locals.newProgramScope(painlessLookup, 
methods.values())); + Locals locals = Locals.newProgramScope(painlessLookup, methods.values()); + analyze(locals); + + return locals.getMethods(); } @Override @@ -253,6 +257,7 @@ public final class SSource extends AStatement { globals.getStatements(), settings); bootstrapDef.visitCode(); bootstrapDef.getStatic(CLASS_TYPE, "$DEFINITION", DEFINITION_TYPE); + bootstrapDef.getStatic(CLASS_TYPE, "$LOCALS", MAP_TYPE); bootstrapDef.loadArgs(); bootstrapDef.invokeStatic(DEF_BOOTSTRAP_DELEGATE_TYPE, DEF_BOOTSTRAP_DELEGATE_METHOD); bootstrapDef.returnValue(); @@ -263,8 +268,9 @@ public final class SSource extends AStatement { visitor.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, "$SOURCE", STRING_TYPE.getDescriptor(), null, null).visitEnd(); visitor.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, "$STATEMENTS", BITSET_TYPE.getDescriptor(), null, null).visitEnd(); - // Write the static variable used by the method to bootstrap def calls + // Write the static variables used by the method to bootstrap def calls visitor.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, "$DEFINITION", DEFINITION_TYPE.getDescriptor(), null, null).visitEnd(); + visitor.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, "$LOCALS", MAP_TYPE.getDescriptor(), null, null).visitEnd(); org.objectweb.asm.commons.Method init; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java index 5450f690f6c..46dfa056874 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java @@ -40,6 +40,7 @@ import java.util.Set; import static org.elasticsearch.painless.WriterConstants.ITERATOR_HASNEXT; import static org.elasticsearch.painless.WriterConstants.ITERATOR_NEXT; import static org.elasticsearch.painless.WriterConstants.ITERATOR_TYPE; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; /** * Represents a for-each loop for iterables. 
@@ -76,12 +77,11 @@ final class SSubEachIterable extends AStatement { if (expression.actual == def.class) { method = null; } else { - method = locals.getPainlessLookup().getPainlessStructFromJavaClass(expression.actual).methods - .get(PainlessLookupUtility.buildPainlessMethodKey("iterator", 0)); + method = locals.getPainlessLookup().lookupPainlessMethod(expression.actual, false, "iterator", 0); if (method == null) { - throw createError(new IllegalArgumentException("Unable to create iterator for the type " + - "[" + PainlessLookupUtility.typeToCanonicalTypeName(expression.actual) + "].")); + throw createError(new IllegalArgumentException( + "method [" + typeToCanonicalTypeName(expression.actual) + ", iterator/0] not found")); } } diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt index 8491d15c27e..a3ff479533b 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt @@ -24,31 +24,31 @@ #### Primitive types -class void only_fqn { +class void no_import { } -class boolean only_fqn { +class boolean no_import { } -class byte only_fqn { +class byte no_import { } -class short only_fqn { +class short no_import { } -class char only_fqn { +class char no_import { } -class int only_fqn { +class int no_import { } -class long only_fqn { +class long no_import { } -class float only_fqn { +class float no_import { } -class double only_fqn { +class double no_import { } #### Painless debugging API @@ -77,8 +77,8 @@ class org.elasticsearch.index.fielddata.ScriptDocValues$Longs { } class org.elasticsearch.index.fielddata.ScriptDocValues$Dates { - org.joda.time.ReadableDateTime get(int) - org.joda.time.ReadableDateTime getValue() + Object get(int) + Object getValue() List getValues() } @@ -134,7 +134,7 @@ class org.elasticsearch.index.mapper.IpFieldMapper$IpFieldType$IpScriptDocValues # for testing. 
# currently FeatureTest exposes overloaded constructor, field load store, and overloaded static methods -class org.elasticsearch.painless.FeatureTest only_fqn { +class org.elasticsearch.painless.FeatureTest no_import { int z () (int,int) diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalyzerCasterTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalyzerCasterTests.java index 34bc2c78de6..58864d73c41 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalyzerCasterTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalyzerCasterTests.java @@ -35,8 +35,8 @@ public class AnalyzerCasterTests extends ESTestCase { } PainlessCast cast = AnalyzerCaster.getLegalCast(location, actual, expected, true, false); - assertEquals(actual, cast.from); - assertEquals(expected, cast.to); + assertEquals(actual, cast.originalType); + assertEquals(expected, cast.targetType); if (mustBeExplicit) { ClassCastException error = expectThrows(ClassCastException.class, @@ -44,8 +44,8 @@ public class AnalyzerCasterTests extends ESTestCase { assertTrue(error.getMessage().startsWith("Cannot cast")); } else { cast = AnalyzerCaster.getLegalCast(location, actual, expected, false, false); - assertEquals(actual, cast.from); - assertEquals(expected, cast.to); + assertEquals(actual, cast.originalType); + assertEquals(expected, cast.targetType); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ContextExampleTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ContextExampleTests.java new file mode 100644 index 00000000000..15eed75bcb8 --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ContextExampleTests.java @@ -0,0 +1,311 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless; + +/** + * These tests run the Painless scripts used in the context docs against + * slightly modified data designed around unit tests rather than a fully- + * running Elasticsearch server. 
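+ * <p> + * The seat data these scripts run against is produced by the generator program preserved in the comment below, which writes bulk-index JSON documents for a "seats" index.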
+ */ +public class ContextExampleTests extends ScriptTestCase { + + // **** Docs Generator Code **** + + /* + + import java.io.FileWriter; + import java.io.IOException; + + public class Generator { + + public final static String[] theatres = new String[] {"Down Port", "Graye", "Skyline", "Courtyard"}; + public final static String[] plays = new String[] {"Driving", "Pick It Up", "Sway and Pull", "Harriot", + "The Busline", "Ants Underground", "Exploria", "Line and Single", "Shafted", "Sunnyside Down", + "Test Run", "Auntie Jo"}; + public final static String[] actors = new String[] {"James Holland", "Krissy Smith", "Joe Muir", "Ryan Earns", + "Joel Madigan", "Jessica Brown", "Baz Knight", "Jo Hangum", "Rachel Grass", "Phoebe Miller", "Sarah Notch", + "Brayden Green", "Joshua Iller", "Jon Hittle", "Rob Kettleman", "Laura Conrad", "Simon Hower", "Nora Blue", + "Mike Candlestick", "Jacey Bell"}; + + public static void writeSeat(FileWriter writer, int id, String theatre, String play, String[] actors, + String date, String time, int row, int number, double cost, boolean sold) throws IOException { + StringBuilder builder = new StringBuilder(); + builder.append("{ \"create\" : { \"_index\" : \"seats\", \"_type\" : \"seat\", \"_id\" : \""); + builder.append(id); + builder.append("\" } }\n"); + builder.append("{ \"theatre\" : \""); + builder.append(theatre); + builder.append("\", \"play\" : \""); + builder.append(play); + builder.append("\", \"actors\": [ \""); + for (String actor : actors) { + builder.append(actor); + if (actor.equals(actors[actors.length - 1]) == false) { + builder.append("\", \""); + } + } + builder.append("\" ], \"date\": \""); + builder.append(date); + builder.append("\", \"time\": \""); + builder.append(time); + builder.append("\", \"row\": "); + builder.append(row); + builder.append(", \"number\": "); + builder.append(number); + builder.append(", \"cost\": "); + builder.append(cost); + builder.append(", \"sold\": "); + builder.append(sold ? 
"true" : "false"); + builder.append(" }\n"); + writer.write(builder.toString()); + } + + public static void main(String args[]) throws IOException { + FileWriter writer = new FileWriter("/home/jdconrad/test/seats.json"); + int id = 0; + + for (int playCount = 0; playCount < 12; ++playCount) { + String play = plays[playCount]; + String theatre; + String[] actor; + int startMonth; + int endMonth; + String time; + + if (playCount == 0) { + theatre = theatres[0]; + actor = new String[] {actors[0], actors[1], actors[2], actors[3]}; + startMonth = 4; + endMonth = 5; + time = "3:00PM"; + } else if (playCount == 1) { + theatre = theatres[0]; + actor = new String[] {actors[4], actors[5], actors[6], actors[7], actors[8], actors[9]}; + startMonth = 4; + endMonth = 6; + time = "8:00PM"; + } else if (playCount == 2) { + theatre = theatres[0]; + actor = new String[] {actors[0], actors[1], actors[2], actors[3], + actors[4], actors[5], actors[6], actors[7]}; + startMonth = 6; + endMonth = 8; + time = "3:00 PM"; + } else if (playCount == 3) { + theatre = theatres[0]; + actor = new String[] {actors[9], actors[10], actors[11], actors[12], actors[13], actors[14], + actors[15], actors[16], actors[17], actors[18], actors[19]}; + startMonth = 7; + endMonth = 8; + time = "8:00PM"; + } else if (playCount == 4) { + theatre = theatres[0]; + actor = new String[] {actors[13], actors[14], actors[15], actors[17], actors[18], actors[19]}; + startMonth = 8; + endMonth = 10; + time = "3:00PM"; + } else if (playCount == 5) { + theatre = theatres[0]; + actor = new String[] {actors[8], actors[9], actors[10], actors[11], actors[12]}; + startMonth = 8; + endMonth = 10; + time = "8:00PM"; + } else if (playCount == 6) { + theatre = theatres[1]; + actor = new String[] {actors[10], actors[11], actors[12], actors[13], actors[14], actors[15], actors[16]}; + startMonth = 4; + endMonth = 5; + time = "11:00AM"; + } else if (playCount == 7) { + theatre = theatres[1]; + actor = new String[] {actors[17], actors[18]}; + startMonth = 6; + endMonth = 9; + time = "2:00PM"; + } else if (playCount == 8) { + theatre = theatres[1]; + actor = new String[] {actors[0], actors[1], actors[2], actors[3], actors[16]}; + startMonth = 10; + endMonth = 11; + time = "11:00AM"; + } else if (playCount == 9) { + theatre = theatres[2]; + actor = new String[] {actors[1], actors[2], actors[3], actors[17], actors[18], actors[19]}; + startMonth = 3; + endMonth = 6; + time = "4:00PM"; + } else if (playCount == 10) { + theatre = theatres[2]; + actor = new String[] {actors[2], actors[3], actors[4], actors[5]}; + startMonth = 7; + endMonth = 8; + time = "7:30PM"; + } else if (playCount == 11) { + theatre = theatres[2]; + actor = new String[] {actors[7], actors[13], actors[14], actors[15], actors[16], actors[17]}; + startMonth = 9; + endMonth = 12; + time = "5:40PM"; + } else { + throw new RuntimeException("too many plays"); + } + + int rows; + int number; + + if (playCount < 6) { + rows = 3; + number = 12; + } else if (playCount < 9) { + rows = 5; + number = 9; + } else if (playCount < 12) { + rows = 11; + number = 15; + } else { + throw new RuntimeException("too many seats"); + } + + for (int month = startMonth; month <= endMonth; ++month) { + for (int day = 1; day <= 14; ++day) { + for (int row = 1; row <= rows; ++row) { + for (int count = 1; count <= number; ++count) { + String date = "2018-" + month + "-" + day; + double cost = (25 - row) * 1.25; + + writeSeat(writer, ++id, theatre, play, actor, date, time, row, count, cost, false); + } + } + } + } + } + + 
writer.write("\n"); + writer.close(); + } + } + + */ + + // **** Initial Mappings **** + + /* + + curl -X PUT "localhost:9200/seats" -H 'Content-Type: application/json' -d' + { + "mappings": { + "seat": { + "properties": { + "theatre": { "type": "keyword" }, + "play": { "type": "text" }, + "actors": { "type": "text" }, + "row": { "type": "integer" }, + "number": { "type": "integer" }, + "cost": { "type": "double" }, + "sold": { "type": "boolean" }, + "datetime": { "type": "date" }, + "date": { "type": "keyword" }, + "time": { "type": "keyword" } + } + } + } + } + ' + + */ + + // Create Ingest to Modify Dates: + + /* + + curl -X PUT "localhost:9200/_ingest/pipeline/seats" -H 'Content-Type: application/json' -d' + { + "description": "update datetime for seats", + "processors": [ + { + "script": { + "source": "String[] split(String s, char d) { int count = 0; for (char c : s.toCharArray()) { if (c == d) { ++count; } } if (count == 0) { return new String[] {s}; } String[] r = new String[count + 1]; int i0 = 0, i1 = 0; count = 0; for (char c : s.toCharArray()) { if (c == d) { r[count++] = s.substring(i0, i1); i0 = i1 + 1; } ++i1; } r[count] = s.substring(i0, i1); return r; } String[] dateSplit = split(ctx.date, (char)\"-\"); String year = dateSplit[0].trim(); String month = dateSplit[1].trim(); if (month.length() == 1) { month = \"0\" + month; } String day = dateSplit[2].trim(); if (day.length() == 1) { day = \"0\" + day; } boolean pm = ctx.time.substring(ctx.time.length() - 2).equals(\"PM\"); String[] timeSplit = split(ctx.time.substring(0, ctx.time.length() - 2), (char)\":\"); int hours = Integer.parseInt(timeSplit[0].trim()); int minutes = Integer.parseInt(timeSplit[1].trim()); if (pm) { hours += 12; } String dts = year + \"-\" + month + \"-\" + day + \"T\" + (hours < 10 ? \"0\" + hours : \"\" + hours) + \":\" + (minutes < 10 ? \"0\" + minutes : \"\" + minutes) + \":00+08:00\"; ZonedDateTime dt = ZonedDateTime.parse(dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME); ctx.datetime = dt.getLong(ChronoField.INSTANT_SECONDS)*1000L;" + } + } + ] + } + ' + + */ + + public void testIngestProcessorScript() { + assertEquals(1535785200000L, + exec("String[] split(String s, char d) {" + + " int count = 0;" + + " for (char c : s.toCharArray()) {" + + " if (c == d) {" + + " ++count;" + + " }" + + " }" + + " if (count == 0) {" + + " return new String[] {s};" + + " }" + + " String[] r = new String[count + 1];" + + " int i0 = 0, i1 = 0;" + + " count = 0;" + + " for (char c : s.toCharArray()) {" + + " if (c == d) {" + + " r[count++] = s.substring(i0, i1);" + + " i0 = i1 + 1;" + + " }" + + " ++i1;" + + " }" + + " r[count] = s.substring(i0, i1);" + + " return r;" + + "}" + + "def x = ['date': '2018-9-1', 'time': '3:00 PM'];" + + "String[] dateSplit = split(x.date, (char)'-');" + + "String year = dateSplit[0].trim();" + + "String month = dateSplit[1].trim();" + + "if (month.length() == 1) {" + + " month = '0' + month;" + + "}" + + "String day = dateSplit[2].trim();" + + "if (day.length() == 1) {" + + " day = '0' + day;" + + "}" + + "boolean pm = x.time.substring(x.time.length() - 2).equals('PM');" + + "String[] timeSplit = split(x.time.substring(0, x.time.length() - 2), (char)':');" + + "int hours = Integer.parseInt(timeSplit[0].trim());" + + "String minutes = timeSplit[1].trim();" + + "if (pm) {" + + " hours += 12;" + + "}" + + "String dts = year + '-' + month + '-' + day + " + + "'T' + (hours < 10 ? 
'0' + hours : '' + hours) + ':' + minutes + ':00+08:00';" + + "ZonedDateTime dt = ZonedDateTime.parse(dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME);" + + "return dt.getLong(ChronoField.INSTANT_SECONDS) * 1000L" + ) + ); + } + + // Post Generated Data: + + /* + + curl -XPOST localhost:9200/seats/seat/_bulk?pipeline=seats -H "Content-Type: application/x-ndjson" --data-binary "@/home/jdconrad/test/seats.json" + + */ +} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java index 1ef855d561c..88d257a0672 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java @@ -38,6 +38,7 @@ public class DefBootstrapTests extends ESTestCase { /** calls toString() on integers, twice */ public void testOneType() throws Throwable { CallSite site = DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "toString", MethodType.methodType(String.class, Object.class), @@ -58,6 +59,7 @@ public class DefBootstrapTests extends ESTestCase { public void testTwoTypes() throws Throwable { CallSite site = DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "toString", MethodType.methodType(String.class, Object.class), @@ -83,6 +85,7 @@ public class DefBootstrapTests extends ESTestCase { // if this changes, test must be rewritten assertEquals(5, DefBootstrap.PIC.MAX_DEPTH); CallSite site = DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "toString", MethodType.methodType(String.class, Object.class), @@ -109,6 +112,7 @@ public class DefBootstrapTests extends ESTestCase { /** test that we revert to the megamorphic classvalue cache and that it works as expected */ public void testMegamorphic() throws Throwable { DefBootstrap.PIC site = (DefBootstrap.PIC) DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "size", MethodType.methodType(int.class, Object.class), @@ -130,7 +134,7 @@ public class DefBootstrapTests extends ESTestCase { final IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> { Integer.toString((int)handle.invokeExact(new Object())); }); - assertEquals("Unable to find dynamic method [size] with [0] arguments for class [java.lang.Object].", iae.getMessage()); + assertEquals("dynamic method [java.lang.Object, size/0] not found", iae.getMessage()); assertTrue("Does not fail inside ClassValue.computeValue()", Arrays.stream(iae.getStackTrace()).anyMatch(e -> { return e.getMethodName().equals("computeValue") && e.getClassName().startsWith("org.elasticsearch.painless.DefBootstrap$PIC$"); @@ -141,6 +145,7 @@ public class DefBootstrapTests extends ESTestCase { public void testNullGuardAdd() throws Throwable { DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "add", MethodType.methodType(Object.class, Object.class, Object.class), @@ -153,6 +158,7 @@ public class DefBootstrapTests extends ESTestCase { public void testNullGuardAddWhenCached() throws Throwable { DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "add", MethodType.methodType(Object.class, Object.class, Object.class), @@ -166,6 
+172,7 @@ public class DefBootstrapTests extends ESTestCase { public void testNullGuardEq() throws Throwable { DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "eq", MethodType.methodType(boolean.class, Object.class, Object.class), @@ -179,6 +186,7 @@ public class DefBootstrapTests extends ESTestCase { public void testNullGuardEqWhenCached() throws Throwable { DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "eq", MethodType.methodType(boolean.class, Object.class, Object.class), @@ -197,6 +205,7 @@ public class DefBootstrapTests extends ESTestCase { public void testNoNullGuardAdd() throws Throwable { DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "add", MethodType.methodType(Object.class, int.class, Object.class), @@ -211,6 +220,7 @@ public class DefBootstrapTests extends ESTestCase { public void testNoNullGuardAddWhenCached() throws Throwable { DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "add", MethodType.methodType(Object.class, int.class, Object.class), diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionRefTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionRefTests.java index fd47db6b83d..5829593f524 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionRefTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionRefTests.java @@ -27,7 +27,6 @@ import java.lang.invoke.LambdaConversionException; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; -import static org.hamcrest.Matchers.startsWith; public class FunctionRefTests extends ScriptTestCase { @@ -193,14 +192,15 @@ public class FunctionRefTests extends ScriptTestCase { Exception e = expectScriptThrows(IllegalArgumentException.class, () -> { exec("List l = [2, 1]; l.sort(Integer::bogus); return l.get(0);"); }); - assertThat(e.getMessage(), startsWith("Unknown reference")); + assertThat(e.getMessage(), containsString("function reference [Integer::bogus/2] matching [java.util.Comparator")); } public void testQualifiedMethodMissing() { Exception e = expectScriptThrows(IllegalArgumentException.class, () -> { exec("List l = [2, 1]; l.sort(org.joda.time.ReadableDateTime::bogus); return l.get(0);", false); }); - assertThat(e.getMessage(), startsWith("Unknown reference")); + assertThat(e.getMessage(), + containsString("function reference [org.joda.time.ReadableDateTime::bogus/2] matching [java.util.Comparator")); } public void testClassMissing() { @@ -223,11 +223,12 @@ public class FunctionRefTests extends ScriptTestCase { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("List l = new ArrayList(); l.add(2); l.add(1); l.add(Integer::bogus); return l.get(0);"); }); - assertThat(expected.getMessage(), containsString("Cannot convert function reference")); + assertThat(expected.getMessage(), + containsString("cannot convert function reference [Integer::bogus] to a non-functional interface [def]")); } public void testIncompatible() { - expectScriptThrows(BootstrapMethodError.class, () -> { + 
expectScriptThrows(ClassCastException.class, () -> { exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::startsWith); return l.get(0);"); }); } @@ -236,28 +237,32 @@ public class FunctionRefTests extends ScriptTestCase { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("Optional.empty().orElseGet(String::startsWith);"); }); - assertThat(expected.getMessage(), containsString("Unknown reference")); + assertThat(expected.getMessage(), + containsString("function reference [String::startsWith/0] matching [java.util.function.Supplier")); } public void testWrongArityNotEnough() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);"); }); - assertTrue(expected.getMessage().contains("Unknown reference")); + assertThat(expected.getMessage(), containsString( + "function reference [String::isEmpty/2] matching [java.util.Comparator")); } public void testWrongArityDef() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("def y = Optional.empty(); return y.orElseGet(String::startsWith);"); }); - assertThat(expected.getMessage(), containsString("Unknown reference")); + assertThat(expected.getMessage(), + containsString("function reference [String::startsWith/0] matching [java.util.function.Supplier")); } public void testWrongArityNotEnoughDef() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("def l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);"); }); - assertThat(expected.getMessage(), containsString("Unknown reference")); + assertThat(expected.getMessage(), + containsString("function reference [String::isEmpty/2] matching [java.util.Comparator")); } public void testReturnVoid() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/InitializerTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/InitializerTests.java index d0d0b2165ca..be3db76bac2 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/InitializerTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/InitializerTests.java @@ -53,7 +53,7 @@ public class InitializerTests extends ScriptTestCase { "Object[] x = new Object[] {y, z, 1 + s, s + 'aaa'}; return x;"); assertEquals(4, objects.length); - assertEquals(new Integer(2), objects[0]); + assertEquals(Integer.valueOf(2), objects[0]); assertEquals(new ArrayList(), objects[1]); assertEquals("1aaa", objects[2]); assertEquals("aaaaaa", objects[3]); @@ -85,7 +85,7 @@ public class InitializerTests extends ScriptTestCase { list = (List)exec("int y = 2; List z = new ArrayList(); String s = 'aaa'; List x = [y, z, 1 + s, s + 'aaa']; return x;"); assertEquals(4, list.size()); - assertEquals(new Integer(2), list.get(0)); + assertEquals(Integer.valueOf(2), list.get(0)); assertEquals(new ArrayList(), list.get(1)); assertEquals("1aaa", list.get(2)); assertEquals("aaaaaa", list.get(3)); @@ -100,15 +100,15 @@ public class InitializerTests extends ScriptTestCase { map = (Map)exec("[5 : 7, -1 : 14]"); assertEquals(2, map.size()); - assertEquals(new Integer(7), map.get(5)); - assertEquals(new Integer(14), map.get(-1)); + assertEquals(Integer.valueOf(7), map.get(5)); + assertEquals(Integer.valueOf(14), map.get(-1)); map = (Map)exec("int y = 2; int z = 3; Map x = [y*z : y + z, y - z : y, z : z]; return x;"); 
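        // with y = 2 and z = 3, the map keys evaluate to 6, -1, and 3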
assertEquals(3, map.size()); - assertEquals(new Integer(5), map.get(6)); - assertEquals(new Integer(2), map.get(-1)); - assertEquals(new Integer(3), map.get(3)); + assertEquals(Integer.valueOf(5), map.get(6)); + assertEquals(Integer.valueOf(2), map.get(-1)); + assertEquals(Integer.valueOf(3), map.get(3)); map = (Map)exec("int y = 2; List z = new ArrayList(); String s = 'aaa';" + "def x = [y : z, 1 + s : s + 'aaa']; return x;"); @@ -139,7 +139,7 @@ public class InitializerTests extends ScriptTestCase { list3.add(9); assertEquals(3, map.size()); - assertEquals(new Integer(5), map.get(6)); + assertEquals(Integer.valueOf(5), map.get(6)); assertEquals(list2, map.get("s")); assertEquals(list3, map.get(3)); } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java index 20e257e5747..1f1a6f95b36 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java @@ -184,7 +184,7 @@ public class LambdaTests extends ScriptTestCase { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("def y = Optional.empty(); return y.orElseGet(x -> x);"); }); - assertTrue(expected.getMessage(), expected.getMessage().contains("Incorrect number of parameters")); + assertTrue(expected.getMessage(), expected.getMessage().contains("due to an incorrect number of arguments")); } public void testWrongArityNotEnough() { @@ -200,7 +200,7 @@ public class LambdaTests extends ScriptTestCase { exec("def l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(() -> 5).sum();"); }); - assertTrue(expected.getMessage().contains("Incorrect number of parameters")); + assertTrue(expected.getMessage(), expected.getMessage().contains("due to an incorrect number of arguments")); } public void testLambdaInFunction() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/OverloadTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/OverloadTests.java index fce827e686c..52c28799fae 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/OverloadTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/OverloadTests.java @@ -23,12 +23,12 @@ package org.elasticsearch.painless; public class OverloadTests extends ScriptTestCase { public void testMethod() { - assertEquals(2, exec("return 'abc123abc'.indexOf('c');")); - assertEquals(8, exec("return 'abc123abc'.indexOf('c', 3);")); + //assertEquals(2, exec("return 'abc123abc'.indexOf('c');")); + //assertEquals(8, exec("return 'abc123abc'.indexOf('c', 3);")); IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("return 'abc123abc'.indexOf('c', 3, 'bogus');"); }); - assertTrue(expected.getMessage().contains("[indexOf] with [3] arguments")); + assertTrue(expected.getMessage().contains("[java.lang.String, indexOf/3]")); } public void testMethodDynamic() { @@ -37,7 +37,7 @@ public class OverloadTests extends ScriptTestCase { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("def x = 'abc123abc'; return x.indexOf('c', 3, 'bogus');"); }); - assertTrue(expected.getMessage().contains("dynamic method [indexOf]")); + assertTrue(expected.getMessage().contains("dynamic method [java.lang.String, indexOf/3] not found")); } public void 
testConstructor() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java index 1f8410092a0..1460d5f2359 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java @@ -45,9 +45,9 @@ import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.function.Consumer; +import java.util.stream.Collectors; import static java.util.Comparator.comparing; -import static java.util.stream.Collectors.toList; /** * Generates an API reference from the method and type whitelists in {@link PainlessLookup}. */ @@ -56,7 +56,7 @@ public class PainlessDocGenerator { private static final PainlessLookup PAINLESS_LOOKUP = PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS); private static final Logger logger = ESLoggerFactory.getLogger(PainlessDocGenerator.class); - private static final Comparator<PainlessField> FIELD_NAME = comparing(f -> f.name); + private static final Comparator<PainlessField> FIELD_NAME = comparing(f -> f.javaField.getName()); private static final Comparator<PainlessMethod> METHOD_NAME = comparing(m -> m.javaMethod.getName()); private static final Comparator<PainlessMethod> METHOD_NUMBER_OF_PARAMS = comparing(m -> m.typeParameters.size()); private static final Comparator<PainlessConstructor> CONSTRUCTOR_NUMBER_OF_PARAMS = comparing(m -> m.typeParameters.size()); @@ -74,9 +74,10 @@ public class PainlessDocGenerator { Files.newOutputStream(indexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), false, StandardCharsets.UTF_8.name())) { emitGeneratedWarning(indexStream); - List<Class<?>> classes = PAINLESS_LOOKUP.getStructs().stream().sorted(comparing(Class::getCanonicalName)).collect(toList()); + List<Class<?>> classes = PAINLESS_LOOKUP.getClasses().stream().sorted( + Comparator.comparing(Class::getCanonicalName)).collect(Collectors.toList()); for (Class<?> clazz : classes) { - PainlessClass struct = PAINLESS_LOOKUP.getPainlessStructFromJavaClass(clazz); + PainlessClass struct = PAINLESS_LOOKUP.lookupPainlessClass(clazz); String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(clazz); if (clazz.isPrimitive()) { @@ -147,17 +148,17 @@ public class PainlessDocGenerator { emitAnchor(stream, field); stream.print("]]"); - if (Modifier.isStatic(field.modifiers)) { + if (Modifier.isStatic(field.javaField.getModifiers())) { stream.print("static "); } - emitType(stream, field.clazz); + emitType(stream, field.typeParameter); stream.print(' '); String javadocRoot = javadocRoot(field); emitJavadocLink(stream, javadocRoot, field); stream.print('['); - stream.print(field.name); + stream.print(field.javaField.getName()); stream.print(']'); if (javadocRoot.equals("java8")) { @@ -280,9 +281,9 @@ public class PainlessDocGenerator { * Anchor text for a {@link PainlessField}. 
*/ private static void emitAnchor(PrintStream stream, PainlessField field) { - emitAnchor(stream, field.target); + emitAnchor(stream, field.javaField.getDeclaringClass()); stream.print('-'); - stream.print(field.name); + stream.print(field.javaField.getName()); } private static String constructorName(PainlessConstructor constructor) { @@ -391,9 +392,9 @@ public class PainlessDocGenerator { stream.print("link:{"); stream.print(root); stream.print("-javadoc}/"); - stream.print(classUrlPath(field.target)); + stream.print(classUrlPath(field.javaField.getDeclaringClass())); stream.print(".html#"); - stream.print(field.javaName); + stream.print(field.javaField.getName()); } /** @@ -410,7 +411,7 @@ public class PainlessDocGenerator { * Pick the javadoc root for a {@link PainlessField}. */ private static String javadocRoot(PainlessField field) { - return javadocRoot(field.target); + return javadocRoot(field.javaField.getDeclaringClass()); } /** diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java index 911a50468cc..8143c39ce6f 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java @@ -252,7 +252,7 @@ public class RegexTests extends ScriptTestCase { IllegalArgumentException e = expectScriptThrows(IllegalArgumentException.class, () -> { exec("Pattern.compile('aa')"); }); - assertEquals("Unknown call [compile] with [1] arguments on type [java.util.regex.Pattern].", e.getMessage()); + assertTrue(e.getMessage().contains("[java.util.regex.Pattern, compile/1]")); } public void testBadRegexPattern() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java index 8eeb25c9676..f2d93aa759d 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java @@ -219,7 +219,7 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("def x = 'test'; return x.getClass().toString()"); }); - assertTrue(expected.getMessage().contains("Unable to find dynamic method")); + assertTrue(expected.getMessage().contains("dynamic method [java.lang.String, getClass/0] not found")); } public void testDynamicNPE() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java index c64014d81a5..12d57fab11d 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java @@ -162,12 +162,12 @@ public class NodeToStringTests extends ESTestCase { public void testECast() { Location l = new Location(getTestName(), 0); AExpression child = new EConstant(l, "test"); - PainlessCast cast = PainlessCast.standard(String.class, Integer.class, true); + PainlessCast cast = PainlessCast.originalTypetoTargetType(String.class, Integer.class, true); assertEquals("(ECast java.lang.Integer (EConstant String 'test'))", new ECast(l, child, cast).toString()); l = new 
Location(getTestName(), 1); child = new EBinary(l, Operation.ADD, new EConstant(l, "test"), new EConstant(l, 12)); - cast = PainlessCast.standard(Integer.class, Boolean.class, true); + cast = PainlessCast.originalTypetoTargetType(Integer.class, Boolean.class, true); assertEquals("(ECast java.lang.Boolean (EBinary (EConstant String 'test') + (EConstant Integer 12)))", new ECast(l, child, cast).toString()); } @@ -404,7 +404,7 @@ public class NodeToStringTests extends ESTestCase { public void testPSubCallInvoke() { Location l = new Location(getTestName(), 0); - PainlessClass c = painlessLookup.getPainlessStructFromJavaClass(Integer.class); + PainlessClass c = painlessLookup.lookupPainlessClass(Integer.class); PainlessMethod m = c.methods.get(PainlessLookupUtility.buildPainlessMethodKey("toString", 0)); PSubCallInvoke node = new PSubCallInvoke(l, m, null, emptyList()); node.prefix = new EVariable(l, "a"); @@ -459,7 +459,7 @@ public class NodeToStringTests extends ESTestCase { public void testPSubField() { Location l = new Location(getTestName(), 0); - PainlessClass s = painlessLookup.getPainlessStructFromJavaClass(Boolean.class); + PainlessClass s = painlessLookup.lookupPainlessClass(Boolean.class); PainlessField f = s.staticFields.get("TRUE"); PSubField node = new PSubField(l, f); node.prefix = new EStatic(l, "Boolean"); @@ -497,7 +497,7 @@ public class NodeToStringTests extends ESTestCase { public void testPSubShortcut() { Location l = new Location(getTestName(), 0); - PainlessClass s = painlessLookup.getPainlessStructFromJavaClass(FeatureTest.class); + PainlessClass s = painlessLookup.lookupPainlessClass(FeatureTest.class); PainlessMethod getter = s.methods.get(PainlessLookupUtility.buildPainlessMethodKey("getX", 0)); PainlessMethod setter = s.methods.get(PainlessLookupUtility.buildPainlessMethodKey("setX", 1)); PSubShortcut node = new PSubShortcut(l, "x", FeatureTest.class.getName(), getter, setter); diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/15_update.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/15_update.yml index 20047e7d482..f2e1cb616b9 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/15_update.yml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/15_update.yml @@ -132,7 +132,7 @@ body: script: lang: painless - source: "for (def key : params.keySet()) { ctx._source[key] = params[key]}" + source: "ctx._source.ctx = ctx" params: { bar: 'xxx' } - match: { error.root_cause.0.type: "remote_transport_exception" } diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/20_scriptfield.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/20_scriptfield.yml index 2914e8a916e..3be6601521e 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/20_scriptfield.yml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/20_scriptfield.yml @@ -108,7 +108,7 @@ setup: script_fields: bar: script: - source: "doc.date.value.dayOfWeek" + source: "doc.date.value.dayOfWeek.value" - match: { hits.hits.0.fields.bar.0: 7} @@ -123,7 +123,7 @@ setup: source: > StringBuilder b = new StringBuilder(); for (def date : doc.dates) { - b.append(" ").append(date.getDayOfWeek()); + b.append(" ").append(date.getDayOfWeek().value); } return b.toString().trim() diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/50_script_doc_values.yml 
b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/50_script_doc_values.yml index 617b8df61b6..4c3c204d2d9 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/50_script_doc_values.yml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/50_script_doc_values.yml @@ -95,7 +95,7 @@ setup: field: script: source: "doc.date.get(0)" - - match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12.000Z' } + - match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12Z' } - do: search: @@ -104,7 +104,7 @@ setup: field: script: source: "doc.date.value" - - match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12.000Z' } + - match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12Z' } --- "geo_point": diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java index f764364380f..546677a2be4 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java @@ -88,9 +88,9 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase> IFD getForField(MappedFieldType fieldType) { - IndexFieldData.Builder builder = fieldType.fielddataBuilder(shardContext.getFullyQualifiedIndexName()); + IndexFieldData.Builder builder = fieldType.fielddataBuilder(shardContext.getFullyQualifiedIndex().getName()); IndexFieldDataCache cache = new IndexFieldDataCache.None(); CircuitBreakerService circuitBreaker = new NoneCircuitBreakerService(); return (IFD) builder.build(shardContext.getIndexSettings(), fieldType, cache, circuitBreaker, @@ -764,5 +764,4 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder params; - private ExecutableScript executable; + private UpdateScript executable; private Map<String, Object> context; public ScriptApplier(WorkerBulkByScrollTaskState taskWorker, @@ -766,7 +766,7 @@ public abstract class AbstractAsyncBulkByScrollAction T applyScript(Consumer<Map<String, Object>> scriptBody) { IndexRequest index = new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar")); ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "type", "id", 0); - ExecutableScript executableScript = new SimpleExecutableScript(scriptBody); - ExecutableScript.Factory factory = params -> executableScript; - when(scriptService.compile(any(), eq(ExecutableScript.CONTEXT))).thenReturn(factory); - when(scriptService.compile(any(), eq(ExecutableScript.UPDATE_CONTEXT))).thenReturn(factory); + UpdateScript updateScript = new UpdateScript(Collections.emptyMap()) { + @Override + public void execute(Map<String, Object> ctx) { + scriptBody.accept(ctx); + } + }; + UpdateScript.Factory factory = params -> updateScript; + ExecutableScript simpleExecutableScript = new SimpleExecutableScript(scriptBody); + when(scriptService.compile(any(), eq(ExecutableScript.CONTEXT))).thenReturn(params -> simpleExecutableScript); + when(scriptService.compile(any(), eq(UpdateScript.CONTEXT))).thenReturn(factory); AbstractAsyncBulkByScrollAction action = action(scriptService, request().setScript(mockScript(""))); RequestWrapper<?> result = action.buildScriptApplier().apply(AbstractAsyncBulkByScrollAction.wrap(index), doc); return (result != null) ? 
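// result.self() unwraps the concrete request; result is null when the script applier dropped the request (for example, a noop)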
(T) result.self() : null; diff --git a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLSnapshotRestoreTests.java b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLSnapshotRestoreTests.java index 9eb5e37a51c..1a006f56bc3 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLSnapshotRestoreTests.java +++ b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLSnapshotRestoreTests.java @@ -20,9 +20,9 @@ package org.elasticsearch.repositories.url; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; -import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -120,7 +120,7 @@ public class URLSnapshotRestoreTests extends ESIntegTestCase { assertThat(getSnapshotsResponse.getSnapshots().size(), equalTo(1)); logger.info("--> delete snapshot"); - DeleteSnapshotResponse deleteSnapshotResponse = client.admin().cluster().prepareDeleteSnapshot("test-repo", "test-snap").get(); + AcknowledgedResponse deleteSnapshotResponse = client.admin().cluster().prepareDeleteSnapshot("test-repo", "test-snap").get(); assertAcked(deleteSnapshotResponse); logger.info("--> list available shapshot again, no snapshots should be returned"); diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java index c8c6fceb543..e310f3012a9 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java @@ -54,7 +54,6 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TcpTransport; -import org.elasticsearch.transport.TransportRequestOptions; import java.io.IOException; import java.net.InetSocketAddress; @@ -147,7 +146,6 @@ public class Netty4Transport extends TcpTransport { bootstrap.handler(getClientChannelInitializer()); - bootstrap.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, Math.toIntExact(defaultConnectionProfile.getConnectTimeout().millis())); bootstrap.option(ChannelOption.TCP_NODELAY, TCP_NO_DELAY.get(settings)); bootstrap.option(ChannelOption.SO_KEEPALIVE, TCP_KEEP_ALIVE.get(settings)); @@ -175,14 +173,8 @@ public class Netty4Transport extends TcpTransport { String name = profileSettings.profileName; if (logger.isDebugEnabled()) { logger.debug("using profile[{}], worker_count[{}], port[{}], bind_host[{}], publish_host[{}], compress[{}], " - + "connect_timeout[{}], connections_per_node[{}/{}/{}/{}/{}], receive_predictor[{}->{}]", + + "receive_predictor[{}->{}]", name, workerCount, profileSettings.portOrRange, profileSettings.bindHosts, profileSettings.publishHosts, compress, - defaultConnectionProfile.getConnectTimeout(), - defaultConnectionProfile.getNumConnectionsPerType(TransportRequestOptions.Type.RECOVERY), - 
defaultConnectionProfile.getNumConnectionsPerType(TransportRequestOptions.Type.BULK), - defaultConnectionProfile.getNumConnectionsPerType(TransportRequestOptions.Type.REG), - defaultConnectionProfile.getNumConnectionsPerType(TransportRequestOptions.Type.STATE), - defaultConnectionProfile.getNumConnectionsPerType(TransportRequestOptions.Type.PING), receivePredictorMin, receivePredictorMax); } @@ -267,8 +259,12 @@ public class Netty4Transport extends TcpTransport { return esChannel; } - ScheduledPing getPing() { - return scheduledPing; + long successfulPingCount() { + return successfulPings.count(); + } + + long failedPingCount() { + return failedPings.count(); } @Override diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java index 0fa331ba138..a595de3a47e 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java @@ -58,6 +58,7 @@ import java.util.concurrent.TimeUnit; import static io.netty.handler.codec.http.HttpHeaderNames.HOST; import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1; +import static org.junit.Assert.fail; /** * Tiny helper to send http requests over netty. @@ -145,7 +146,9 @@ class Netty4HttpClient implements Closeable { for (HttpRequest request : requests) { channelFuture.channel().writeAndFlush(request); } - latch.await(30, TimeUnit.SECONDS); + if (latch.await(30L, TimeUnit.SECONDS) == false) { + fail("Failed to get all expected responses."); + } } finally { if (channelFuture != null) { diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java index 01c5f5b6170..fae4082e818 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java @@ -26,16 +26,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TcpTransport; -import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.transport.TransportRequestHandler; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportResponseHandler; @@ -83,22 +80,19 @@ public class Netty4ScheduledPingTests extends ESTestCase { serviceB.connectToNode(nodeA); assertBusy(() -> { - assertThat(nettyA.getPing().getSuccessfulPings(), greaterThan(100L)); - assertThat(nettyB.getPing().getSuccessfulPings(), greaterThan(100L)); + assertThat(nettyA.successfulPingCount(), greaterThan(100L)); + assertThat(nettyB.successfulPingCount(), greaterThan(100L)); }); - 
assertThat(nettyA.getPing().getFailedPings(), equalTo(0L)); - assertThat(nettyB.getPing().getFailedPings(), equalTo(0L)); + assertThat(nettyA.failedPingCount(), equalTo(0L)); + assertThat(nettyB.failedPingCount(), equalTo(0L)); serviceA.registerRequestHandler("internal:sayHello", TransportRequest.Empty::new, ThreadPool.Names.GENERIC, - new TransportRequestHandler<TransportRequest.Empty>() { - @Override - public void messageReceived(TransportRequest.Empty request, TransportChannel channel, Task task) { - try { - channel.sendResponse(TransportResponse.Empty.INSTANCE, TransportResponseOptions.EMPTY); - } catch (IOException e) { - logger.error("Unexpected failure", e); - fail(e.getMessage()); - } + (request, channel, task) -> { + try { + channel.sendResponse(TransportResponse.Empty.INSTANCE, TransportResponseOptions.EMPTY); + } catch (IOException e) { + logger.error("Unexpected failure", e); + fail(e.getMessage()); } }); @@ -130,11 +124,11 @@ public class Netty4ScheduledPingTests extends ESTestCase { } assertBusy(() -> { - assertThat(nettyA.getPing().getSuccessfulPings(), greaterThan(200L)); - assertThat(nettyB.getPing().getSuccessfulPings(), greaterThan(200L)); + assertThat(nettyA.successfulPingCount(), greaterThan(200L)); + assertThat(nettyB.successfulPingCount(), greaterThan(200L)); }); - assertThat(nettyA.getPing().getFailedPings(), equalTo(0L)); - assertThat(nettyB.getPing().getFailedPings(), equalTo(0L)); + assertThat(nettyA.failedPingCount(), equalTo(0L)); + assertThat(nettyB.failedPingCount(), equalTo(0L)); Releasables.close(serviceA, serviceB); terminate(threadPool); diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java index 8d628ace2ee..9d6f016086c 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.transport.netty4; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -87,13 +86,6 @@ public class SimpleNetty4TransportTests extends AbstractSimpleTransportTestCase return transportService; } - @Override - protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { - final Netty4Transport t = (Netty4Transport) transport; - final TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; - CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); - } - public void testConnectException() throws UnknownHostException { try { serviceA.connectToNode(new DiscoveryNode("C", new TransportAddress(InetAddress.getByName("localhost"), 9876), diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle index ad5a7b7c57b..1883e3bf1b9 100644 --- a/plugins/analysis-icu/build.gradle +++ b/plugins/analysis-icu/build.gradle @@ -30,9 +30,9 @@ forbiddenApis { dependencies { compile "org.apache.lucene:lucene-analyzers-icu:${versions.lucene}" - compile 
'com.ibm.icu:icu4j:62.1' + compile "com.ibm.icu:icu4j:${versions.icu4j}" } dependencyLicenses { mapping from: /lucene-.*/, to: 'lucene' -} \ No newline at end of file +} diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..1e79e1e70ef --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +a010e852be8d56efe1906e6da5292e4541239724 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 5b6947a9c75..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7a37816def72a748416c4ae8b0f6817e30efb99f \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..2d9669e4362 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +88e0ed90d433a9088528485cd4f59311735d92a4 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index d39638c1884..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ca7437178cdbf7b8bfe0d75c75e3c8eb93925724 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..f7b8fdd4bc1 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +0daec9ac3c4bba5f91b1bc413c651b7a98313982 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 21c25d2bb24..00000000000 --- a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3f5dec44f380d6d58bc1c8aec51964fcb5390b60 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..80cf627011b --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +f5af81eec04c1da0d6969cff18f360ff379b1bf7 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index f58c597eadd..00000000000 --- 
a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -453bf1d60df0415439095624e0b3e42492ad4716 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..14be684b96f --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +9e649088ee298293aa95a05391dff9cb0582648e \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 8ccec8dbf37..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -70095a45257bca9f46629b5fb6cedf9eff5e2b07 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..ea55c790537 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +47fb370054ba7413d050f13c177edf01180c31ca \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index ec9c33119f5..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7199d6962d268b7877f7b5160e98e4ff21cce5c7 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 00000000000..2d6f580c35a --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +bc0708acbac195772b67b5ad2e9c4683d27ff450 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index ba9148ef1b3..00000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -12aff508d39d206a1aead5013ecd11882062eb06 \ No newline at end of file diff --git a/plugins/examples/custom-settings/build.gradle b/plugins/examples/custom-settings/build.gradle index e0e728cec24..3caf29c8513 100644 --- a/plugins/examples/custom-settings/build.gradle +++ b/plugins/examples/custom-settings/build.gradle @@ -16,13 +16,14 @@ * specific language governing permissions and limitations * under the License. 
*/ - apply plugin: 'elasticsearch.esplugin' esplugin { name 'custom-settings' description 'An example plugin showing how to register custom settings' classname 'org.elasticsearch.example.customsettings.ExampleCustomSettingsPlugin' + licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt') + noticeFile rootProject.file('NOTICE.txt') } integTestCluster { diff --git a/plugins/examples/custom-suggester/build.gradle b/plugins/examples/custom-suggester/build.gradle new file mode 100644 index 00000000000..977e467391d --- /dev/null +++ b/plugins/examples/custom-suggester/build.gradle @@ -0,0 +1,35 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +apply plugin: 'elasticsearch.esplugin' + +esplugin { + name 'custom-suggester' + description 'An example plugin showing how to write and register a custom suggester' + classname 'org.elasticsearch.example.customsuggester.CustomSuggesterPlugin' + licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt') + noticeFile rootProject.file('NOTICE.txt') +} + +integTestCluster { + numNodes = 2 +} + +// this plugin has no unit tests, only rest tests +tasks.test.enabled = false diff --git a/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggester.java b/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggester.java new file mode 100644 index 00000000000..b6a5b5e8f84 --- /dev/null +++ b/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggester.java @@ -0,0 +1,62 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.example.customsuggester;
+
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.util.CharsRefBuilder;
+import org.elasticsearch.common.text.Text;
+import org.elasticsearch.search.suggest.Suggest;
+import org.elasticsearch.search.suggest.Suggester;
+
+import java.util.Locale;
+
+public class CustomSuggester extends Suggester<CustomSuggestionContext> {
+
+    // This is a pretty dumb implementation which returns the original text + fieldName + custom config option + 12 or 123
+    @Override
+    public Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> innerExecute(
+        String name,
+        CustomSuggestionContext suggestion,
+        IndexSearcher searcher,
+        CharsRefBuilder spare) {
+
+        // get the suggest text from the suggestion context
+        String text = suggestion.getText().utf8ToString();
+
+        // create two suggestions with 12 and 123 appended
+        CustomSuggestion response = new CustomSuggestion(name, suggestion.getSize(), "suggestion-dummy-value");
+
+        CustomSuggestion.Entry entry = new CustomSuggestion.Entry(new Text(text), 0, text.length(), "entry-dummy-value");
+
+        String firstOption =
+            String.format(Locale.ROOT, "%s-%s-%s-%s", text, suggestion.getField(), suggestion.options.get("suffix"), "12");
+        CustomSuggestion.Entry.Option option12 = new CustomSuggestion.Entry.Option(new Text(firstOption), 0.9f, "option-dummy-value-1");
+        entry.addOption(option12);
+
+        String secondOption =
+            String.format(Locale.ROOT, "%s-%s-%s-%s", text, suggestion.getField(), suggestion.options.get("suffix"), "123");
+        CustomSuggestion.Entry.Option option123 = new CustomSuggestion.Entry.Option(new Text(secondOption), 0.8f, "option-dummy-value-2");
+        entry.addOption(option123);
+
+        response.addTerm(entry);
+
+        return response;
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponse.java b/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggesterPlugin.java
similarity index 55%
rename from server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponse.java
rename to plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggesterPlugin.java
index 741c105866f..91ffa672e53 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponse.java
+++ b/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggesterPlugin.java
@@ -17,21 +17,24 @@
  * under the License.
  */
-package org.elasticsearch.action.admin.cluster.storedscripts;
+package org.elasticsearch.example.customsuggester;
 
-import org.elasticsearch.action.support.master.AcknowledgedResponse;
-import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.plugins.SearchPlugin;
 
-public class DeleteStoredScriptResponse extends AcknowledgedResponse {
+import java.util.Collections;
+import java.util.List;
 
-    DeleteStoredScriptResponse() {
-    }
-
-    public DeleteStoredScriptResponse(boolean acknowledged) {
-        super(acknowledged);
-    }
-
-    public static DeleteStoredScriptResponse fromXContent(XContentParser parser) {
-        return new DeleteStoredScriptResponse(parseAcknowledged(parser));
+public class CustomSuggesterPlugin extends Plugin implements SearchPlugin {
+    @Override
+    public List<SearchPlugin.SuggesterSpec<?>> getSuggesters() {
+        return Collections.singletonList(
+            new SearchPlugin.SuggesterSpec<>(
+                CustomSuggestionBuilder.SUGGESTION_NAME,
+                CustomSuggestionBuilder::new,
+                CustomSuggestionBuilder::fromXContent,
+                CustomSuggestion::new
+            )
+        );
     }
 }
diff --git a/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestion.java b/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestion.java
new file mode 100644
index 00000000000..f7ec27b7af0
--- /dev/null
+++ b/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestion.java
@@ -0,0 +1,227 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.example.customsuggester;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.text.Text;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.search.suggest.Suggest;
+
+import java.io.IOException;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
+public class CustomSuggestion extends Suggest.Suggestion<CustomSuggestion.Entry> {
+
+    public static final int TYPE = 999;
+
+    public static final ParseField DUMMY = new ParseField("dummy");
+
+    private String dummy;
+
+    public CustomSuggestion(String name, int size, String dummy) {
+        super(name, size);
+        this.dummy = dummy;
+    }
+
+    public CustomSuggestion(StreamInput in) throws IOException {
+        super(in);
+        dummy = in.readString();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeString(dummy);
+    }
+
+    @Override
+    public String getWriteableName() {
+        return CustomSuggestionBuilder.SUGGESTION_NAME;
+    }
+
+    @Override
+    public int getWriteableType() {
+        return TYPE;
+    }
+
+    /**
+     * A meaningless value used to test that plugin suggesters can add fields to their Suggestion types
+     *
+     * This can't be serialized to xcontent because Suggestions appear in xcontent as an array of entries, so there is no place
+     * to add a custom field. But we can still use a custom field internally and use it to define a Suggestion's behavior
+     */
+    public String getDummy() {
+        return dummy;
+    }
+
+    @Override
+    protected Entry newEntry() {
+        return new Entry();
+    }
+
+    @Override
+    protected Entry newEntry(StreamInput in) throws IOException {
+        return new Entry(in);
+    }
+
+    public static CustomSuggestion fromXContent(XContentParser parser, String name) throws IOException {
+        CustomSuggestion suggestion = new CustomSuggestion(name, -1, null);
+        parseEntries(parser, suggestion, Entry::fromXContent);
+        return suggestion;
+    }
+
+    public static class Entry extends Suggest.Suggestion.Entry<CustomSuggestion.Entry.Option> {
+
+        private static final ObjectParser<Entry, Void> PARSER = new ObjectParser<>("CustomSuggestionEntryParser", true, Entry::new);
+
+        static {
+            declareCommonFields(PARSER);
+            PARSER.declareString((entry, dummy) -> entry.dummy = dummy, DUMMY);
+            PARSER.declareObjectArray(Entry::addOptions, (p, c) -> Option.fromXContent(p), new ParseField(OPTIONS));
+        }
+
+        private String dummy;
+
+        public Entry() {}
+
+        public Entry(Text text, int offset, int length, String dummy) {
+            super(text, offset, length);
+            this.dummy = dummy;
+        }
+
+        public Entry(StreamInput in) throws IOException {
+            super(in);
+            dummy = in.readString();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeString(dummy);
+        }
+
+        @Override
+        protected Option newOption() {
+            return new Option();
+        }
+
+        @Override
+        protected Option newOption(StreamInput in) throws IOException {
+            return new Option(in);
+        }
+
+        /*
+         * the value of dummy will always be the same, so this just tests that we can merge entries with custom fields
+         */
+        @Override
+        protected void merge(Suggest.Suggestion.Entry

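To make the custom-suggester example above concrete: innerExecute builds one entry carrying two options, each formed from the suggest text, the target field, and the plugin's "suffix" option. A minimal, self-contained sketch of those two values, assuming a suggest text of "hello", a field named "body", and a suffix of "sfx" (these inputs, and the class and method names below, are illustrative assumptions rather than code from this diff):

import java.util.Locale;

class CustomSuggesterWalkthrough {
    public static void main(String[] args) {
        String text = "hello";  // stands in for suggestion.getText().utf8ToString()
        String field = "body";  // stands in for suggestion.getField()
        Object suffix = "sfx";  // stands in for suggestion.options.get("suffix")
        // Mirrors the two String.format calls in CustomSuggester.innerExecute:
        System.out.println(String.format(Locale.ROOT, "%s-%s-%s-%s", text, field, suffix, "12"));  // hello-body-sfx-12  (scored 0.9f)
        System.out.println(String.format(Locale.ROOT, "%s-%s-%s-%s", text, field, suffix, "123")); // hello-body-sfx-123 (scored 0.8f)
    }
}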
listener) {
+    protected void masterOperation(final PutIndexTemplateRequest request, final ClusterState state, final ActionListener<AcknowledgedResponse> listener) {
         String cause = request.cause();
         if (cause.length() == 0) {
             cause = "api";
@@ -91,7 +92,7 @@ public class TransportPutIndexTemplateAction extends TransportMasterNodeAction

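A pattern recurs through the remaining server-side hunks: response classes that carried nothing beyond an acknowledged flag (UpgradeSettingsResponse, WritePipelineResponse, the renamed-away DeleteStoredScriptResponse) are collapsed onto the shared AcknowledgedResponse. A minimal caller-side sketch of the resulting shape; the surrounding request setup and the `client` variable are assumed for illustration, not quoted from the diff:

client.executeLocally(UpgradeSettingsAction.INSTANCE, upgradeSettingsRequest,
    new ActionListener<AcknowledgedResponse>() {
        @Override
        public void onResponse(AcknowledgedResponse response) {
            // the acknowledged flag is the only payload these response types ever carried
            boolean acknowledged = response.isAcknowledged();
        }

        @Override
        public void onFailure(Exception e) {
            // failure handling is unchanged by the consolidation
        }
    });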
listener) {
         UpgradeSettingsRequest upgradeSettingsRequest = new UpgradeSettingsRequest(upgradeResponse.versions());
-        client.executeLocally(UpgradeSettingsAction.INSTANCE, upgradeSettingsRequest, new ActionListener<UpgradeSettingsResponse>() {
+        client.executeLocally(UpgradeSettingsAction.INSTANCE, upgradeSettingsRequest, new ActionListener<AcknowledgedResponse>() {
             @Override
-            public void onResponse(UpgradeSettingsResponse updateSettingsResponse) {
+            public void onResponse(AcknowledgedResponse updateSettingsResponse) {
                 listener.onResponse(upgradeResponse);
             }
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java
index 2e428e85efc..7c4aa406b21 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java
@@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.indices.upgrade.post;
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
@@ -35,7 +36,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
-public class TransportUpgradeSettingsAction extends TransportMasterNodeAction<UpgradeSettingsRequest, UpgradeSettingsResponse> {
+public class TransportUpgradeSettingsAction extends TransportMasterNodeAction<UpgradeSettingsRequest, AcknowledgedResponse> {
 
     private final MetaDataUpdateSettingsService updateSettingsService;
 
@@ -58,12 +59,12 @@ public class TransportUpgradeSettingsAction extends TransportMasterNodeAction
listener) {
+    protected void masterOperation(final UpgradeSettingsRequest request, final ClusterState state, final ActionListener<AcknowledgedResponse> listener) {
         UpgradeSettingsClusterStateUpdateRequest clusterStateUpdateRequest = new UpgradeSettingsClusterStateUpdateRequest()
             .ackTimeout(request.timeout())
             .versions(request.versions())
@@ -72,7 +73,7 @@ public class TransportUpgradeSettingsAction extends TransportMasterNodeAction
() {
             @Override
             public void onResponse(ClusterStateUpdateResponse response) {
-                listener.onResponse(new UpgradeSettingsResponse(response.isAcknowledged()));
+                listener.onResponse(new AcknowledgedResponse(response.isAcknowledged()));
             }
 
             @Override
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java
index 5f1ee88e34e..9b1ff0f616a 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java
@@ -20,8 +20,9 @@
 package org.elasticsearch.action.admin.indices.upgrade.post;
 
 import org.elasticsearch.action.Action;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 
-public class UpgradeSettingsAction extends Action<UpgradeSettingsResponse> {
+public class UpgradeSettingsAction extends Action<AcknowledgedResponse> {
 
     public static final UpgradeSettingsAction INSTANCE = new UpgradeSettingsAction();
     public static final String NAME = "internal:indices/admin/upgrade";
 
@@ -31,7 +32,7 @@ public class UpgradeSettingsAction extends Action
     }
 
     @Override
-    public UpgradeSettingsResponse newResponse() {
-        return new UpgradeSettingsResponse();
+    public AcknowledgedResponse newResponse() {
+        return new AcknowledgedResponse();
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequestBuilder.java
index 9ce5aeb2d2d..e3a48066bbf 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequestBuilder.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequestBuilder.java
@@ -21,6 +21,7 @@ package org.elasticsearch.action.admin.indices.upgrade.post;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.collect.Tuple;
 
@@ -29,7 +30,7 @@ import java.util.Map;
 /**
  * Builder for an update index settings request
  */
-public class UpgradeSettingsRequestBuilder extends AcknowledgedRequestBuilder<UpgradeSettingsRequest, UpgradeSettingsResponse, UpgradeSettingsRequestBuilder> {
+public class UpgradeSettingsRequestBuilder extends AcknowledgedRequestBuilder<UpgradeSettingsRequest, AcknowledgedResponse, UpgradeSettingsRequestBuilder> {
 
     public UpgradeSettingsRequestBuilder(ElasticsearchClient client, UpgradeSettingsAction action) {
         super(client, action, new UpgradeSettingsRequest());
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResultHolder.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResultHolder.java
deleted file mode 100644
index 3e7ee41b914..00000000000
--- a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResultHolder.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.action.bulk;
-
-import org.elasticsearch.action.DocWriteResponse;
-import org.elasticsearch.common.Nullable;
-import org.elasticsearch.index.engine.Engine;
-import org.elasticsearch.index.engine.VersionConflictEngineException;
-
-/**
- * A struct-like holder for a bulk items response, result, and the resulting
- * replica operation to be executed.
- */
-class BulkItemResultHolder {
-    public final @Nullable DocWriteResponse response;
-    public final @Nullable Engine.Result operationResult;
-    public final BulkItemRequest replicaRequest;
-
-    BulkItemResultHolder(@Nullable DocWriteResponse response,
-                         @Nullable Engine.Result operationResult,
-                         BulkItemRequest replicaRequest) {
-        this.response = response;
-        this.operationResult = operationResult;
-        this.replicaRequest = replicaRequest;
-    }
-
-    public boolean isVersionConflict() {
-        return operationResult == null ? false :
-            operationResult.getFailure() instanceof VersionConflictEngineException;
-    }
-}
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkPrimaryExecutionContext.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkPrimaryExecutionContext.java
new file mode 100644
index 00000000000..5f61d90d500
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkPrimaryExecutionContext.java
@@ -0,0 +1,345 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.bulk;
+
+import org.elasticsearch.action.DocWriteRequest;
+import org.elasticsearch.action.DocWriteResponse;
+import org.elasticsearch.action.delete.DeleteResponse;
+import org.elasticsearch.action.index.IndexResponse;
+import org.elasticsearch.action.support.replication.ReplicationResponse;
+import org.elasticsearch.action.support.replication.TransportWriteAction;
+import org.elasticsearch.index.engine.Engine;
+import org.elasticsearch.index.shard.IndexShard;
+import org.elasticsearch.index.translog.Translog;
+
+import java.util.Arrays;
+
+/**
+ * This is a utility class that holds the per-request state needed to perform bulk operations on the primary.
+ * More specifically, it maintains an index to the currently executing bulk item, which allows execution
+ * to stop and wait for external events such as mapping updates.
+ */
+class BulkPrimaryExecutionContext {
+
+    enum ItemProcessingState {
+        /** Item execution is ready to start, no operations have been performed yet */
+        INITIAL,
+        /**
+         * The incoming request has been translated to a request that can be executed on the shard.
+         * This is used to convert update requests into fully specified index or delete requests.
+         */
+        TRANSLATED,
+        /**
+         * The request cannot execute with the current mapping and should wait for a new mapping
+         * to arrive from the master. A mapping request for the needed changes has already been
+         * submitted.
+         */
+        WAIT_FOR_MAPPING_UPDATE,
+        /**
+         * The request should be executed again, but there is no need to wait for an external event.
+         * This is needed to support retry on conflicts during updates.
+         */
+        IMMEDIATE_RETRY,
+        /** The request has been executed on the primary shard (successfully or not) */
+        EXECUTED,
+        /**
+         * No further handling of the current request is needed. The result has been converted to a user response
+         * and execution can continue to the next item (if available).
+         */
+        COMPLETED
+    }
+
+    private final BulkShardRequest request;
+    private final IndexShard primary;
+    private Translog.Location locationToSync = null;
+    private int currentIndex = -1;
+
+    private ItemProcessingState currentItemState;
+    private DocWriteRequest requestToExecute;
+    private BulkItemResponse executionResult;
+    private int retryCounter;
+
+
+    BulkPrimaryExecutionContext(BulkShardRequest request, IndexShard primary) {
+        this.request = request;
+        this.primary = primary;
+        advance();
+    }
+
+
+    private int findNextNonAborted(int startIndex) {
+        final int length = request.items().length;
+        while (startIndex < length && isAborted(request.items()[startIndex].getPrimaryResponse())) {
+            startIndex++;
+        }
+        return startIndex;
+    }
+
+    private static boolean isAborted(BulkItemResponse response) {
+        return response != null && response.isFailed() && response.getFailure().isAborted();
+    }
+
+    /** move to the next item to execute */
+    private void advance() {
+        assert currentItemState == ItemProcessingState.COMPLETED || currentIndex == -1 :
+            "moving to next but current item wasn't completed (state: " + currentItemState + ")";
+        currentItemState = ItemProcessingState.INITIAL;
+        currentIndex = findNextNonAborted(currentIndex + 1);
+        retryCounter = 0;
+        requestToExecute = null;
+        executionResult = null;
+        assert assertInvariants(ItemProcessingState.INITIAL);
+    }
+
+    /** gets the current, untranslated item request */
+    public DocWriteRequest getCurrent() {
+        return getCurrentItem().request();
+    }
+
+    public BulkShardRequest getBulkShardRequest() {
+        return request;
+    }
+
+    /** returns the result of the request that has been executed on the shard */
+    public BulkItemResponse getExecutionResult() {
+        assert assertInvariants(ItemProcessingState.EXECUTED);
+        return executionResult;
+    }
+
+    /** returns the number of times the current operation has been retried */
+    public int getRetryCounter() {
+        return retryCounter;
+    }
+
+    /** returns true if the current request has been executed on the primary */
+    public boolean isOperationExecuted() {
+        return currentItemState == ItemProcessingState.EXECUTED;
+    }
+
+    /** returns true if the request needs to wait for a mapping update to arrive from the master */
+    public boolean requiresWaitingForMappingUpdate() {
+        return currentItemState == ItemProcessingState.WAIT_FOR_MAPPING_UPDATE;
+    }
+
+    /** returns true if the current request should be retried without waiting for an external event */
+    public boolean requiresImmediateRetry() {
+        return currentItemState == ItemProcessingState.IMMEDIATE_RETRY;
+    }
+
+    /**
+     * returns true if the current request has been completed and its result translated to a
+     * user-facing response
+     */
+    public boolean isCompleted() {
+        return currentItemState == ItemProcessingState.COMPLETED;
+    }
+
+    /**
+     * returns true if the current request is in INITIAL state
+     */
+    public boolean isInitial() {
+        return currentItemState == ItemProcessingState.INITIAL;
+    }
+
+    /**
+     * returns true if {@link #advance()} has moved the current item beyond the
+     * end of the {@link BulkShardRequest#items()} array.
+     */
+    public boolean hasMoreOperationsToExecute() {
+        return currentIndex < request.items().length;
+    }
+
+
+    /** returns the name of the index the current request used */
+    public String getConcreteIndex() {
+        return getCurrentItem().index();
+    }
+
+    /** returns any primary response that was set by a previous primary */
+    public BulkItemResponse getPreviousPrimaryResponse() {
+        return getCurrentItem().getPrimaryResponse();
+    }
+
+    /** returns the translog location that needs to be synced in order to persist all operations executed so far */
+    public Translog.Location getLocationToSync() {
+        assert hasMoreOperationsToExecute() == false;
+        // we always get to the end of the list by using advance, which in turn sets the state to INITIAL
+        assert assertInvariants(ItemProcessingState.INITIAL);
+        return locationToSync;
+    }
+
+    private BulkItemRequest getCurrentItem() {
+        return request.items()[currentIndex];
+    }
+
+    /** returns the primary shard */
+    public IndexShard getPrimary() {
+        return primary;
+    }
+
+    /**
+     * sets the request that should actually be executed on the primary. This can be different from the request
+     * received from the user (specifically, an update request is translated to an indexing or delete request).
+     */
+    public void setRequestToExecute(DocWriteRequest writeRequest) {
+        assert assertInvariants(ItemProcessingState.INITIAL);
+        requestToExecute = writeRequest;
+        currentItemState = ItemProcessingState.TRANSLATED;
+        assert assertInvariants(ItemProcessingState.TRANSLATED);
+    }
+
+    /** returns the request that should be executed on the shard. */
+    public <T extends DocWriteRequest<?>> T getRequestToExecute() {
+        assert assertInvariants(ItemProcessingState.TRANSLATED);
+        return (T) requestToExecute;
+    }
+
+    /** indicates that the current operation cannot be completed and needs to wait for a new mapping from the master */
+    public void markAsRequiringMappingUpdate() {
+        assert assertInvariants(ItemProcessingState.TRANSLATED);
+        currentItemState = ItemProcessingState.WAIT_FOR_MAPPING_UPDATE;
+        requestToExecute = null;
+        assert assertInvariants(ItemProcessingState.WAIT_FOR_MAPPING_UPDATE);
+    }
+
+    /** resets the current item state, preparing for a new execution */
+    public void resetForExecutionForRetry() {
+        assertInvariants(ItemProcessingState.WAIT_FOR_MAPPING_UPDATE, ItemProcessingState.EXECUTED);
+        currentItemState = ItemProcessingState.INITIAL;
+        requestToExecute = null;
+        executionResult = null;
+        assertInvariants(ItemProcessingState.INITIAL);
+    }
+
+    /** completes the operation without doing anything on the primary */
+    public void markOperationAsNoOp(DocWriteResponse response) {
+        assertInvariants(ItemProcessingState.INITIAL);
+        executionResult = new BulkItemResponse(getCurrentItem().id(), getCurrentItem().request().opType(), response);
+        currentItemState = ItemProcessingState.EXECUTED;
+        assertInvariants(ItemProcessingState.EXECUTED);
+    }
+
+    /** indicates that the operation needs to be failed as the required mapping didn't arrive in time */
+    public void failOnMappingUpdate(Exception cause) {
+        assert assertInvariants(ItemProcessingState.WAIT_FOR_MAPPING_UPDATE);
+        currentItemState = ItemProcessingState.EXECUTED;
+        final DocWriteRequest docWriteRequest = getCurrentItem().request();
+        executionResult = new BulkItemResponse(getCurrentItem().id(), docWriteRequest.opType(),
+            // Make sure to use getCurrentItem().index() here; if you use docWriteRequest.index() it will use the
+            // concrete index instead of an alias if used!
+ new BulkItemResponse.Failure(getCurrentItem().index(), docWriteRequest.type(), docWriteRequest.id(), cause)); + markAsCompleted(executionResult); + } + + /** the current operation has been executed on the primary with the specified result */ + public void markOperationAsExecuted(Engine.Result result) { + assertInvariants(ItemProcessingState.TRANSLATED); + final BulkItemRequest current = getCurrentItem(); + DocWriteRequest docWriteRequest = getRequestToExecute(); + switch (result.getResultType()) { + case SUCCESS: + final DocWriteResponse response; + if (result.getOperationType() == Engine.Operation.TYPE.INDEX) { + Engine.IndexResult indexResult = (Engine.IndexResult) result; + response = new IndexResponse(primary.shardId(), requestToExecute.type(), requestToExecute.id(), + result.getSeqNo(), result.getTerm(), indexResult.getVersion(), indexResult.isCreated()); + } else if (result.getOperationType() == Engine.Operation.TYPE.DELETE) { + Engine.DeleteResult deleteResult = (Engine.DeleteResult) result; + response = new DeleteResponse(primary.shardId(), requestToExecute.type(), requestToExecute.id(), + deleteResult.getSeqNo(), result.getTerm(), deleteResult.getVersion(), deleteResult.isFound()); + + } else { + throw new AssertionError("unknown result type :" + result.getResultType()); + } + executionResult = new BulkItemResponse(current.id(), current.request().opType(), response); + // set a blank ShardInfo so we can safely send it to the replicas. We won't use it in the real response though. + executionResult.getResponse().setShardInfo(new ReplicationResponse.ShardInfo()); + locationToSync = TransportWriteAction.locationToSync(locationToSync, result.getTranslogLocation()); + break; + case FAILURE: + executionResult = new BulkItemResponse(current.id(), docWriteRequest.opType(), + // Make sure to use request.index() here, if you + // use docWriteRequest.index() it will use the + // concrete index instead of an alias if used! + new BulkItemResponse.Failure(request.index(), docWriteRequest.type(), docWriteRequest.id(), + result.getFailure(), result.getSeqNo())); + break; + default: + throw new AssertionError("unknown result type for " + getCurrentItem() + ": " + result.getResultType()); + } + currentItemState = ItemProcessingState.EXECUTED; + } + + /** finishes the execution of the current request, with the response that should be returned to the user */ + public void markAsCompleted(BulkItemResponse translatedResponse) { + assertInvariants(ItemProcessingState.EXECUTED); + assert executionResult != null && translatedResponse.getItemId() == executionResult.getItemId(); + assert translatedResponse.getItemId() == getCurrentItem().id(); + + if (translatedResponse.isFailed() == false && requestToExecute != null && requestToExecute != getCurrent()) { + request.items()[currentIndex] = new BulkItemRequest(request.items()[currentIndex].id(), requestToExecute); + } + getCurrentItem().setPrimaryResponse(translatedResponse); + currentItemState = ItemProcessingState.COMPLETED; + advance(); + } + + /** builds the bulk shard response to return to the user */ + public BulkShardResponse buildShardResponse() { + assert hasMoreOperationsToExecute() == false; + return new BulkShardResponse(request.shardId(), + Arrays.stream(request.items()).map(BulkItemRequest::getPrimaryResponse).toArray(BulkItemResponse[]::new)); + } + + private boolean assertInvariants(ItemProcessingState... 
expectedCurrentState) { + assert Arrays.asList(expectedCurrentState).contains(currentItemState): + "expected current state [" + currentItemState + "] to be one of " + Arrays.toString(expectedCurrentState); + assert currentIndex >= 0 : currentIndex; + assert retryCounter >= 0 : retryCounter; + switch (currentItemState) { + case INITIAL: + assert requestToExecute == null : requestToExecute; + assert executionResult == null : executionResult; + break; + case TRANSLATED: + assert requestToExecute != null; + assert executionResult == null : executionResult; + break; + case WAIT_FOR_MAPPING_UPDATE: + assert requestToExecute == null; + assert executionResult == null : executionResult; + break; + case IMMEDIATE_RETRY: + assert requestToExecute != null; + assert executionResult == null : executionResult; + break; + case EXECUTED: + // requestToExecute can be null if the update ended up as NOOP + assert executionResult != null; + break; + case COMPLETED: + assert requestToExecute != null; + assert executionResult != null; + assert getCurrentItem().getPrimaryResponse() != null; + break; + } + return true; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index 989172b711a..6698aa4b62a 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -523,22 +523,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques return -1; } - /** - * @return Whether this bulk request contains index request with an ingest pipeline enabled. - */ - public boolean hasIndexRequestsWithPipelines() { - for (DocWriteRequest actionRequest : requests) { - if (actionRequest instanceof IndexRequest) { - IndexRequest indexRequest = (IndexRequest) actionRequest; - if (Strings.hasText(indexRequest.getPipeline())) { - return true; - } - } - } - - return false; - } - @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 939b0b70249..e3e94e82339 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -47,6 +47,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -54,6 +55,7 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndexClosedException; @@ -125,15 +127,6 @@ public class TransportBulkAction extends HandledTransportAction listener) { - if (bulkRequest.hasIndexRequestsWithPipelines()) { - if 
(clusterService.localNode().isIngestNode()) { - processBulkIndexIngestRequest(task, bulkRequest, listener); - } else { - ingestForwarder.forwardIngestRequest(BulkAction.INSTANCE, bulkRequest, listener); - } - return; - } - final long startTime = relativeTime(); final AtomicArray responses = new AtomicArray<>(bulkRequest.requests.size()); @@ -167,7 +160,7 @@ public class TransportBulkAction extends HandledTransportAction { + executeIngestAndBulk(task, bulkRequest, startTime, ActionListener.wrap(listener::onResponse, inner -> { inner.addSuppressed(e); listener.onFailure(inner); }), responses, indicesThatCannotBeCreated); @@ -201,7 +194,47 @@ public class TransportBulkAction extends HandledTransportAction listener, final AtomicArray responses, + Map indicesThatCannotBeCreated) { + boolean hasIndexRequestsWithPipelines = false; + ImmutableOpenMap indicesMetaData = clusterService.state().getMetaData().indices(); + for (DocWriteRequest actionRequest : bulkRequest.requests) { + if (actionRequest instanceof IndexRequest) { + IndexRequest indexRequest = (IndexRequest) actionRequest; + String pipeline = indexRequest.getPipeline(); + if (pipeline == null) { + IndexMetaData indexMetaData = indicesMetaData.get(indexRequest.index()); + if (indexMetaData == null) { + indexRequest.setPipeline(IngestService.NOOP_PIPELINE_NAME); + } else { + String defaultPipeline = IndexSettings.DEFAULT_PIPELINE.get(indexMetaData.getSettings()); + indexRequest.setPipeline(defaultPipeline); + if (IngestService.NOOP_PIPELINE_NAME.equals(defaultPipeline) == false) { + hasIndexRequestsWithPipelines = true; + } + } + } else if (IngestService.NOOP_PIPELINE_NAME.equals(pipeline) == false) { + hasIndexRequestsWithPipelines = true; + } + } + } + if (hasIndexRequestsWithPipelines) { + try { + if (clusterService.localNode().isIngestNode()) { + processBulkIndexIngestRequest(task, bulkRequest, listener); + } else { + ingestForwarder.forwardIngestRequest(BulkAction.INSTANCE, bulkRequest, listener); + } + } catch (Exception e) { + listener.onFailure(e); + } + } else { + executeBulk(task, bulkRequest, startTimeNanos, listener, responses, indicesThatCannotBeCreated); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 15a98077eac..9c134ba4012 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -29,30 +29,34 @@ import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.replication.ReplicationOperation; -import org.elasticsearch.action.support.replication.ReplicationResponse.ShardInfo; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.replication.TransportReplicationAction; import org.elasticsearch.action.support.replication.TransportWriteAction; import org.elasticsearch.action.update.UpdateHelper; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.action.shard.ShardStateAction; 
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.CheckedRunnable; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.index.mapper.MapperException; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -60,12 +64,14 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.Map; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.LongSupplier; @@ -108,174 +114,167 @@ public class TransportShardBulkAction extends TransportWriteAction shardOperationOnPrimary( - BulkShardRequest request, IndexShard primary) throws Exception { - return performOnPrimary(request, primary, updateHelper, threadPool::absoluteTimeInMillis, new ConcreteMappingUpdatePerformer()); + protected WritePrimaryResult shardOperationOnPrimary(BulkShardRequest request, IndexShard primary) + throws Exception { + ClusterStateObserver observer = new ClusterStateObserver(clusterService, request.timeout(), logger, threadPool.getThreadContext()); + CheckedRunnable waitForMappingUpdate = () -> { + PlainActionFuture waitingFuture = new PlainActionFuture<>(); + observer.waitForNextChange(new ClusterStateObserver.Listener() { + @Override + public void onNewClusterState(ClusterState state) { + waitingFuture.onResponse(null); + } + + @Override + public void onClusterServiceClose() { + waitingFuture.onFailure(new NodeClosedException(clusterService.localNode())); + } + + @Override + public void onTimeout(TimeValue timeout) { + waitingFuture.onFailure( + new MapperException("timed out while waiting for a dynamic mapping update")); + } + }); + waitingFuture.get(); + }; + return performOnPrimary(request, primary, updateHelper, threadPool::absoluteTimeInMillis, + new ConcreteMappingUpdatePerformer(), waitForMappingUpdate); } public static WritePrimaryResult performOnPrimary( - BulkShardRequest request, - IndexShard primary, - UpdateHelper updateHelper, - LongSupplier nowInMillisSupplier, - MappingUpdatePerformer mappingUpdater) throws Exception { - final IndexMetaData metaData = primary.indexSettings().getIndexMetaData(); - Translog.Location location = null; - for (int requestIndex = 0; requestIndex < request.items().length; requestIndex++) { - if 
(isAborted(request.items()[requestIndex].getPrimaryResponse()) == false) { - location = executeBulkItemRequest(metaData, primary, request, location, requestIndex, - updateHelper, nowInMillisSupplier, mappingUpdater); - } - } - BulkItemResponse[] responses = new BulkItemResponse[request.items().length]; - BulkItemRequest[] items = request.items(); - for (int i = 0; i < items.length; i++) { - responses[i] = items[i].getPrimaryResponse(); - } - BulkShardResponse response = new BulkShardResponse(request.shardId(), responses); - return new WritePrimaryResult<>(request, response, location, null, primary, logger); + BulkShardRequest request, + IndexShard primary, + UpdateHelper updateHelper, + LongSupplier nowInMillisSupplier, + MappingUpdatePerformer mappingUpdater, + CheckedRunnable waitForMappingUpdate) throws Exception { + BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(request, primary); + return performOnPrimary(context, updateHelper, nowInMillisSupplier, mappingUpdater, waitForMappingUpdate); } - private static BulkItemResultHolder executeIndexRequest(final IndexRequest indexRequest, - final BulkItemRequest bulkItemRequest, - final IndexShard primary, - final MappingUpdatePerformer mappingUpdater) throws Exception { - Engine.IndexResult indexResult = executeIndexRequestOnPrimary(indexRequest, primary, mappingUpdater); - switch (indexResult.getResultType()) { - case SUCCESS: - IndexResponse response = new IndexResponse(primary.shardId(), indexRequest.type(), indexRequest.id(), - indexResult.getSeqNo(), primary.getPrimaryTerm(), indexResult.getVersion(), indexResult.isCreated()); - return new BulkItemResultHolder(response, indexResult, bulkItemRequest); - case FAILURE: - return new BulkItemResultHolder(null, indexResult, bulkItemRequest); - default: - throw new AssertionError("unknown result type for " + indexRequest + ": " + indexResult.getResultType()); - } - } - - private static BulkItemResultHolder executeDeleteRequest(final DeleteRequest deleteRequest, - final BulkItemRequest bulkItemRequest, - final IndexShard primary, - final MappingUpdatePerformer mappingUpdater) throws Exception { - Engine.DeleteResult deleteResult = executeDeleteRequestOnPrimary(deleteRequest, primary, mappingUpdater); - switch (deleteResult.getResultType()) { - case SUCCESS: - DeleteResponse response = new DeleteResponse(primary.shardId(), deleteRequest.type(), deleteRequest.id(), - deleteResult.getSeqNo(), primary.getPrimaryTerm(), deleteResult.getVersion(), deleteResult.isFound()); - return new BulkItemResultHolder(response, deleteResult, bulkItemRequest); - case FAILURE: - return new BulkItemResultHolder(null, deleteResult, bulkItemRequest); - case MAPPING_UPDATE_REQUIRED: - throw new AssertionError("delete operation leaked a mapping update " + deleteRequest); - default: - throw new AssertionError("unknown result type for " + deleteRequest + ": " + deleteResult.getResultType()); - } - } - - static Translog.Location calculateTranslogLocation(final Translog.Location originalLocation, - final BulkItemResultHolder bulkItemResult) { - final Engine.Result operationResult = bulkItemResult.operationResult; - if (operationResult != null && operationResult.getResultType() == Engine.Result.Type.SUCCESS) { - return locationToSync(originalLocation, operationResult.getTranslogLocation()); - } else { - return originalLocation; - } - } - - // Visible for unit testing - /** - * Creates a BulkItemResponse for the primary operation and returns it. 
If no bulk response is - * needed (because one already exists and the operation failed), then return null. - */ - static BulkItemResponse createPrimaryResponse(BulkItemResultHolder bulkItemResult, - final DocWriteRequest.OpType opType, - BulkShardRequest request) { - final Engine.Result operationResult = bulkItemResult.operationResult; - final DocWriteResponse response = bulkItemResult.response; - final BulkItemRequest replicaRequest = bulkItemResult.replicaRequest; - - if (operationResult == null) { // in case of noop update operation - assert response.getResult() == DocWriteResponse.Result.NOOP : "only noop updates can have a null operation"; - return new BulkItemResponse(replicaRequest.id(), opType, response); - - } else if (operationResult.getResultType() == Engine.Result.Type.SUCCESS) { - BulkItemResponse primaryResponse = new BulkItemResponse(replicaRequest.id(), opType, response); - // set a blank ShardInfo so we can safely send it to the replicas. We won't use it in the real response though. - primaryResponse.getResponse().setShardInfo(new ShardInfo()); - return primaryResponse; - - } else if (operationResult.getResultType() == Engine.Result.Type.FAILURE) { - DocWriteRequest docWriteRequest = replicaRequest.request(); - Exception failure = operationResult.getFailure(); - if (isConflictException(failure)) { - logger.trace(() -> new ParameterizedMessage("{} failed to execute bulk item ({}) {}", - request.shardId(), docWriteRequest.opType().getLowercase(), request), failure); - } else { - logger.debug(() -> new ParameterizedMessage("{} failed to execute bulk item ({}) {}", - request.shardId(), docWriteRequest.opType().getLowercase(), request), failure); - } - - // if it's a conflict failure, and we already executed the request on a primary (and we execute it - // again, due to primary relocation and only processing up to N bulk items when the shard gets closed) - // then just use the response we got from the failed execution - if (replicaRequest.getPrimaryResponse() == null || isConflictException(failure) == false) { - return new BulkItemResponse(replicaRequest.id(), docWriteRequest.opType(), - // Make sure to use request.index() here, if you - // use docWriteRequest.index() it will use the - // concrete index instead of an alias if used! 
-                    new BulkItemResponse.Failure(request.index(), docWriteRequest.type(), docWriteRequest.id(),
-                        failure, operationResult.getSeqNo()));
-            } else {
-                assert replicaRequest.getPrimaryResponse() != null : "replica request must have a primary response";
-                return null;
-            }
-        } else {
-            throw new AssertionError("unknown result type for " + request + ": " + operationResult.getResultType());
+    private static WritePrimaryResult<BulkShardRequest, BulkShardResponse> performOnPrimary(
+        BulkPrimaryExecutionContext context, UpdateHelper updateHelper, LongSupplier nowInMillisSupplier,
+        MappingUpdatePerformer mappingUpdater, CheckedRunnable<Exception> waitForMappingUpdate) throws Exception {
+
+        while (context.hasMoreOperationsToExecute()) {
+            executeBulkItemRequest(context, updateHelper, nowInMillisSupplier, mappingUpdater, waitForMappingUpdate);
+            assert context.isInitial(); // either completed and moved to next or reset
+        }
+        return new WritePrimaryResult<>(context.getBulkShardRequest(), context.buildShardResponse(), context.getLocationToSync(),
+            null, context.getPrimary(), logger);
     }
 
     /** Executes bulk item requests and handles request execution exceptions */
-    static Translog.Location executeBulkItemRequest(IndexMetaData metaData, IndexShard primary,
-                                                    BulkShardRequest request, Translog.Location location,
-                                                    int requestIndex, UpdateHelper updateHelper,
-                                                    LongSupplier nowInMillisSupplier,
-                                                    final MappingUpdatePerformer mappingUpdater) throws Exception {
-        final DocWriteRequest itemRequest = request.items()[requestIndex].request();
-        final DocWriteRequest.OpType opType = itemRequest.opType();
-        final BulkItemResultHolder responseHolder;
-        switch (itemRequest.opType()) {
-            case CREATE:
-            case INDEX:
-                responseHolder = executeIndexRequest((IndexRequest) itemRequest,
-                    request.items()[requestIndex], primary, mappingUpdater);
-                break;
-            case UPDATE:
-                responseHolder = executeUpdateRequest((UpdateRequest) itemRequest, primary, metaData, request,
-                    requestIndex, updateHelper, nowInMillisSupplier, mappingUpdater);
-                break;
-            case DELETE:
-                responseHolder = executeDeleteRequest((DeleteRequest) itemRequest, request.items()[requestIndex], primary, mappingUpdater);
-                break;
-            default: throw new IllegalStateException("unexpected opType [" + itemRequest.opType() + "] found");
+    static void executeBulkItemRequest(BulkPrimaryExecutionContext context, UpdateHelper updateHelper, LongSupplier nowInMillisSupplier,
+                                       MappingUpdatePerformer mappingUpdater, CheckedRunnable<Exception> waitForMappingUpdate)
+        throws Exception {
+        final DocWriteRequest.OpType opType = context.getCurrent().opType();
+
+        final UpdateHelper.Result updateResult;
+        if (opType == DocWriteRequest.OpType.UPDATE) {
+            final UpdateRequest updateRequest = (UpdateRequest) context.getCurrent();
+            try {
+                updateResult = updateHelper.prepare(updateRequest, context.getPrimary(), nowInMillisSupplier);
+            } catch (Exception failure) {
+                // we may fail translating an update to an index or delete operation
+                // we use index result to communicate failure while translating update request
+                final Engine.Result result = new Engine.IndexResult(failure, updateRequest.version(), SequenceNumbers.UNASSIGNED_SEQ_NO);
+                context.setRequestToExecute(updateRequest);
+                context.markOperationAsExecuted(result);
+                context.markAsCompleted(context.getExecutionResult());
+                return;
+            }
+            // execute translated update request
+            switch (updateResult.getResponseResult()) {
+                case CREATED:
+                case UPDATED:
+                    IndexRequest indexRequest = updateResult.action();
+                    IndexMetaData metaData = context.getPrimary().indexSettings().getIndexMetaData();
+
MappingMetaData mappingMd = metaData.mappingOrDefault(indexRequest.type()); + indexRequest.process(metaData.getCreationVersion(), mappingMd, updateRequest.concreteIndex()); + context.setRequestToExecute(indexRequest); + break; + case DELETED: + context.setRequestToExecute(updateResult.action()); + break; + case NOOP: + context.markOperationAsNoOp(updateResult.action()); + context.markAsCompleted(context.getExecutionResult()); + return; + default: + throw new IllegalStateException("Illegal update operation " + updateResult.getResponseResult()); + } + } else { + context.setRequestToExecute(context.getCurrent()); + updateResult = null; } - final BulkItemRequest replicaRequest = responseHolder.replicaRequest; + assert context.getRequestToExecute() != null; // also checks that we're in TRANSLATED state - // update the bulk item request because update request execution can mutate the bulk item request - request.items()[requestIndex] = replicaRequest; - - // Retrieve the primary response, and update the replica request with the primary's response - BulkItemResponse primaryResponse = createPrimaryResponse(responseHolder, opType, request); - if (primaryResponse != null) { - replicaRequest.setPrimaryResponse(primaryResponse); + if (context.getRequestToExecute().opType() == DocWriteRequest.OpType.DELETE) { + executeDeleteRequestOnPrimary(context, mappingUpdater); + } else { + executeIndexRequestOnPrimary(context, mappingUpdater); } - // Update the translog with the new location, if needed - return calculateTranslogLocation(location, responseHolder); + if (context.requiresWaitingForMappingUpdate()) { + try { + waitForMappingUpdate.run(); + context.resetForExecutionForRetry(); + } catch (Exception e) { + context.failOnMappingUpdate(e); + } + return; + } + + assert context.isOperationExecuted(); + + if (opType == DocWriteRequest.OpType.UPDATE && + context.getExecutionResult().isFailed() && + isConflictException(context.getExecutionResult().getFailure().getCause())) { + final UpdateRequest updateRequest = (UpdateRequest) context.getCurrent(); + if (context.getRetryCounter() < updateRequest.retryOnConflict()) { + context.resetForExecutionForRetry(); + return; + } + } + + finalizePrimaryOperationOnCompletion(context, opType, updateResult); } - private static boolean isAborted(BulkItemResponse response) { - return response != null && response.isFailed() && response.getFailure().isAborted(); + private static void finalizePrimaryOperationOnCompletion(BulkPrimaryExecutionContext context, DocWriteRequest.OpType opType, + UpdateHelper.Result updateResult) { + final BulkItemResponse executionResult = context.getExecutionResult(); + if (opType == DocWriteRequest.OpType.UPDATE) { + final UpdateRequest updateRequest = (UpdateRequest) context.getCurrent(); + context.markAsCompleted( + processUpdateResponse(updateRequest, context.getConcreteIndex(), executionResult, updateResult)); + } else if (executionResult.isFailed()) { + final Exception failure = executionResult.getFailure().getCause(); + final DocWriteRequest docWriteRequest = context.getCurrent(); + if (TransportShardBulkAction.isConflictException(failure)) { + logger.trace(() -> new ParameterizedMessage("{} failed to execute bulk item ({}) {}", + context.getPrimary().shardId(), docWriteRequest.opType().getLowercase(), docWriteRequest), failure); + } else { + logger.debug(() -> new ParameterizedMessage("{} failed to execute bulk item ({}) {}", + context.getPrimary().shardId(), docWriteRequest.opType().getLowercase(), docWriteRequest), failure); + } + + final 
BulkItemResponse primaryResponse; + // if it's a conflict failure, and we already executed the request on a primary (and we execute it + // again, due to primary relocation and only processing up to N bulk items when the shard gets closed) + // then just use the response we got from the failed execution + if (TransportShardBulkAction.isConflictException(failure) && context.getPreviousPrimaryResponse() != null) { + primaryResponse = context.getPreviousPrimaryResponse(); + } else { + primaryResponse = executionResult; + } + context.markAsCompleted(primaryResponse); + } else { + context.markAsCompleted(executionResult); + } + assert context.isInitial(); } private static boolean isConflictException(final Exception e) { @@ -285,150 +284,50 @@ public class TransportShardBulkAction extends TransportWriteAction> sourceAndContent = + if (updateRequest.fetchSource() != null && updateRequest.fetchSource().fetchSource()) { + final BytesReference indexSourceAsBytes = updateIndexRequest.source(); + final Tuple> sourceAndContent = XContentHelper.convertToMap(indexSourceAsBytes, true, updateIndexRequest.getContentType()); - updateResponse.setGetResult(UpdateHelper.extractGetResult(updateRequest, concreteIndex, - indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes)); - } - // set translated request as replica request - replicaRequest = new BulkItemRequest(bulkReqId, updateIndexRequest); - - } else if (opType == Engine.Operation.TYPE.DELETE) { - assert result instanceof Engine.DeleteResult : result.getClass(); - final DeleteRequest updateDeleteRequest = translate.action(); - - final DeleteResponse deleteResponse = new DeleteResponse(primary.shardId(), updateDeleteRequest.type(), updateDeleteRequest.id(), - result.getSeqNo(), primary.getPrimaryTerm(), result.getVersion(), ((Engine.DeleteResult) result).isFound()); - - updateResponse = new UpdateResponse(deleteResponse.getShardInfo(), deleteResponse.getShardId(), + updateResponse.setGetResult(UpdateHelper.extractGetResult(updateRequest, concreteIndex, + indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes)); + } + } else if (translatedResult == DocWriteResponse.Result.DELETED) { + final DeleteResponse deleteResponse = operationResponse.getResponse(); + updateResponse = new UpdateResponse(deleteResponse.getShardInfo(), deleteResponse.getShardId(), deleteResponse.getType(), deleteResponse.getId(), deleteResponse.getSeqNo(), deleteResponse.getPrimaryTerm(), deleteResponse.getVersion(), deleteResponse.getResult()); - final GetResult getResult = UpdateHelper.extractGetResult(updateRequest, concreteIndex, deleteResponse.getVersion(), + final GetResult getResult = UpdateHelper.extractGetResult(updateRequest, concreteIndex, deleteResponse.getVersion(), translate.updatedSourceAsMap(), translate.updateSourceContentType(), null); - updateResponse.setGetResult(getResult); - // set translated request as replica request - replicaRequest = new BulkItemRequest(bulkReqId, updateDeleteRequest); - - } else { - throw new IllegalArgumentException("unknown operation type: " + opType); - } - - return new BulkItemResultHolder(updateResponse, result, replicaRequest); - } - - /** - * Executes update request once, delegating to a index or delete operation after translation. 
- * NOOP updates are indicated by returning a null operation in {@link BulkItemResultHolder} - */ - static BulkItemResultHolder executeUpdateRequestOnce(UpdateRequest updateRequest, IndexShard primary, - IndexMetaData metaData, String concreteIndex, - UpdateHelper updateHelper, LongSupplier nowInMillis, - BulkItemRequest primaryItemRequest, int bulkReqId, - final MappingUpdatePerformer mappingUpdater) throws Exception { - final UpdateHelper.Result translate; - // translate update request - try { - translate = updateHelper.prepare(updateRequest, primary, nowInMillis); - } catch (Exception failure) { - // we may fail translating a update to index or delete operation - // we use index result to communicate failure while translating update request - final Engine.Result result = new Engine.IndexResult(failure, updateRequest.version(), SequenceNumbers.UNASSIGNED_SEQ_NO); - return new BulkItemResultHolder(null, result, primaryItemRequest); - } - - final Engine.Result result; - // execute translated update request - switch (translate.getResponseResult()) { - case CREATED: - case UPDATED: - IndexRequest indexRequest = translate.action(); - MappingMetaData mappingMd = metaData.mappingOrDefault(indexRequest.type()); - indexRequest.process(metaData.getCreationVersion(), mappingMd, concreteIndex); - result = executeIndexRequestOnPrimary(indexRequest, primary, mappingUpdater); - break; - case DELETED: - DeleteRequest deleteRequest = translate.action(); - result = executeDeleteRequestOnPrimary(deleteRequest, primary, mappingUpdater); - break; - case NOOP: - primary.noopUpdate(updateRequest.type()); - result = null; - break; - default: throw new IllegalStateException("Illegal update operation " + translate.getResponseResult()); - } - - if (result == null) { - // this is a noop operation - final UpdateResponse updateResponse = translate.action(); - return new BulkItemResultHolder(updateResponse, result, primaryItemRequest); - } else if (result.getResultType() == Engine.Result.Type.FAILURE) { - // There was a result, and the result was a failure - return new BulkItemResultHolder(null, result, primaryItemRequest); - } else if (result.getResultType() == Engine.Result.Type.SUCCESS) { - // It was successful, we need to construct the response and return it - return processUpdateResponse(updateRequest, concreteIndex, result, translate, primary, bulkReqId); - } else { - throw new AssertionError("unknown result type for " + updateRequest + ": " + result.getResultType()); - } - } - - /** - * Executes update request, delegating to a index or delete operation after translation, - * handles retries on version conflict and constructs update response - * NOOP updates are indicated by returning a null operation - * in {@link BulkItemResultHolder} - */ - private static BulkItemResultHolder executeUpdateRequest(UpdateRequest updateRequest, IndexShard primary, - IndexMetaData metaData, BulkShardRequest request, - int requestIndex, UpdateHelper updateHelper, - LongSupplier nowInMillis, - final MappingUpdatePerformer mappingUpdater) throws Exception { - BulkItemRequest primaryItemRequest = request.items()[requestIndex]; - assert primaryItemRequest.request() == updateRequest - : "expected bulk item request to contain the original update request, got: " + - primaryItemRequest.request() + " and " + updateRequest; - - BulkItemResultHolder holder = null; - // There must be at least one attempt - int maxAttempts = Math.max(1, updateRequest.retryOnConflict()); - for (int attemptCount = 0; attemptCount < maxAttempts; attemptCount++) { 
- - holder = executeUpdateRequestOnce(updateRequest, primary, metaData, request.index(), updateHelper, - nowInMillis, primaryItemRequest, request.items()[requestIndex].id(), mappingUpdater); - - // It was either a successful request, or it was a non-conflict failure - if (holder.isVersionConflict() == false) { - return holder; + updateResponse.setGetResult(getResult); + } else { + throw new IllegalArgumentException("unknown operation type: " + translatedResult); } + response = new BulkItemResponse(operationResponse.getItemId(), DocWriteRequest.OpType.UPDATE, updateResponse); } - // We ran out of tries and haven't returned a valid bulk item response, so return the last one generated - return holder; + return response; } + /** Modes for executing item request on replica depending on corresponding primary execution result */ public enum ReplicaItemExecutionMode { @@ -451,6 +350,7 @@ public class TransportShardBulkAction extends TransportWriteAction primary.applyIndexOperationOnPrimary(request.version(), request.versionType(), sourceToParse, request.getAutoGeneratedTimestamp(), request.isRetry()), - e -> new Engine.IndexResult(e, request.version()), - mappingUpdater); + e -> primary.getFailedIndexResult(e, request.version()), + context::markOperationAsExecuted, + mapping -> mappingUpdater.updateMappings(mapping, primary.shardId(), request.type())); } - private static Engine.DeleteResult executeDeleteRequestOnPrimary(DeleteRequest request, IndexShard primary, - MappingUpdatePerformer mappingUpdater) throws Exception { - return executeOnPrimaryWhileHandlingMappingUpdates(primary.shardId(), request.type(), + private static void executeDeleteRequestOnPrimary(BulkPrimaryExecutionContext context, + MappingUpdatePerformer mappingUpdater) throws Exception { + final DeleteRequest request = context.getRequestToExecute(); + final IndexShard primary = context.getPrimary(); + executeOnPrimaryWhileHandlingMappingUpdates(context, () -> primary.applyDeleteOperationOnPrimary(request.version(), request.type(), request.id(), request.versionType()), - e -> new Engine.DeleteResult(e, request.version()), - mappingUpdater); + e -> primary.getFailedDeleteResult(e, request.version()), + context::markOperationAsExecuted, + mapping -> mappingUpdater.updateMappings(mapping, primary.shardId(), request.type())); } - private static T executeOnPrimaryWhileHandlingMappingUpdates(ShardId shardId, String type, - CheckedSupplier toExecute, - Function onError, - MappingUpdatePerformer mappingUpdater) + private static void executeOnPrimaryWhileHandlingMappingUpdates( + BulkPrimaryExecutionContext context, CheckedSupplier toExecute, + Function exceptionToResult, Consumer onComplete, Consumer mappingUpdater) throws IOException { T result = toExecute.get(); if (result.getResultType() == Engine.Result.Type.MAPPING_UPDATE_REQUIRED) { // try to update the mappings and try again. try { - mappingUpdater.updateMappings(result.getRequiredMappingUpdate(), shardId, type); + mappingUpdater.accept(result.getRequiredMappingUpdate()); } catch (Exception e) { // failure to update the mapping should translate to a failure of specific requests. Other requests // still need to be executed and replicated. - return onError.apply(e); + onComplete.accept(exceptionToResult.apply(e)); + return; } + // TODO - we can fall back to a wait for cluster state update but I'm keeping the logic the same for now result = toExecute.get(); if (result.getResultType() == Engine.Result.Type.MAPPING_UPDATE_REQUIRED) { // double mapping update. 
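The rewritten executeOnPrimaryWhileHandlingMappingUpdates keeps the old two-attempt shape: run the engine operation, push a dynamic mapping update if the engine asks for one, then run the operation once more. What changes is the terminal case: a second MAPPING_UPDATE_REQUIRED now marks the context for a later retry instead of throwing RetryOnPrimaryException. A condensed sketch of that control flow, with Engine.Result reduced to an enum and the BulkPrimaryExecutionContext callbacks passed in directly:

    import java.util.function.Consumer;
    import java.util.function.Supplier;

    // Condensed control flow of the primary-side mapping-update handling.
    final class MappingUpdateFlow {
        enum ResultType { SUCCESS, FAILURE, MAPPING_UPDATE_REQUIRED }

        static void execute(Supplier<ResultType> operation,
                            Runnable updateMappings,
                            Consumer<ResultType> onComplete,
                            Runnable markAsRequiringMappingUpdate) {
            ResultType result = operation.get();
            if (result == ResultType.MAPPING_UPDATE_REQUIRED) {
                try {
                    updateMappings.run(); // push the dynamic mapping to the master
                } catch (Exception e) {
                    onComplete.accept(ResultType.FAILURE); // only this item fails
                    return;
                }
                result = operation.get(); // retry once with the new mapping
                if (result == ResultType.MAPPING_UPDATE_REQUIRED) {
                    // the updated cluster state has not reached this node yet;
                    // defer the item instead of failing the whole bulk request
                    markAsRequiringMappingUpdate.run();
                    return;
                }
            }
            onComplete.accept(result);
        }
    }

Deferring rather than throwing keeps a transient cluster-state lag on one item from forcing the entire bulk shard request back through the replication layer.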
We assume that the successful mapping update wasn't yet processed on the node // and retry the entire request again. - throw new ReplicationOperation.RetryOnPrimaryException(shardId, - "Dynamic mappings are not available on the node that holds the primary yet"); + context.markAsRequiringMappingUpdate(); + } else { + onComplete.accept(result); } + } else { + onComplete.accept(result); } - assert result.getFailure() instanceof ReplicationOperation.RetryOnPrimaryException == false : - "IndexShard shouldn't use RetryOnPrimaryException. got " + result.getFailure(); - return result; - } class ConcreteMappingUpdatePerformer implements MappingUpdatePerformer { diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 57e8ea66138..339880dad44 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -185,6 +185,10 @@ public class IndexRequest extends ReplicatedWriteRequest implement validationException = addValidationError("an id must be provided if version type or value are set", validationException); } + if (pipeline != null && pipeline.isEmpty()) { + validationException = addValidationError("pipeline cannot be an empty string", validationException); + } + return validationException; } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java index e0df57a6dad..902614a3e83 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java @@ -20,8 +20,9 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.Action; +import org.elasticsearch.action.support.master.AcknowledgedResponse; -public class DeletePipelineAction extends Action { +public class DeletePipelineAction extends Action { public static final DeletePipelineAction INSTANCE = new DeletePipelineAction(); public static final String NAME = "cluster:admin/ingest/pipeline/delete"; @@ -31,7 +32,7 @@ public class DeletePipelineAction extends Action { } @Override - public WritePipelineResponse newResponse() { - return new WritePipelineResponse(); + public AcknowledgedResponse newResponse() { + return new AcknowledgedResponse(); } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java index afe4ea1b590..b08e0cb2472 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java @@ -20,9 +20,10 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ElasticsearchClient; -public class DeletePipelineRequestBuilder extends ActionRequestBuilder { +public class DeletePipelineRequestBuilder extends ActionRequestBuilder { public DeletePipelineRequestBuilder(ElasticsearchClient client, DeletePipelineAction action) { super(client, action, new DeletePipelineRequest()); diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java 
b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java index 45cb83634f8..d3cd052ecad 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java @@ -21,6 +21,7 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -34,7 +35,7 @@ import org.elasticsearch.node.NodeService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -public class DeletePipelineTransportAction extends TransportMasterNodeAction { +public class DeletePipelineTransportAction extends TransportMasterNodeAction { private final PipelineStore pipelineStore; private final ClusterService clusterService; @@ -54,12 +55,12 @@ public class DeletePipelineTransportAction extends TransportMasterNodeAction listener) throws Exception { + protected void masterOperation(DeletePipelineRequest request, ClusterState state, ActionListener listener) throws Exception { pipelineStore.delete(clusterService, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java index c4784598ae7..4ebcff127cc 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java @@ -20,8 +20,9 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.Action; +import org.elasticsearch.action.support.master.AcknowledgedResponse; -public class PutPipelineAction extends Action { +public class PutPipelineAction extends Action { public static final PutPipelineAction INSTANCE = new PutPipelineAction(); public static final String NAME = "cluster:admin/ingest/pipeline/put"; @@ -31,7 +32,7 @@ public class PutPipelineAction extends Action { } @Override - public WritePipelineResponse newResponse() { - return new WritePipelineResponse(); + public AcknowledgedResponse newResponse() { + return new AcknowledgedResponse(); } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java index ffbb94d27a0..1919d98c7e1 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java @@ -20,11 +20,12 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; -public class PutPipelineRequestBuilder extends ActionRequestBuilder { +public class PutPipelineRequestBuilder extends ActionRequestBuilder { public PutPipelineRequestBuilder(ElasticsearchClient client, PutPipelineAction action) { super(client, action, new PutPipelineRequest()); diff --git 
a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java index 17af73c1677..abe8f49272c 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; @@ -43,7 +44,7 @@ import org.elasticsearch.transport.TransportService; import java.util.HashMap; import java.util.Map; -public class PutPipelineTransportAction extends TransportMasterNodeAction { +public class PutPipelineTransportAction extends TransportMasterNodeAction { private final PipelineStore pipelineStore; private final ClusterService clusterService; @@ -66,12 +67,12 @@ public class PutPipelineTransportAction extends TransportMasterNodeAction listener) throws Exception { + protected void masterOperation(PutPipelineRequest request, ClusterState state, ActionListener listener) throws Exception { NodesInfoRequest nodesInfoRequest = new NodesInfoRequest(); nodesInfoRequest.clear(); nodesInfoRequest.ingest(true); diff --git a/server/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponse.java deleted file mode 100644 index 293a62b66f2..00000000000 --- a/server/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponse.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
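With both pipeline transport actions re-parameterized, callers no longer import a pipeline-specific response type at all; acknowledgement is read off the shared class. A hypothetical caller after this change (DeletePipelineRequest(String) is the pre-existing request constructor):

    import org.elasticsearch.action.ingest.DeletePipelineRequest;
    import org.elasticsearch.action.support.master.AcknowledgedResponse;
    import org.elasticsearch.client.Client;

    // Hypothetical caller; DeletePipelineAction now resolves to AcknowledgedResponse.
    final class PipelineRemover {
        static boolean remove(Client client, String pipelineId) {
            AcknowledgedResponse resp = client.admin().cluster()
                .deletePipeline(new DeletePipelineRequest(pipelineId))
                .actionGet();
            return resp.isAcknowledged();
        }
    }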
- */ - -package org.elasticsearch.action.ingest; - -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentParser; - -public class WritePipelineResponse extends AcknowledgedResponse implements ToXContentObject { - - WritePipelineResponse() { - } - - public WritePipelineResponse(boolean acknowledged) { - super(acknowledged); - } - - public static WritePipelineResponse fromXContent(XContentParser parser) { - return new WritePipelineResponse(parseAcknowledged(parser)); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java index c6e0b21dffd..3d3737b0638 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java @@ -138,8 +138,7 @@ public class SearchPhaseExecutionException extends ElasticsearchException { builder.field("grouped", group); // notify that it's grouped builder.field("failed_shards"); builder.startArray(); - ShardOperationFailedException[] failures = params.paramAsBoolean("group_shard_failures", true) ? - ExceptionsHelper.groupBy(shardFailures) : shardFailures; + ShardOperationFailedException[] failures = group ? ExceptionsHelper.groupBy(shardFailures) : shardFailures; for (ShardOperationFailedException failure : failures) { builder.startObject(); failure.toXContent(builder, params); diff --git a/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java b/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java index f2ba62fefd4..98418153d50 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java +++ b/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchException; import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.transport.RemoteClusterAware; import java.io.IOException; @@ -42,7 +43,7 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpect /** * Represents a failure to search on a specific shard. */ -public class ShardSearchFailure implements ShardOperationFailedException { +public class ShardSearchFailure extends ShardOperationFailedException { private static final String REASON_FIELD = "reason"; private static final String NODE_FIELD = "node"; @@ -52,9 +53,6 @@ public class ShardSearchFailure implements ShardOperationFailedException { public static final ShardSearchFailure[] EMPTY_ARRAY = new ShardSearchFailure[0]; private SearchShardTarget shardTarget; - private String reason; - private RestStatus status; - private Throwable cause; private ShardSearchFailure() { @@ -65,25 +63,18 @@ public class ShardSearchFailure implements ShardOperationFailedException { } public ShardSearchFailure(Exception e, @Nullable SearchShardTarget shardTarget) { + super(shardTarget == null ? null : shardTarget.getFullyQualifiedIndexName(), + shardTarget == null ? 
-1 : shardTarget.getShardId().getId(), + ExceptionsHelper.detailedMessage(e), + ExceptionsHelper.status(ExceptionsHelper.unwrapCause(e)), + ExceptionsHelper.unwrapCause(e)); + final Throwable actual = ExceptionsHelper.unwrapCause(e); - if (actual != null && actual instanceof SearchException) { + if (actual instanceof SearchException) { this.shardTarget = ((SearchException) actual).shard(); } else if (shardTarget != null) { this.shardTarget = shardTarget; } - status = ExceptionsHelper.status(actual); - this.reason = ExceptionsHelper.detailedMessage(e); - this.cause = actual; - } - - public ShardSearchFailure(String reason, SearchShardTarget shardTarget) { - this(reason, shardTarget, RestStatus.INTERNAL_SERVER_ERROR); - } - - private ShardSearchFailure(String reason, SearchShardTarget shardTarget, RestStatus status) { - this.shardTarget = shardTarget; - this.reason = reason; - this.status = status; } /** @@ -94,41 +85,6 @@ public class ShardSearchFailure implements ShardOperationFailedException { return this.shardTarget; } - @Override - public RestStatus status() { - return this.status; - } - - /** - * The index the search failed on. - */ - @Override - public String index() { - if (shardTarget != null) { - return shardTarget.getIndex(); - } - return null; - } - - /** - * The shard id the search failed on. - */ - @Override - public int shardId() { - if (shardTarget != null) { - return shardTarget.getShardId().id(); - } - return -1; - } - - /** - * The reason of the failure. - */ - @Override - public String reason() { - return this.reason; - } - @Override public String toString() { return "shard [" + (shardTarget == null ? "_na" : shardTarget) + "], reason [" + reason + "], cause [" + @@ -171,12 +127,10 @@ public class ShardSearchFailure implements ShardOperationFailedException { if (shardTarget != null) { builder.field(NODE_FIELD, shardTarget.getNodeId()); } - if (cause != null) { - builder.field(REASON_FIELD); - builder.startObject(); - ElasticsearchException.generateThrowableXContent(builder, params, cause); - builder.endObject(); - } + builder.field(REASON_FIELD); + builder.startObject(); + ElasticsearchException.generateThrowableXContent(builder, params, cause); + builder.endObject(); return builder; } @@ -186,6 +140,7 @@ public class ShardSearchFailure implements ShardOperationFailedException { String currentFieldName = null; int shardId = -1; String indexName = null; + String clusterAlias = null; String nodeId = null; ElasticsearchException exception = null; while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -196,6 +151,11 @@ public class ShardSearchFailure implements ShardOperationFailedException { shardId = parser.intValue(); } else if (INDEX_FIELD.equals(currentFieldName)) { indexName = parser.text(); + int indexOf = indexName.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR); + if (indexOf > 0) { + clusterAlias = indexName.substring(0, indexOf); + indexName = indexName.substring(indexOf + 1); + } } else if (NODE_FIELD.equals(currentFieldName)) { nodeId = parser.text(); } else { @@ -214,13 +174,8 @@ public class ShardSearchFailure implements ShardOperationFailedException { SearchShardTarget searchShardTarget = null; if (nodeId != null) { searchShardTarget = new SearchShardTarget(nodeId, - new ShardId(new Index(indexName, IndexMetaData.INDEX_UUID_NA_VALUE), shardId), null, OriginalIndices.NONE); + new ShardId(new Index(indexName, IndexMetaData.INDEX_UUID_NA_VALUE), shardId), clusterAlias, OriginalIndices.NONE); } return new 
ShardSearchFailure(exception, searchShardTarget); } - - @Override - public Throwable getCause() { - return cause; - } } diff --git a/server/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java b/server/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java index 8a4a787fbe5..d297df478a4 100644 --- a/server/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java +++ b/server/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java @@ -28,8 +28,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -37,7 +35,7 @@ import java.io.IOException; import static org.elasticsearch.ExceptionsHelper.detailedMessage; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; -public class DefaultShardOperationFailedException implements ShardOperationFailedException { +public class DefaultShardOperationFailedException extends ShardOperationFailedException { private static final String INDEX = "index"; private static final String SHARD_ID = "shard"; @@ -52,56 +50,16 @@ public class DefaultShardOperationFailedException implements ShardOperationFaile PARSER.declareObject(constructorArg(), (p, c) -> ElasticsearchException.fromXContent(p), new ParseField(REASON)); } - private String index; - - private int shardId; - - private Throwable reason; - - private RestStatus status; - protected DefaultShardOperationFailedException() { } public DefaultShardOperationFailedException(ElasticsearchException e) { - Index index = e.getIndex(); - this.index = index == null ? null : index.getName(); - ShardId shardId = e.getShardId(); - this.shardId = shardId == null ? -1 : shardId.id(); - this.reason = e; - this.status = e.status(); + super(e.getIndex() == null ? null : e.getIndex().getName(), e.getShardId() == null ? 
-1 : e.getShardId().getId(), + detailedMessage(e), e.status(), e); } - public DefaultShardOperationFailedException(String index, int shardId, Throwable reason) { - this.index = index; - this.shardId = shardId; - this.reason = reason; - this.status = ExceptionsHelper.status(reason); - } - - @Override - public String index() { - return this.index; - } - - @Override - public int shardId() { - return this.shardId; - } - - @Override - public String reason() { - return detailedMessage(reason); - } - - @Override - public RestStatus status() { - return status; - } - - @Override - public Throwable getCause() { - return reason; + public DefaultShardOperationFailedException(String index, int shardId, Throwable cause) { + super(index, shardId, detailedMessage(cause), ExceptionsHelper.status(cause), cause); } public static DefaultShardOperationFailedException readShardOperationFailed(StreamInput in) throws IOException { @@ -112,24 +70,17 @@ public class DefaultShardOperationFailedException implements ShardOperationFaile @Override public void readFrom(StreamInput in) throws IOException { - if (in.readBoolean()) { - index = in.readString(); - } + index = in.readOptionalString(); shardId = in.readVInt(); - reason = in.readException(); + cause = in.readException(); status = RestStatus.readFrom(in); } @Override public void writeTo(StreamOutput out) throws IOException { - if (index == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - out.writeString(index); - } + out.writeOptionalString(index); out.writeVInt(shardId); - out.writeException(reason); + out.writeException(cause); RestStatus.writeTo(out, status); } @@ -145,7 +96,7 @@ public class DefaultShardOperationFailedException implements ShardOperationFaile builder.field("status", status.name()); if (reason != null) { builder.startObject("reason"); - ElasticsearchException.generateThrowableXContent(builder, params, reason); + ElasticsearchException.generateThrowableXContent(builder, params, cause); builder.endObject(); } return builder; diff --git a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedResponse.java b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedResponse.java index 594dcda8c66..21a0d3f21a8 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedResponse.java @@ -34,10 +34,9 @@ import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; /** - * Abstract class that allows to mark action responses that support acknowledgements. - * Facilitates consistency across different api. 
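The serialization cleanup in DefaultShardOperationFailedException above swaps the hand-rolled boolean-then-string dance for writeOptionalString/readOptionalString, which put the same bytes on the wire. In plain java.io terms the two forms are equivalent to:

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    // Equivalent encoding: a presence flag, then the value when present.
    // (StreamOutput uses its own string encoding; writeUTF stands in for it here.)
    final class OptionalStringCodec {
        static void write(DataOutput out, String s) throws IOException {
            out.writeBoolean(s != null);
            if (s != null) {
                out.writeUTF(s);
            }
        }

        static String read(DataInput in) throws IOException {
            return in.readBoolean() ? in.readUTF() : null;
        }
    }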
+ * A response that indicates that a request has been acknowledged */ -public abstract class AcknowledgedResponse extends ActionResponse implements ToXContentObject { +public class AcknowledgedResponse extends ActionResponse implements ToXContentObject { private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged"); @@ -48,11 +47,10 @@ public abstract class AcknowledgedResponse extends ActionResponse implements ToX protected boolean acknowledged; - protected AcknowledgedResponse() { - + public AcknowledgedResponse() { } - protected AcknowledgedResponse(boolean acknowledged) { + public AcknowledgedResponse(boolean acknowledged) { this.acknowledged = acknowledged; } @@ -100,8 +98,8 @@ public abstract class AcknowledgedResponse extends ActionResponse implements ToX ObjectParser.ValueType.BOOLEAN); } - protected static boolean parseAcknowledged(XContentParser parser) { - return ACKNOWLEDGED_FLAG_PARSER.apply(parser, null); + public static AcknowledgedResponse fromXContent(XContentParser parser) throws IOException { + return new AcknowledgedResponse(ACKNOWLEDGED_FLAG_PARSER.apply(parser, null)); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationResponse.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationResponse.java index b8a5f3782bd..bc5c696894a 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationResponse.java @@ -218,13 +218,13 @@ public class ReplicationResponse extends ActionResponse { '}'; } - public static ShardInfo readShardInfo(StreamInput in) throws IOException { + static ShardInfo readShardInfo(StreamInput in) throws IOException { ShardInfo shardInfo = new ShardInfo(); shardInfo.readFrom(in); return shardInfo; } - public static class Failure implements ShardOperationFailedException, ToXContentObject { + public static class Failure extends ShardOperationFailedException implements ToXContentObject { private static final String _INDEX = "_index"; private static final String _SHARD = "_shard"; @@ -235,37 +235,18 @@ public class ReplicationResponse extends ActionResponse { private ShardId shardId; private String nodeId; - private Exception cause; - private RestStatus status; private boolean primary; public Failure(ShardId shardId, @Nullable String nodeId, Exception cause, RestStatus status, boolean primary) { + super(shardId.getIndexName(), shardId.getId(), ExceptionsHelper.detailedMessage(cause), status, cause); this.shardId = shardId; this.nodeId = nodeId; - this.cause = cause; - this.status = status; this.primary = primary; } Failure() { } - /** - * @return On what index the failure occurred. - */ - @Override - public String index() { - return shardId.getIndexName(); - } - - /** - * @return On what shard id the failure occurred. - */ - @Override - public int shardId() { - return shardId.id(); - } - public ShardId fullShardId() { return shardId; } @@ -278,27 +259,6 @@ public class ReplicationResponse extends ActionResponse { return nodeId; } - /** - * @return A text description of the failure - */ - @Override - public String reason() { - return ExceptionsHelper.detailedMessage(cause); - } - - /** - * @return The status to report if this failure was a primary failure. 
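ShardSearchFailure, DefaultShardOperationFailedException, and ReplicationResponse.Failure each used to keep their own index/shardId/reason/status/cause fields behind the ShardOperationFailedException interface; turning that interface into an abstract class hoists the state up, so subclasses just forward it through super(...). Reduced to its data (the real base class also extends Exception and handles wire serialization, elided here), the new shape is roughly:

    import org.elasticsearch.rest.RestStatus;

    // Reduced sketch of the hoisted base class: shared state, shared accessors.
    abstract class ShardOperationFailedExceptionSketch {
        protected String index;        // may be null when the failure is not index-bound
        protected int shardId = -1;
        protected String reason;
        protected RestStatus status;
        protected Throwable cause;

        protected ShardOperationFailedExceptionSketch() {}

        protected ShardOperationFailedExceptionSketch(String index, int shardId, String reason,
                                                      RestStatus status, Throwable cause) {
            this.index = index;
            this.shardId = shardId;
            this.reason = reason;
            this.status = status;
            this.cause = cause;
        }

        public final String index() { return index; }
        public final int shardId() { return shardId; }
        public final String reason() { return reason; }
        public final RestStatus status() { return status; }
        public final Throwable getCause() { return cause; }
    }

The one wrinkle is wire deserialization: Failure.readFrom only learns the shard id after reading it from the stream, which is why the hunk below assigns super.shardId and super.index by hand.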
- */ - @Override - public RestStatus status() { - return status; - } - - @Override - public Throwable getCause() { - return cause; - } - /** * @return Whether this failure occurred on a primary shard. * (this only reports true for delete by query) @@ -310,6 +270,8 @@ public class ReplicationResponse extends ActionResponse { @Override public void readFrom(StreamInput in) throws IOException { shardId = ShardId.readShardId(in); + super.shardId = shardId.getId(); + super.index = shardId.getIndexName(); nodeId = in.readOptionalString(); cause = in.readException(); status = RestStatus.readFrom(in); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 53d9752f4ed..dbdd5acae1f 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -929,7 +929,7 @@ public abstract class TransportReplicationAction< if (actualAllocationId.equals(allocationId) == false) { throw new ShardNotFoundException(shardId, "expected aID [{}] but found [{}]", allocationId, actualAllocationId); } - final long actualTerm = indexShard.getPrimaryTerm(); + final long actualTerm = indexShard.getPendingPrimaryTerm(); if (actualTerm != primaryTerm) { throw new ShardNotFoundException(shardId, "expected aID [{}] with term [{}] but found [{}]", allocationId, primaryTerm, actualTerm); @@ -983,7 +983,7 @@ public abstract class TransportReplicationAction< } public boolean isRelocated() { - return indexShard.isPrimaryMode() == false; + return indexShard.isRelocatedPrimary(); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java index ca91a32a17a..ae029ce3f93 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java @@ -83,7 +83,7 @@ public abstract class TransportWriteAction< return location; } - protected static Location locationToSync(Location current, Location next) { + public static Location locationToSync(Location current, Location next) { /* here we are moving forward in the translog with each operation. 
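locationToSync, made public here so the bulk code can reuse it, relies on the property the surrounding comment states: translog locations only move forward, so the later of two locations subsumes everything before it. With Translog.Location simplified to a plain long offset (an assumption; the real type also carries a generation and is compared via compareTo), the contract is just:

    // Sketch of the locationToSync contract, locations reduced to long offsets.
    final class LocationMax {
        static long locationToSync(long current, long next) {
            assert next >= current : "translog locations must move forward";
            return Math.max(current, next);
        }
    }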
Under the hood this might * cross translog files which is ok since from the user perspective the translog is like a * tape where only the highest location needs to be fsynced in order to sync all previous diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 5212b1f3521..77485f81e58 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -19,6 +19,11 @@ package org.elasticsearch.action.update; +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.function.LongSupplier; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.DocWriteResponse; @@ -42,21 +47,22 @@ import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.UpdateScript; import org.elasticsearch.search.lookup.SourceLookup; -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.function.LongSupplier; +import static org.elasticsearch.common.Booleans.parseBoolean; /** * Helper for translating an update request to an index, delete request or update response. */ public class UpdateHelper extends AbstractComponent { + + /** Whether scripts should add the ctx variable to the params map. */ + private static final boolean CTX_IN_PARAMS = + parseBoolean(System.getProperty("es.scripting.update.ctx_in_params"), true); + private final ScriptService scriptService; public UpdateHelper(Settings settings, ScriptService scriptService) { @@ -279,10 +285,18 @@ public class UpdateHelper extends AbstractComponent { private Map executeScript(Script script, Map ctx) { try { if (scriptService != null) { - ExecutableScript.Factory factory = scriptService.compile(script, ExecutableScript.UPDATE_CONTEXT); - ExecutableScript executableScript = factory.newInstance(script.getParams()); - executableScript.setNextVar(ContextFields.CTX, ctx); - executableScript.run(); + UpdateScript.Factory factory = scriptService.compile(script, UpdateScript.CONTEXT); + final Map params; + if (CTX_IN_PARAMS) { + params = new HashMap<>(script.getParams()); + params.put(ContextFields.CTX, ctx); + deprecationLogger.deprecated("Using `ctx` via `params.ctx` is deprecated. 
" + + "Use -Des.scripting.update.ctx_in_params=false to enforce non-deprecated usage."); + } else { + params = script.getParams(); + } + UpdateScript executableScript = factory.newInstance(params); + executableScript.execute(ctx); } } catch (Exception e) { throw new IllegalArgumentException("failed to execute script", e); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 76db6db7674..bc2fe747c03 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -345,10 +345,7 @@ final class Bootstrap { if (foreground && maybeConsoleAppender != null) { Loggers.removeAppender(rootLogger, maybeConsoleAppender); } - Logger logger = Loggers.getLogger(Bootstrap.class); - if (INSTANCE.node != null) { - logger = Loggers.getLogger(Bootstrap.class, Node.NODE_NAME_SETTING.get(INSTANCE.node.settings())); - } + Logger logger = LogManager.getLogger(Bootstrap.class); // HACK, it sucks to do this, but we will run users out of disk space otherwise if (e instanceof CreationException) { // guice: log the shortened exc to the log file diff --git a/server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java b/server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java index 949b0110fff..624c7999637 100644 --- a/server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java @@ -51,13 +51,11 @@ import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageRequestBuilde import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageResponse; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequestBuilder; -import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequestBuilder; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequestBuilder; -import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequestBuilder; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; @@ -75,7 +73,6 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotReq import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequestBuilder; -import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequestBuilder; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; @@ -93,13 +90,11 
@@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequestBuilder; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequestBuilder; -import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptResponse; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequestBuilder; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequestBuilder; -import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptResponse; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequest; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequestBuilder; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; @@ -113,7 +108,7 @@ import org.elasticsearch.action.ingest.PutPipelineRequestBuilder; import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder; import org.elasticsearch.action.ingest.SimulatePipelineResponse; -import org.elasticsearch.action.ingest.WritePipelineResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.tasks.TaskId; @@ -416,12 +411,12 @@ public interface ClusterAdminClient extends ElasticsearchClient { /** * Registers a snapshot repository. */ - ActionFuture putRepository(PutRepositoryRequest request); + ActionFuture putRepository(PutRepositoryRequest request); /** * Registers a snapshot repository. */ - void putRepository(PutRepositoryRequest request, ActionListener listener); + void putRepository(PutRepositoryRequest request, ActionListener listener); /** * Registers a snapshot repository. @@ -431,12 +426,12 @@ public interface ClusterAdminClient extends ElasticsearchClient { /** * Unregisters a repository. */ - ActionFuture deleteRepository(DeleteRepositoryRequest request); + ActionFuture deleteRepository(DeleteRepositoryRequest request); /** * Unregisters a repository. */ - void deleteRepository(DeleteRepositoryRequest request, ActionListener listener); + void deleteRepository(DeleteRepositoryRequest request, ActionListener listener); /** * Unregisters a repository. @@ -506,12 +501,12 @@ public interface ClusterAdminClient extends ElasticsearchClient { /** * Delete snapshot. */ - ActionFuture deleteSnapshot(DeleteSnapshotRequest request); + ActionFuture deleteSnapshot(DeleteSnapshotRequest request); /** * Delete snapshot. */ - void deleteSnapshot(DeleteSnapshotRequest request, ActionListener listener); + void deleteSnapshot(DeleteSnapshotRequest request, ActionListener listener); /** * Delete snapshot. 
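The hunks above retype the repository and snapshot admin calls; the hunks below do the same for pipelines and stored scripts. For callers the pattern is uniform: block on the future (or pass a listener) and read the single acknowledged flag. A hypothetical blocking caller:

    import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
    import org.elasticsearch.action.support.master.AcknowledgedResponse;
    import org.elasticsearch.client.Client;

    // Hypothetical caller; only the return type of deleteSnapshot changes in this patch.
    final class SnapshotCleanup {
        static void deleteSnapshot(Client client, String repo, String snapshot) {
            AcknowledgedResponse resp = client.admin().cluster()
                .deleteSnapshot(new DeleteSnapshotRequest(repo, snapshot))
                .actionGet();
            if (resp.isAcknowledged() == false) {
                throw new IllegalStateException("master did not acknowledge deletion of " + snapshot);
            }
        }
    }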
@@ -574,12 +569,12 @@ public interface ClusterAdminClient extends ElasticsearchClient { /** * Stores an ingest pipeline */ - void putPipeline(PutPipelineRequest request, ActionListener listener); + void putPipeline(PutPipelineRequest request, ActionListener listener); /** * Stores an ingest pipeline */ - ActionFuture putPipeline(PutPipelineRequest request); + ActionFuture putPipeline(PutPipelineRequest request); /** * Stores an ingest pipeline @@ -596,12 +591,12 @@ public interface ClusterAdminClient extends ElasticsearchClient { /** * Deletes a stored ingest pipeline */ - void deletePipeline(DeletePipelineRequest request, ActionListener listener); + void deletePipeline(DeletePipelineRequest request, ActionListener listener); /** * Deletes a stored ingest pipeline */ - ActionFuture deletePipeline(DeletePipelineRequest request); + ActionFuture deletePipeline(DeletePipelineRequest request); /** * Deletes a stored ingest pipeline @@ -672,12 +667,12 @@ public interface ClusterAdminClient extends ElasticsearchClient { /** * Delete a script from the cluster state */ - void deleteStoredScript(DeleteStoredScriptRequest request, ActionListener listener); + void deleteStoredScript(DeleteStoredScriptRequest request, ActionListener listener); /** * Delete a script from the cluster state */ - ActionFuture deleteStoredScript(DeleteStoredScriptRequest request); + ActionFuture deleteStoredScript(DeleteStoredScriptRequest request); /** * Delete a script from the cluster state @@ -692,12 +687,12 @@ public interface ClusterAdminClient extends ElasticsearchClient { /** * Store a script in the cluster state */ - void putStoredScript(PutStoredScriptRequest request, ActionListener listener); + void putStoredScript(PutStoredScriptRequest request, ActionListener listener); /** * Store a script in the cluster state */ - ActionFuture putStoredScript(PutStoredScriptRequest request); + ActionFuture putStoredScript(PutStoredScriptRequest request); /** * Get a script from the cluster state diff --git a/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java b/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java index 81de57f91af..60e9334c87d 100644 --- a/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; -import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistRequestBuilder; import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; @@ -37,13 +36,11 @@ import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheReque import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexRequestBuilder; -import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import 
org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequestBuilder; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; @@ -70,7 +67,6 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequestBuil import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuilder; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequestBuilder; import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; @@ -91,7 +87,6 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequestBui import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequestBuilder; -import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoreRequestBuilder; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; @@ -103,13 +98,11 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequestBuilder; -import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateResponse; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequestBuilder; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusRequest; import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusRequestBuilder; import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusResponse; @@ -119,6 +112,7 @@ import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeResponse; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequestBuilder; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import 
org.elasticsearch.common.Nullable; /** @@ -281,7 +275,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @return The result future * @see org.elasticsearch.client.Requests#deleteIndexRequest(String) */ - ActionFuture delete(DeleteIndexRequest request); + ActionFuture delete(DeleteIndexRequest request); /** * Deletes an index based on the index name. @@ -290,7 +284,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param listener A listener to be notified with a result * @see org.elasticsearch.client.Requests#deleteIndexRequest(String) */ - void delete(DeleteIndexRequest request, ActionListener listener); + void delete(DeleteIndexRequest request, ActionListener listener); /** * Deletes an index based on the index name. @@ -306,7 +300,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @return The result future * @see org.elasticsearch.client.Requests#closeIndexRequest(String) */ - ActionFuture close(CloseIndexRequest request); + ActionFuture close(CloseIndexRequest request); /** * Closes an index based on the index name. @@ -315,7 +309,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param listener A listener to be notified with a result * @see org.elasticsearch.client.Requests#closeIndexRequest(String) */ - void close(CloseIndexRequest request, ActionListener listener); + void close(CloseIndexRequest request, ActionListener listener); /** * Closes one or more indices based on their index name. @@ -524,7 +518,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @return A result future * @see org.elasticsearch.client.Requests#putMappingRequest(String...) */ - ActionFuture putMapping(PutMappingRequest request); + ActionFuture putMapping(PutMappingRequest request); /** * Add mapping definition for a type into one or more indices. @@ -533,7 +527,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param listener A listener to be notified with a result * @see org.elasticsearch.client.Requests#putMappingRequest(String...) */ - void putMapping(PutMappingRequest request, ActionListener listener); + void putMapping(PutMappingRequest request, ActionListener listener); /** * Add mapping definition for a type into one or more indices. @@ -547,7 +541,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @return The result future * @see Requests#indexAliasesRequest() */ - ActionFuture aliases(IndicesAliasesRequest request); + ActionFuture aliases(IndicesAliasesRequest request); /** * Allows to add/remove aliases from indices. @@ -556,7 +550,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param listener A listener to be notified with a result * @see Requests#indexAliasesRequest() */ - void aliases(IndicesAliasesRequest request, ActionListener listener); + void aliases(IndicesAliasesRequest request, ActionListener listener); /** * Allows to add/remove aliases from indices. @@ -652,7 +646,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param request the update settings request * @return The result future */ - ActionFuture updateSettings(UpdateSettingsRequest request); + ActionFuture updateSettings(UpdateSettingsRequest request); /** * Updates settings of one or more indices. 
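The same retyping applies to the listener variants. A hypothetical async caller of the new updateSettings signature, using ActionListener.wrap (which exists independently of this patch):

    import java.util.function.Consumer;
    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
    import org.elasticsearch.action.support.master.AcknowledgedResponse;
    import org.elasticsearch.client.IndicesAdminClient;
    import org.elasticsearch.common.settings.Settings;

    // Hypothetical async caller for the retyped listener signature.
    final class SettingsUpdater {
        static void disableRefresh(IndicesAdminClient indices, String index, Consumer<Boolean> onDone) {
            UpdateSettingsRequest request = new UpdateSettingsRequest(index)
                .settings(Settings.builder().put("index.refresh_interval", "-1"));
            indices.updateSettings(request, ActionListener.wrap(
                (AcknowledgedResponse r) -> onDone.accept(r.isAcknowledged()),
                e -> onDone.accept(false) // surface failures as "not acknowledged"
            ));
        }
    }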
@@ -660,7 +654,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param request the update settings request * @param listener A listener to be notified with the response */ - void updateSettings(UpdateSettingsRequest request, ActionListener listener); + void updateSettings(UpdateSettingsRequest request, ActionListener listener); /** * Update indices settings. @@ -701,12 +695,12 @@ public interface IndicesAdminClient extends ElasticsearchClient { /** * Puts an index template. */ - ActionFuture putTemplate(PutIndexTemplateRequest request); + ActionFuture putTemplate(PutIndexTemplateRequest request); /** * Puts an index template. */ - void putTemplate(PutIndexTemplateRequest request, ActionListener listener); + void putTemplate(PutIndexTemplateRequest request, ActionListener listener); /** * Puts an index template. @@ -718,12 +712,12 @@ public interface IndicesAdminClient extends ElasticsearchClient { /** * Deletes index template. */ - ActionFuture deleteTemplate(DeleteIndexTemplateRequest request); + ActionFuture deleteTemplate(DeleteIndexTemplateRequest request); /** * Deletes an index template. */ - void deleteTemplate(DeleteIndexTemplateRequest request, ActionListener listener); + void deleteTemplate(DeleteIndexTemplateRequest request, ActionListener listener); /** * Deletes an index template. diff --git a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java index 31e5e319007..86d9d2c445f 100644 --- a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -65,7 +65,6 @@ import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageResponse; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequestBuilder; -import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequestBuilder; @@ -73,7 +72,6 @@ import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRe import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequestBuilder; -import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequestBuilder; @@ -97,7 +95,6 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRes import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequestBuilder; -import 
org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsAction; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequestBuilder; @@ -121,7 +118,6 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequestBuilder; -import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptResponse; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequestBuilder; @@ -129,7 +125,6 @@ import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRespo import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequestBuilder; -import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptResponse; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksAction; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequest; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequestBuilder; @@ -137,7 +132,6 @@ import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; -import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistAction; import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistRequestBuilder; import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse; @@ -156,7 +150,6 @@ import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRespo import org.elasticsearch.action.admin.indices.close.CloseIndexAction; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexRequestBuilder; -import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; @@ -164,7 +157,6 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsAction; import 
org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequestBuilder; @@ -200,7 +192,6 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuilder; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.open.OpenIndexAction; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequestBuilder; @@ -228,7 +219,6 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequestBuilder; -import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoreRequestBuilder; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresAction; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest; @@ -244,7 +234,6 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequestBuilder; -import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateResponse; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequestBuilder; @@ -252,7 +241,6 @@ import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResp import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusAction; import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusRequest; import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusRequestBuilder; @@ -307,7 +295,6 @@ import org.elasticsearch.action.ingest.SimulatePipelineAction; import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder; import org.elasticsearch.action.ingest.SimulatePipelineResponse; -import org.elasticsearch.action.ingest.WritePipelineResponse; import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.ClearScrollRequestBuilder; @@ -325,6 +312,7 @@ import org.elasticsearch.action.search.SearchScrollRequest; import 
org.elasticsearch.action.search.SearchScrollRequestBuilder;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.action.support.ThreadedActionListener;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.action.termvectors.MultiTermVectorsAction;
 import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
 import org.elasticsearch.action.termvectors.MultiTermVectorsRequestBuilder;
@@ -939,12 +927,12 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     }

     @Override
-    public ActionFuture<PutRepositoryResponse> putRepository(PutRepositoryRequest request) {
+    public ActionFuture<AcknowledgedResponse> putRepository(PutRepositoryRequest request) {
         return execute(PutRepositoryAction.INSTANCE, request);
     }

     @Override
-    public void putRepository(PutRepositoryRequest request, ActionListener<PutRepositoryResponse> listener) {
+    public void putRepository(PutRepositoryRequest request, ActionListener<AcknowledgedResponse> listener) {
         execute(PutRepositoryAction.INSTANCE, request, listener);
     }

@@ -985,12 +973,12 @@ public abstract class AbstractClient extends AbstractComponent implements Client

     @Override
-    public ActionFuture<DeleteSnapshotResponse> deleteSnapshot(DeleteSnapshotRequest request) {
+    public ActionFuture<AcknowledgedResponse> deleteSnapshot(DeleteSnapshotRequest request) {
         return execute(DeleteSnapshotAction.INSTANCE, request);
     }

     @Override
-    public void deleteSnapshot(DeleteSnapshotRequest request, ActionListener<DeleteSnapshotResponse> listener) {
+    public void deleteSnapshot(DeleteSnapshotRequest request, ActionListener<AcknowledgedResponse> listener) {
         execute(DeleteSnapshotAction.INSTANCE, request, listener);
     }

@@ -1001,12 +989,12 @@ public abstract class AbstractClient extends AbstractComponent implements Client

     @Override
-    public ActionFuture<DeleteRepositoryResponse> deleteRepository(DeleteRepositoryRequest request) {
+    public ActionFuture<AcknowledgedResponse> deleteRepository(DeleteRepositoryRequest request) {
         return execute(DeleteRepositoryAction.INSTANCE, request);
     }

     @Override
-    public void deleteRepository(DeleteRepositoryRequest request, ActionListener<DeleteRepositoryResponse> listener) {
+    public void deleteRepository(DeleteRepositoryRequest request, ActionListener<AcknowledgedResponse> listener) {
         execute(DeleteRepositoryAction.INSTANCE, request, listener);
     }

@@ -1082,12 +1070,12 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     }

     @Override
-    public void putPipeline(PutPipelineRequest request, ActionListener<WritePipelineResponse> listener) {
+    public void putPipeline(PutPipelineRequest request, ActionListener<AcknowledgedResponse> listener) {
         execute(PutPipelineAction.INSTANCE, request, listener);
     }

     @Override
-    public ActionFuture<WritePipelineResponse> putPipeline(PutPipelineRequest request) {
+    public ActionFuture<AcknowledgedResponse> putPipeline(PutPipelineRequest request) {
         return execute(PutPipelineAction.INSTANCE, request);
     }

@@ -1102,12 +1090,12 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     }

     @Override
-    public void deletePipeline(DeletePipelineRequest request, ActionListener<WritePipelineResponse> listener) {
+    public void deletePipeline(DeletePipelineRequest request, ActionListener<AcknowledgedResponse> listener) {
         execute(DeletePipelineAction.INSTANCE, request, listener);
     }

     @Override
-    public ActionFuture<WritePipelineResponse> deletePipeline(DeletePipelineRequest request) {
+    public ActionFuture<AcknowledgedResponse> deletePipeline(DeletePipelineRequest request) {
         return execute(DeletePipelineAction.INSTANCE, request);
     }

@@ -1197,23 +1185,23 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     }

     @Override
-    public void putStoredScript(final PutStoredScriptRequest request, ActionListener<PutStoredScriptResponse> listener){
+    public void putStoredScript(final PutStoredScriptRequest request, ActionListener<AcknowledgedResponse> listener){
         execute(PutStoredScriptAction.INSTANCE, request, listener);
     }

     @Override
-    public ActionFuture<PutStoredScriptResponse> putStoredScript(final PutStoredScriptRequest request){
+    public ActionFuture<AcknowledgedResponse> putStoredScript(final PutStoredScriptRequest request){
         return execute(PutStoredScriptAction.INSTANCE, request);
     }

     @Override
-    public void deleteStoredScript(DeleteStoredScriptRequest request, ActionListener<DeleteStoredScriptResponse> listener){
+    public void deleteStoredScript(DeleteStoredScriptRequest request, ActionListener<AcknowledgedResponse> listener){
         execute(DeleteStoredScriptAction.INSTANCE, request, listener);
     }

     @Override
-    public ActionFuture<DeleteStoredScriptResponse> deleteStoredScript(DeleteStoredScriptRequest request){
+    public ActionFuture<AcknowledgedResponse> deleteStoredScript(DeleteStoredScriptRequest request){
         return execute(DeleteStoredScriptAction.INSTANCE, request);
     }

@@ -1284,12 +1272,12 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     }

     @Override
-    public ActionFuture<IndicesAliasesResponse> aliases(final IndicesAliasesRequest request) {
+    public ActionFuture<AcknowledgedResponse> aliases(final IndicesAliasesRequest request) {
         return execute(IndicesAliasesAction.INSTANCE, request);
     }

     @Override
-    public void aliases(final IndicesAliasesRequest request, final ActionListener<IndicesAliasesResponse> listener) {
+    public void aliases(final IndicesAliasesRequest request, final ActionListener<AcknowledgedResponse> listener) {
         execute(IndicesAliasesAction.INSTANCE, request, listener);
     }

@@ -1374,12 +1362,12 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     }

     @Override
-    public ActionFuture<DeleteIndexResponse> delete(final DeleteIndexRequest request) {
+    public ActionFuture<AcknowledgedResponse> delete(final DeleteIndexRequest request) {
         return execute(DeleteIndexAction.INSTANCE, request);
     }

     @Override
-    public void delete(final DeleteIndexRequest request, final ActionListener<DeleteIndexResponse> listener) {
+    public void delete(final DeleteIndexRequest request, final ActionListener<AcknowledgedResponse> listener) {
         execute(DeleteIndexAction.INSTANCE, request, listener);
     }

@@ -1389,12 +1377,12 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     }

     @Override
-    public ActionFuture<CloseIndexResponse> close(final CloseIndexRequest request) {
+    public ActionFuture<AcknowledgedResponse> close(final CloseIndexRequest request) {
         return execute(CloseIndexAction.INSTANCE, request);
     }

     @Override
-    public void close(final CloseIndexRequest request, final ActionListener<CloseIndexResponse> listener) {
+    public void close(final CloseIndexRequest request, final ActionListener<AcknowledgedResponse> listener) {
         execute(CloseIndexAction.INSTANCE, request, listener);
     }

@@ -1479,12 +1467,12 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     }

     @Override
-    public ActionFuture<PutMappingResponse> putMapping(final PutMappingRequest request) {
+    public ActionFuture<AcknowledgedResponse> putMapping(final PutMappingRequest request) {
         return execute(PutMappingAction.INSTANCE, request);
     }

     @Override
-    public void putMapping(final PutMappingRequest request, final ActionListener<PutMappingResponse> listener) {
+    public void putMapping(final PutMappingRequest request, final ActionListener<AcknowledgedResponse> listener) {
         execute(PutMappingAction.INSTANCE, request, listener);
     }

@@ -1614,12 +1602,12 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     }

     @Override
-    public ActionFuture<UpdateSettingsResponse> updateSettings(final UpdateSettingsRequest request) {
+    public ActionFuture<AcknowledgedResponse> updateSettings(final UpdateSettingsRequest request) {
         return execute(UpdateSettingsAction.INSTANCE, request);
     }

     @Override
-    public void updateSettings(final UpdateSettingsRequest request, final ActionListener<UpdateSettingsResponse> listener) {
+    public void updateSettings(final UpdateSettingsRequest request, final ActionListener<AcknowledgedResponse> listener) {
         execute(UpdateSettingsAction.INSTANCE, request, listener);
     }

@@ -1654,12 +1642,12 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     }

     @Override
-    public ActionFuture<PutIndexTemplateResponse> putTemplate(final PutIndexTemplateRequest request) {
+    public ActionFuture<AcknowledgedResponse> putTemplate(final PutIndexTemplateRequest request) {
         return execute(PutIndexTemplateAction.INSTANCE, request);
     }

     @Override
-    public void putTemplate(final PutIndexTemplateRequest request, final ActionListener<PutIndexTemplateResponse> listener) {
+    public void putTemplate(final PutIndexTemplateRequest request, final ActionListener<AcknowledgedResponse> listener) {
         execute(PutIndexTemplateAction.INSTANCE, request, listener);
     }

@@ -1684,12 +1672,12 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     }

     @Override
-    public ActionFuture<DeleteIndexTemplateResponse> deleteTemplate(final DeleteIndexTemplateRequest request) {
+    public ActionFuture<AcknowledgedResponse> deleteTemplate(final DeleteIndexTemplateRequest request) {
         return execute(DeleteIndexTemplateAction.INSTANCE, request);
     }

     @Override
-    public void deleteTemplate(final DeleteIndexTemplateRequest request, final ActionListener<DeleteIndexTemplateResponse> listener) {
+    public void deleteTemplate(final DeleteIndexTemplateRequest request, final ActionListener<AcknowledgedResponse> listener) {
         execute(DeleteIndexTemplateAction.INSTANCE, request, listener);
     }

diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java
index 9c5c642df6b..8362198a12c 100644
--- a/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java
+++ b/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java
@@ -116,23 +116,6 @@ public class ClusterModule extends AbstractModule {
         this.allocationService = new AllocationService(settings, allocationDeciders, shardsAllocator, clusterInfoService);
     }

-    public static Map<String, Supplier<ClusterState.Custom>> getClusterStateCustomSuppliers(List<ClusterPlugin> clusterPlugins) {
-        final Map<String, Supplier<ClusterState.Custom>> customSupplier = new HashMap<>();
-        customSupplier.put(SnapshotDeletionsInProgress.TYPE, SnapshotDeletionsInProgress::new);
-        customSupplier.put(RestoreInProgress.TYPE, RestoreInProgress::new);
-        customSupplier.put(SnapshotsInProgress.TYPE, SnapshotsInProgress::new);
-        for (ClusterPlugin plugin : clusterPlugins) {
-            Map<String, Supplier<ClusterState.Custom>> initialCustomSupplier = plugin.getInitialClusterStateCustomSupplier();
-            for (String key : initialCustomSupplier.keySet()) {
-                if (customSupplier.containsKey(key)) {
-                    throw new IllegalStateException("custom supplier key [" + key + "] is registered more than once");
-                }
-            }
-            customSupplier.putAll(initialCustomSupplier);
-        }
-        return Collections.unmodifiableMap(customSupplier);
-    }
-
     public static List<Entry> getNamedWriteables() {
         List<Entry> entries = new ArrayList<>();
         // Cluster State
diff --git a/server/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java b/server/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java
index 998cd5ba0a8..2a9d960f8cc 100644
--- a/server/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java
+++ b/server/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java
@@ -101,7 +101,7 @@ public class NodeConnectionsService extends AbstractLifecycleComponent {
         }

         @Override
-        protected void doRun() throws Exception {
+        protected void doRun() {
             try (Releasable ignored = nodeLocks.acquire(node)) {
                 validateAndConnectIfNeeded(node);
             }
         }
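With this consolidation, every acknowledged-style admin call in the hunks above reports back through the same response type. A minimal caller sketch for illustration only (the index name is invented; the client entry point and listener shape are the ones shown in the diff):

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
    import org.elasticsearch.action.support.master.AcknowledgedResponse;
    import org.elasticsearch.client.Client;

    class DeleteIndexExample {
        static void deleteIndex(Client client) {
            // one listener type now fits delete/close/putMapping/updateSettings/... alike
            client.admin().indices().delete(new DeleteIndexRequest("my-index"),
                new ActionListener<AcknowledgedResponse>() {
                    @Override
                    public void onResponse(AcknowledgedResponse response) {
                        if (response.isAcknowledged() == false) {
                            // the master did not confirm the operation within the timeout
                        }
                    }

                    @Override
                    public void onFailure(Exception e) {
                        // handle the error
                    }
                });
        }
    }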
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java
index 3bb9d42a578..8d0ad8efb7f 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java
@@ -26,9 +26,10 @@ import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.joda.Joda;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.CompoundDateTimeFormatter;
+import org.elasticsearch.common.time.DateFormatters;
 import org.elasticsearch.common.xcontent.ContextParser;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
@@ -37,6 +38,8 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.Index;

 import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -365,6 +368,9 @@ public final class IndexGraveyard implements MetaData.Custom {
         TOMBSTONE_PARSER.declareString((b, s) -> {}, new ParseField(DELETE_DATE_KEY));
     }

+    static final CompoundDateTimeFormatter FORMATTER =
+        DateFormatters.forPattern("strict_date_optional_time").withZone(ZoneOffset.UTC);
+
     static ContextParser getParser() {
         return (parser, context) -> TOMBSTONE_PARSER.apply(parser, null).build();
     }
@@ -428,7 +434,8 @@ public final class IndexGraveyard implements MetaData.Custom {

         @Override
         public String toString() {
-            return "[index=" + index + ", deleteDate=" + Joda.getStrictStandardDateFormatter().printer().print(deleteDateInMillis) + "]";
+            String date = FORMATTER.format(Instant.ofEpochMilli(deleteDateInMillis));
+            return "[index=" + index + ", deleteDate=" + date + "]";
         }

         @Override
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/TemplateUpgradeService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/TemplateUpgradeService.java
index 024cc44dd6a..af8289d03c1 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/TemplateUpgradeService.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/TemplateUpgradeService.java
@@ -24,9 +24,8 @@ import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest;
-import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateResponse;
 import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
-import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.ClusterChangedEvent;
 import org.elasticsearch.cluster.ClusterState;
@@ -147,9 +146,9 @@ public class TemplateUpgradeService extends AbstractComponent implements Cluster
             PutIndexTemplateRequest request = new PutIndexTemplateRequest(change.getKey()).source(change.getValue(), XContentType.JSON);
             request.masterNodeTimeout(TimeValue.timeValueMinutes(1));
-            client.admin().indices().putTemplate(request, new ActionListener<PutIndexTemplateResponse>() {
+            client.admin().indices().putTemplate(request, new ActionListener<AcknowledgedResponse>() {
                 @Override
-                public void onResponse(PutIndexTemplateResponse response) {
+                public void onResponse(AcknowledgedResponse response) {
                     if (response.isAcknowledged() == false) {
                         anyUpgradeFailed.set(true);
                         logger.warn("Error updating template [{}], request was not acknowledged", change.getKey());
@@ -169,9 +168,9 @@ public class TemplateUpgradeService extends AbstractComponent implements Cluster
         for (String template : deletions) {
             DeleteIndexTemplateRequest request = new DeleteIndexTemplateRequest(template);
             request.masterNodeTimeout(TimeValue.timeValueMinutes(1));
-            client.admin().indices().deleteTemplate(request, new ActionListener<DeleteIndexTemplateResponse>() {
+            client.admin().indices().deleteTemplate(request, new ActionListener<AcknowledgedResponse>() {
                 @Override
-                public void onResponse(DeleteIndexTemplateResponse response) {
+                public void onResponse(AcknowledgedResponse response) {
                     if (response.isAcknowledged() == false) {
                         anyUpgradeFailed.set(true);
                         logger.warn("Error deleting template [{}], request was not acknowledged", template);
diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java b/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java
index a543f4c3d3b..ad715500a9e 100644
--- a/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java
@@ -28,17 +28,18 @@ import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.joda.FormatDateTimeFormatter;
-import org.elasticsearch.common.joda.Joda;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.CompoundDateTimeFormatter;
+import org.elasticsearch.common.time.DateFormatters;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.ToXContent.Params;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;

 import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
 import java.util.Locale;
 import java.util.Objects;

@@ -47,7 +48,8 @@ import java.util.Objects;
  */
 public final class UnassignedInfo implements ToXContentFragment, Writeable {

-    public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime");
+    public static final CompoundDateTimeFormatter DATE_TIME_FORMATTER =
+        DateFormatters.forPattern("dateOptionalTime").withZone(ZoneOffset.UTC);

     public static final Setting<TimeValue> INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING =
         Setting.positiveTimeSetting("index.unassigned.node_left.delayed_timeout", TimeValue.timeValueMinutes(1), Property.Dynamic,
@@ -409,7 +411,7 @@ public final class UnassignedInfo implements ToXContentFragment, Writeable {
     public String shortSummary() {
         StringBuilder sb = new StringBuilder();
         sb.append("[reason=").append(reason).append("]");
-        sb.append(", at[").append(DATE_TIME_FORMATTER.printer().print(unassignedTimeMillis)).append("]");
+        sb.append(", at[").append(DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(unassignedTimeMillis))).append("]");
         if (failedAllocations > 0) {
             sb.append(", failed_attempts[").append(failedAllocations).append("]");
         }
@@ -432,7 +434,7 @@ public final class UnassignedInfo implements ToXContentFragment, Writeable {
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject("unassigned_info"); builder.field("reason", reason); - builder.field("at", DATE_TIME_FORMATTER.printer().print(unassignedTimeMillis)); + builder.field("at", DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(unassignedTimeMillis))); if (failedAllocations > 0) { builder.field("failed_attempts", failedAllocations); } diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplier.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplier.java index c587ab272e9..781adb3742a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplier.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplier.java @@ -39,11 +39,6 @@ public interface ClusterApplier { */ void onNewClusterState(String source, Supplier clusterStateSupplier, ClusterApplyListener listener); - /** - * Creates a new cluster state builder that is initialized with the cluster name and all initial cluster state customs. - */ - ClusterState.Builder newClusterStateBuilder(); - /** * Listener for results of cluster state application */ diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java index 2fb7c25671c..5dd36b9b1bc 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java @@ -96,17 +96,14 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements private final AtomicReference state; // last applied state private NodeConnectionsService nodeConnectionsService; - private Supplier stateBuilderSupplier; - public ClusterApplierService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool, Supplier stateBuilderSupplier) { + public ClusterApplierService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool) { super(settings); this.clusterSettings = clusterSettings; this.threadPool = threadPool; this.state = new AtomicReference<>(); this.slowTaskLoggingThreshold = CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.get(settings); this.localNodeMasterListeners = new LocalNodeMasterListeners(threadPool); - this.stateBuilderSupplier = stateBuilderSupplier; } public void setSlowTaskLoggingThreshold(TimeValue slowTaskLoggingThreshold) { @@ -652,8 +649,4 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements return System.nanoTime(); } - @Override - public ClusterState.Builder newClusterStateBuilder() { - return stateBuilderSupplier.get(); - } } diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java index df7f20ca357..fc5dc678bd0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java @@ -42,7 +42,6 @@ import org.elasticsearch.threadpool.ThreadPool; import java.util.Collections; import java.util.Map; -import java.util.function.Supplier; public class ClusterService extends AbstractLifecycleComponent { @@ -60,35 +59,15 @@ public class ClusterService extends AbstractLifecycleComponent { private final ClusterSettings clusterSettings; - public ClusterService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool, - Map> initialClusterStateCustoms) { - this(settings, clusterSettings, 
new MasterService(settings, threadPool), - new ClusterApplierService(settings, clusterSettings, threadPool, - () -> ClusterService.newClusterStateBuilder(settings, initialClusterStateCustoms))); - } - - public ClusterService(Settings settings, ClusterSettings clusterSettings, - MasterService masterService, ClusterApplierService clusterApplierService) { + public ClusterService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool) { super(settings); - this.masterService = masterService; + this.masterService = new MasterService(settings, threadPool); this.operationRouting = new OperationRouting(settings, clusterSettings); this.clusterSettings = clusterSettings; this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings); this.clusterSettings.addSettingsUpdateConsumer(CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, this::setSlowTaskLoggingThreshold); - this.clusterApplierService = clusterApplierService; - } - - /** - * Creates a new cluster state builder that is initialized with the cluster name and all initial cluster state customs. - */ - private static ClusterState.Builder newClusterStateBuilder(Settings settings, - Map> initialClusterStateCustoms) { - ClusterState.Builder builder = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.get(settings)); - for (Map.Entry> entry : initialClusterStateCustoms.entrySet()) { - builder.putCustom(entry.getKey(), entry.getValue().get()); - } - return builder; + this.clusterApplierService = new ClusterApplierService(settings, clusterSettings, threadPool); } private void setSlowTaskLoggingThreshold(TimeValue slowTaskLoggingThreshold) { diff --git a/server/src/main/java/org/elasticsearch/common/Rounding.java b/server/src/main/java/org/elasticsearch/common/Rounding.java new file mode 100644 index 00000000000..593964f61e9 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/Rounding.java @@ -0,0 +1,530 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.common; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.TimeValue; + +import java.io.IOException; +import java.time.DayOfWeek; +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.OffsetDateTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.temporal.ChronoField; +import java.time.temporal.ChronoUnit; +import java.time.temporal.IsoFields; +import java.time.temporal.TemporalField; +import java.time.zone.ZoneOffsetTransition; +import java.util.List; +import java.util.Objects; + +/** + * A strategy for rounding date/time based values. + * + * There are two implementations for rounding. + * The first one requires a date time unit and rounds to the supplied date time unit (i.e. quarter of year, day of month) + * The second one allows you to specify an interval to round to + */ +public abstract class Rounding implements Writeable { + + public static String format(long epochMillis) { + return Instant.ofEpochMilli(epochMillis) + "/" + epochMillis; + } + + public enum DateTimeUnit { + WEEK_OF_WEEKYEAR( (byte) 1, IsoFields.WEEK_OF_WEEK_BASED_YEAR), + YEAR_OF_CENTURY( (byte) 2, ChronoField.YEAR_OF_ERA), + QUARTER_OF_YEAR( (byte) 3, IsoFields.QUARTER_OF_YEAR), + MONTH_OF_YEAR( (byte) 4, ChronoField.MONTH_OF_YEAR), + DAY_OF_MONTH( (byte) 5, ChronoField.DAY_OF_MONTH), + HOUR_OF_DAY( (byte) 6, ChronoField.HOUR_OF_DAY), + MINUTES_OF_HOUR( (byte) 7, ChronoField.MINUTE_OF_HOUR), + SECOND_OF_MINUTE( (byte) 8, ChronoField.SECOND_OF_MINUTE); + + private final byte id; + private final TemporalField field; + + DateTimeUnit(byte id, TemporalField field) { + this.id = id; + this.field = field; + } + + public byte getId() { + return id; + } + + public TemporalField getField() { + return field; + } + + public static DateTimeUnit resolve(byte id) { + switch (id) { + case 1: return WEEK_OF_WEEKYEAR; + case 2: return YEAR_OF_CENTURY; + case 3: return QUARTER_OF_YEAR; + case 4: return MONTH_OF_YEAR; + case 5: return DAY_OF_MONTH; + case 6: return HOUR_OF_DAY; + case 7: return MINUTES_OF_HOUR; + case 8: return SECOND_OF_MINUTE; + default: throw new ElasticsearchException("Unknown date time unit id [" + id + "]"); + } + } + } + + public abstract void innerWriteTo(StreamOutput out) throws IOException; + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeByte(id()); + innerWriteTo(out); + } + + public abstract byte id(); + + /** + * Rounds the given value. + */ + public abstract long round(long value); + + /** + * Given the rounded value (which was potentially generated by {@link #round(long)}, returns the next rounding value. For example, with + * interval based rounding, if the interval is 3, {@code nextRoundValue(6) = 9 }. 
+     *
+     * @param value The current rounding value
+     * @return The next rounding value
+     */
+    public abstract long nextRoundingValue(long value);
+
+    @Override
+    public abstract boolean equals(Object obj);
+
+    @Override
+    public abstract int hashCode();
+
+    public static Builder builder(DateTimeUnit unit) {
+        return new Builder(unit);
+    }
+
+    public static Builder builder(TimeValue interval) {
+        return new Builder(interval);
+    }
+
+    public static class Builder {
+
+        private final DateTimeUnit unit;
+        private final long interval;
+
+        private ZoneId timeZone = ZoneOffset.UTC;
+
+        public Builder(DateTimeUnit unit) {
+            this.unit = unit;
+            this.interval = -1;
+        }
+
+        public Builder(TimeValue interval) {
+            this.unit = null;
+            if (interval.millis() < 1)
+                throw new IllegalArgumentException("Zero or negative time interval not supported");
+            this.interval = interval.millis();
+        }
+
+        public Builder timeZone(ZoneId timeZone) {
+            if (timeZone == null) {
+                throw new IllegalArgumentException("Setting null as timezone is not supported");
+            }
+            this.timeZone = timeZone;
+            return this;
+        }
+
+        public Rounding build() {
+            Rounding timeZoneRounding;
+            if (unit != null) {
+                timeZoneRounding = new TimeUnitRounding(unit, timeZone);
+            } else {
+                timeZoneRounding = new TimeIntervalRounding(interval, timeZone);
+            }
+            return timeZoneRounding;
+        }
+    }
+
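A short usage sketch of the Builder defined above (the zone and interval values are illustrative; the builder, round, and nextRoundingValue calls are the API introduced in this file):

    import java.time.ZoneId;
    import org.elasticsearch.common.Rounding;
    import org.elasticsearch.common.unit.TimeValue;

    class RoundingExample {
        public static void main(String[] args) {
            // calendar-aware rounding: truncate to the start of the month in Berlin
            Rounding monthly = Rounding.builder(Rounding.DateTimeUnit.MONTH_OF_YEAR)
                .timeZone(ZoneId.of("Europe/Berlin"))
                .build();
            long monthStart = monthly.round(System.currentTimeMillis());

            // fixed-interval rounding: 15 minute buckets (UTC unless a zone is set)
            Rounding quarterHour = Rounding.builder(TimeValue.timeValueMinutes(15)).build();
            long bucketStart = quarterHour.round(System.currentTimeMillis());
            long nextBucket = quarterHour.nextRoundingValue(bucketStart); // start of the following bucket
            System.out.println(monthStart + " " + bucketStart + " " + nextBucket);
        }
    }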
+    static class TimeUnitRounding extends Rounding {
+
+        static final byte ID = 1;
+
+        private final DateTimeUnit unit;
+        private final ZoneId timeZone;
+        private final boolean unitRoundsToMidnight;
+
+        TimeUnitRounding(DateTimeUnit unit, ZoneId timeZone) {
+            this.unit = unit;
+            this.timeZone = timeZone;
+            this.unitRoundsToMidnight = this.unit.field.getBaseUnit().getDuration().toMillis() > 60L * 60L * 1000L;
+        }
+
+        TimeUnitRounding(StreamInput in) throws IOException {
+            unit = DateTimeUnit.resolve(in.readByte());
+            timeZone = ZoneId.of(in.readString());
+            unitRoundsToMidnight = unit.getField().getBaseUnit().getDuration().toMillis() > 60L * 60L * 1000L;
+        }
+
+        @Override
+        public byte id() {
+            return ID;
+        }
+
+        private LocalDateTime truncateLocalDateTime(LocalDateTime localDateTime) {
+            localDateTime = localDateTime.withNano(0);
+            assert localDateTime.getNano() == 0;
+            if (unit.equals(DateTimeUnit.SECOND_OF_MINUTE)) {
+                return localDateTime;
+            }
+
+            localDateTime = localDateTime.withSecond(0);
+            assert localDateTime.getSecond() == 0;
+            if (unit.equals(DateTimeUnit.MINUTES_OF_HOUR)) {
+                return localDateTime;
+            }
+
+            localDateTime = localDateTime.withMinute(0);
+            assert localDateTime.getMinute() == 0;
+            if (unit.equals(DateTimeUnit.HOUR_OF_DAY)) {
+                return localDateTime;
+            }
+
+            localDateTime = localDateTime.withHour(0);
+            assert localDateTime.getHour() == 0;
+            if (unit.equals(DateTimeUnit.DAY_OF_MONTH)) {
+                return localDateTime;
+            }
+
+            if (unit.equals(DateTimeUnit.WEEK_OF_WEEKYEAR)) {
+                localDateTime = localDateTime.with(ChronoField.DAY_OF_WEEK, 1);
+                assert localDateTime.getDayOfWeek() == DayOfWeek.MONDAY;
+                return localDateTime;
+            }
+
+            localDateTime = localDateTime.withDayOfMonth(1);
+            assert localDateTime.getDayOfMonth() == 1;
+            if (unit.equals(DateTimeUnit.MONTH_OF_YEAR)) {
+                return localDateTime;
+            }
+
+            if (unit.equals(DateTimeUnit.QUARTER_OF_YEAR)) {
+                int quarter = (int) IsoFields.QUARTER_OF_YEAR.getFrom(localDateTime);
+                int month = ((quarter - 1) * 3) + 1;
+                localDateTime = localDateTime.withMonth(month);
+                assert localDateTime.getMonthValue() % 3 == 1;
+                return localDateTime;
+            }
+
+            if (unit.equals(DateTimeUnit.YEAR_OF_CENTURY)) {
+                localDateTime = localDateTime.withMonth(1);
+                assert localDateTime.getMonthValue() == 1;
+                return localDateTime;
+            }
+
+            throw new IllegalArgumentException("NOT YET IMPLEMENTED for unit " + unit);
+        }
+
+        @Override
+        public long round(long utcMillis) {
+            if (unitRoundsToMidnight) {
+                final ZonedDateTime zonedDateTime = Instant.ofEpochMilli(utcMillis).atZone(timeZone);
+                final LocalDateTime localDateTime = zonedDateTime.toLocalDateTime();
+                final LocalDateTime localMidnight = truncateLocalDateTime(localDateTime);
+                return firstTimeOnDay(localMidnight);
+            } else {
+                while (true) {
+                    final Instant truncatedTime = truncateAsLocalTime(utcMillis);
+                    final ZoneOffsetTransition previousTransition = timeZone.getRules().previousTransition(Instant.ofEpochMilli(utcMillis));
+
+                    if (previousTransition == null) {
+                        // truncateAsLocalTime cannot have failed if there were no previous transitions
+                        return truncatedTime.toEpochMilli();
+                    }
+
+                    final long previousTransitionMillis = previousTransition.getInstant().toEpochMilli();
+
+                    if (truncatedTime != null && previousTransitionMillis <= truncatedTime.toEpochMilli()) {
+                        return truncatedTime.toEpochMilli();
+                    }
+
+                    // There was a transition in between the input time and the truncated time. Return to the transition time and
+                    // round that down instead.
+                    utcMillis = previousTransitionMillis - 1;
+                }
+            }
+        }
+
+        private long firstTimeOnDay(LocalDateTime localMidnight) {
+            assert localMidnight.toLocalTime().equals(LocalTime.of(0, 0, 0)) : "firstTimeOnDay should only be called at midnight";
+            assert unitRoundsToMidnight : "firstTimeOnDay should only be called if unitRoundsToMidnight";
+
+            // Now work out what localMidnight actually means
+            final List<ZoneOffset> currentOffsets = timeZone.getRules().getValidOffsets(localMidnight);
+            if (currentOffsets.size() >= 1) {
+                // There is at least one midnight on this day, so choose the first
+                final ZoneOffset firstOffset = currentOffsets.get(0);
+                final OffsetDateTime offsetMidnight = localMidnight.atOffset(firstOffset);
+                return offsetMidnight.toInstant().toEpochMilli();
+            } else {
+                // There were no midnights on this day, so we must have entered the day via an offset transition.
+                // Use the time of the transition as it is the earliest time on the right day.
+                ZoneOffsetTransition zoneOffsetTransition = timeZone.getRules().getTransition(localMidnight);
+                return zoneOffsetTransition.getInstant().toEpochMilli();
+            }
+        }
+
+        private Instant truncateAsLocalTime(long utcMillis) {
+            assert unitRoundsToMidnight == false : "truncateAsLocalTime should not be called if unitRoundsToMidnight";
+
+            final LocalDateTime truncatedLocalDateTime
+                = truncateLocalDateTime(Instant.ofEpochMilli(utcMillis).atZone(timeZone).toLocalDateTime());
+            final List<ZoneOffset> currentOffsets = timeZone.getRules().getValidOffsets(truncatedLocalDateTime);
+
+            if (currentOffsets.size() >= 1) {
+                // at least one possibility - choose the latest one that's still no later than the input time
+                for (int offsetIndex = currentOffsets.size() - 1; offsetIndex >= 0; offsetIndex--) {
+                    final Instant result = truncatedLocalDateTime.atOffset(currentOffsets.get(offsetIndex)).toInstant();
+                    if (result.toEpochMilli() <= utcMillis) {
+                        return result;
+                    }
+                }
+
+                assert false : "rounded time not found for " + utcMillis + " with " + this;
+                return null;
+            } else {
+                // The chosen local time didn't happen. This means we were given a time in an hour (or a minute) whose start
+                // is missing due to an offset transition, so the time cannot be truncated.
+                return null;
+            }
+        }
+
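To make the gap handling above concrete, consider America/Sao_Paulo, where (under the historical tz rules assumed here) DST began at midnight, so 2017-10-15 had no 00:00 local time. A worked sketch, illustrative only:

    import java.time.ZoneId;
    import java.time.ZoneOffset;
    import java.time.ZonedDateTime;
    import org.elasticsearch.common.Rounding;

    class DstGapExample {
        public static void main(String[] args) {
            Rounding dayRounding = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH)
                .timeZone(ZoneId.of("America/Sao_Paulo"))
                .build();
            // 2017-10-15T15:00Z is 13:00 local on a day whose clocks jumped from 00:00 to 01:00
            long input = ZonedDateTime.of(2017, 10, 15, 15, 0, 0, 0, ZoneOffset.UTC)
                .toInstant().toEpochMilli();
            long rounded = dayRounding.round(input);
            // rounded corresponds to 2017-10-15T01:00-02:00, the first instant of that local day:
            // firstTimeOnDay falls back to the offset transition when midnight does not exist
            System.out.println(rounded);
        }
    }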
+        private LocalDateTime nextRelevantMidnight(LocalDateTime localMidnight) {
+            assert localMidnight.toLocalTime().equals(LocalTime.of(0, 0, 0)) : "nextRelevantMidnight should only be called at midnight";
+            assert unitRoundsToMidnight : "nextRelevantMidnight should only be called if unitRoundsToMidnight";
+
+            switch (unit) {
+                case DAY_OF_MONTH:
+                    return localMidnight.plus(1, ChronoUnit.DAYS);
+                case WEEK_OF_WEEKYEAR:
+                    return localMidnight.plus(7, ChronoUnit.DAYS);
+                case MONTH_OF_YEAR:
+                    return localMidnight.plus(1, ChronoUnit.MONTHS);
+                case QUARTER_OF_YEAR:
+                    return localMidnight.plus(3, ChronoUnit.MONTHS);
+                case YEAR_OF_CENTURY:
+                    return localMidnight.plus(1, ChronoUnit.YEARS);
+                default:
+                    throw new IllegalArgumentException("Unknown round-to-midnight unit: " + unit);
+            }
+        }
+
+        @Override
+        public long nextRoundingValue(long utcMillis) {
+            if (unitRoundsToMidnight) {
+                final ZonedDateTime zonedDateTime = Instant.ofEpochMilli(utcMillis).atZone(timeZone);
+                final LocalDateTime localDateTime = zonedDateTime.toLocalDateTime();
+                final LocalDateTime earlierLocalMidnight = truncateLocalDateTime(localDateTime);
+                final LocalDateTime localMidnight = nextRelevantMidnight(earlierLocalMidnight);
+                return firstTimeOnDay(localMidnight);
+            } else {
+                final long unitSize = unit.field.getBaseUnit().getDuration().toMillis();
+                final long roundedAfterOneIncrement = round(utcMillis + unitSize);
+                if (utcMillis < roundedAfterOneIncrement) {
+                    return roundedAfterOneIncrement;
+                } else {
+                    return round(utcMillis + 2 * unitSize);
+                }
+            }
+        }
+
+        @Override
+        public void innerWriteTo(StreamOutput out) throws IOException {
+            out.writeByte(unit.getId());
+            String tz = ZoneOffset.UTC.equals(timeZone) ? "UTC" : timeZone.getId(); // stay joda compatible
+            out.writeString(tz);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(unit, timeZone);
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (obj == null) {
+                return false;
+            }
+            if (getClass() != obj.getClass()) {
+                return false;
+            }
+            TimeUnitRounding other = (TimeUnitRounding) obj;
+            return Objects.equals(unit, other.unit) && Objects.equals(timeZone, other.timeZone);
+        }
+
+        @Override
+        public String toString() {
+            return "[" + timeZone + "][" + unit + "]";
+        }
+    }
+
+    static class TimeIntervalRounding extends Rounding {
+        @Override
+        public String toString() {
+            return "TimeIntervalRounding{" +
+                "interval=" + interval +
+                ", timeZone=" + timeZone +
+                '}';
+        }
+
+        static final byte ID = 2;
+
+        private final long interval;
+        private final ZoneId timeZone;
+
+        TimeIntervalRounding(long interval, ZoneId timeZone) {
+            if (interval < 1)
+                throw new IllegalArgumentException("Zero or negative time interval not supported");
+            this.interval = interval;
+            this.timeZone = timeZone;
+        }
+
+        TimeIntervalRounding(StreamInput in) throws IOException {
+            interval = in.readVLong();
+            timeZone = ZoneId.of(in.readString());
+        }
+
+        @Override
+        public byte id() {
+            return ID;
+        }
+
+        @Override
+        public long round(final long utcMillis) {
+            final Instant utcInstant = Instant.ofEpochMilli(utcMillis);
+            final LocalDateTime rawLocalDateTime = Instant.ofEpochMilli(utcMillis).atZone(timeZone).toLocalDateTime();
+
+            // a millisecond value with the same local time, in UTC, as `utcMillis` has in `timeZone`
+            final long localMillis = utcMillis + timeZone.getRules().getOffset(utcInstant).getTotalSeconds() * 1000;
+            assert localMillis == rawLocalDateTime.toInstant(ZoneOffset.UTC).toEpochMilli();
+
+            final long roundedMillis = roundKey(localMillis, interval) * interval;
+            final LocalDateTime roundedLocalDateTime = Instant.ofEpochMilli(roundedMillis).atZone(ZoneOffset.UTC).toLocalDateTime();
+
+            // Now work out what roundedLocalDateTime actually means
+            final List<ZoneOffset> currentOffsets = timeZone.getRules().getValidOffsets(roundedLocalDateTime);
+            if (currentOffsets.isEmpty() == false) {
+                // There is at least one instant with the desired local time. In general the desired result is
+                // the latest rounded time that's no later than the input time, but this could involve rounding across
+                // a timezone transition, which may yield the wrong result
+                final ZoneOffsetTransition previousTransition = timeZone.getRules().previousTransition(utcInstant.plusMillis(1));
+                for (int offsetIndex = currentOffsets.size() - 1; 0 <= offsetIndex; offsetIndex--) {
+                    final OffsetDateTime offsetTime = roundedLocalDateTime.atOffset(currentOffsets.get(offsetIndex));
+                    final Instant offsetInstant = offsetTime.toInstant();
+                    if (previousTransition != null && offsetInstant.isBefore(previousTransition.getInstant())) {
+                        // Rounding down across the transition can yield the wrong result. It's best to return to the transition time
+                        // and round that down.
+                        return round(previousTransition.getInstant().toEpochMilli() - 1);
+                    }
+
+                    if (utcInstant.isBefore(offsetTime.toInstant()) == false) {
+                        return offsetInstant.toEpochMilli();
+                    }
+                }
+
+                final OffsetDateTime offsetTime = roundedLocalDateTime.atOffset(currentOffsets.get(0));
+                final Instant offsetInstant = offsetTime.toInstant();
+                assert false : this + " failed to round " + utcMillis + " down: " + offsetInstant + " is the earliest possible";
+                return offsetInstant.toEpochMilli(); // TODO or throw something?
+            } else {
+                // The desired time isn't valid because it is within a gap, so just return the gap time.
+                ZoneOffsetTransition zoneOffsetTransition = timeZone.getRules().getTransition(roundedLocalDateTime);
+                return zoneOffsetTransition.getInstant().toEpochMilli();
+            }
+        }
+
+        private static long roundKey(long value, long interval) {
+            if (value < 0) {
+                return (value - interval + 1) / interval;
+            } else {
+                return value / interval;
+            }
+        }
+
+        @Override
+        public long nextRoundingValue(long time) {
+            int offsetSeconds = timeZone.getRules().getOffset(Instant.ofEpochMilli(time)).getTotalSeconds();
+            return ZonedDateTime.ofInstant(Instant.ofEpochMilli(time), ZoneOffset.UTC)
+                .plusSeconds(offsetSeconds)
+                .plusNanos(interval * 1_000_000)
+                .withZoneSameLocal(timeZone)
+                .toInstant().toEpochMilli();
+        }
+
+        @Override
+        public void innerWriteTo(StreamOutput out) throws IOException {
+            out.writeVLong(interval);
+            String tz = ZoneOffset.UTC.equals(timeZone) ?
"UTC" : timeZone.getId(); // stay joda compatible + out.writeString(tz); + } + + @Override + public int hashCode() { + return Objects.hash(interval, timeZone); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + TimeIntervalRounding other = (TimeIntervalRounding) obj; + return Objects.equals(interval, other.interval) && Objects.equals(timeZone, other.timeZone); + } + } + + public static Rounding read(StreamInput in) throws IOException { + Rounding rounding; + byte id = in.readByte(); + switch (id) { + case TimeUnitRounding.ID: + rounding = new TimeUnitRounding(in); + break; + case TimeIntervalRounding.ID: + rounding = new TimeIntervalRounding(in); + break; + default: + throw new ElasticsearchException("unknown rounding id [" + id + "]"); + } + return rounding; + } +} diff --git a/server/src/main/java/org/elasticsearch/common/Table.java b/server/src/main/java/org/elasticsearch/common/Table.java index 430070ee19c..13d13066e16 100644 --- a/server/src/main/java/org/elasticsearch/common/Table.java +++ b/server/src/main/java/org/elasticsearch/common/Table.java @@ -19,9 +19,11 @@ package org.elasticsearch.common; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; +import org.elasticsearch.common.time.CompoundDateTimeFormatter; +import org.elasticsearch.common.time.DateFormatters; +import java.time.Instant; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -83,7 +85,7 @@ public class Table { return this; } - private DateTimeFormatter dateFormat = DateTimeFormat.forPattern("HH:mm:ss"); + private static final CompoundDateTimeFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC); public Table startRow() { if (headers.isEmpty()) { @@ -93,7 +95,7 @@ public class Table { if (withTime) { long time = System.currentTimeMillis(); addCell(TimeUnit.SECONDS.convert(time, TimeUnit.MILLISECONDS)); - addCell(dateFormat.print(time)); + addCell(FORMATTER.format(Instant.ofEpochMilli(time))); } return this; } diff --git a/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java b/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java index 62d6e7e311d..167ffdb7bea 100644 --- a/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java +++ b/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java @@ -23,7 +23,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; @@ -34,13 +33,7 @@ public abstract class AbstractComponent { protected final Settings settings; public AbstractComponent(Settings settings) { - this.logger = Loggers.getLogger(getClass(), settings); - this.deprecationLogger = new DeprecationLogger(logger); - this.settings = settings; - } - - public AbstractComponent(Settings settings, Class customClass) { - this.logger = LogManager.getLogger(customClass); + this.logger = LogManager.getLogger(getClass()); this.deprecationLogger = new DeprecationLogger(logger); this.settings = settings; } diff --git a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java 
b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java index 3c4b35d5c34..8a472954ab4 100644 --- a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java +++ b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java @@ -35,10 +35,6 @@ public abstract class AbstractLifecycleComponent extends AbstractComponent imple super(settings); } - protected AbstractLifecycleComponent(Settings settings, Class customClass) { - super(settings, customClass); - } - @Override public Lifecycle.State lifecycleState() { return this.lifecycle.state(); diff --git a/server/src/main/java/org/elasticsearch/common/inject/matcher/Matchers.java b/server/src/main/java/org/elasticsearch/common/inject/matcher/Matchers.java index cc354145b11..dd5d98dfabb 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/matcher/Matchers.java +++ b/server/src/main/java/org/elasticsearch/common/inject/matcher/Matchers.java @@ -16,6 +16,8 @@ package org.elasticsearch.common.inject.matcher; +import org.elasticsearch.common.SuppressForbidden; + import java.lang.annotation.Annotation; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; @@ -327,7 +329,9 @@ public class Matchers { return "inPackage(" + targetPackage.getName() + ")"; } + @SuppressForbidden(reason = "ClassLoader.getDefinedPackage not available yet") public Object readResolve() { + // TODO minJava >= 9 : use ClassLoader.getDefinedPackage and remove @SuppressForbidden return inPackage(Package.getPackage(packageName)); } } diff --git a/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java index caa922b92cf..2159014f825 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java +++ b/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java @@ -38,9 +38,13 @@ public final class ESLoggerFactory { public static Logger getLogger(String prefix, Class clazz) { /* - * Do not use LogManager#getLogger(Class) as this now uses Class#getCanonicalName under the hood; as this returns null for local and - * anonymous classes, any place we create, for example, an abstract component defined as an anonymous class (e.g., in tests) will - * result in a logger with a null name which will blow up in a lookup inside of Log4j. + * At one point we didn't use LogManager.getLogger(clazz) because + * of a bug in log4j that has since been fixed: + * https://github.com/apache/logging-log4j2/commit/ae33698a1846a5e10684ec3e52a99223f06047af + * + * For now we continue to use LogManager.getLogger(clazz.getName()) + * because we expect to eventually migrate away from needing this + * method entirely. */ return getLogger(prefix, LogManager.getLogger(clazz.getName())); } @@ -58,14 +62,29 @@ public final class ESLoggerFactory { return new PrefixLogger((ExtendedLogger)logger, logger.getName(), prefix); } + /** + * Get or build a logger. + * @deprecated Prefer {@link LogManager#getLogger} + */ + @Deprecated public static Logger getLogger(Class clazz) { return getLogger(null, clazz); } + /** + * Get or build a logger. + * @deprecated Prefer {@link LogManager#getLogger} + */ + @Deprecated public static Logger getLogger(String name) { return getLogger(null, name); } + /** + * Get the root logger. 
+ * @deprecated Prefer {@link LogManager#getRootLogger} + */ + @Deprecated public static Logger getRootLogger() { return LogManager.getRootLogger(); } diff --git a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java index 58ffe277531..b2a24faf643 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java +++ b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java @@ -67,11 +67,11 @@ public class Loggers { } public static Logger getLogger(Class clazz, Settings settings, String... prefixes) { - return Loggers.getLogger(clazz, prefixes); + return ESLoggerFactory.getLogger(formatPrefix(prefixes), clazz); } public static Logger getLogger(String loggerName, Settings settings, String... prefixes) { - return Loggers.getLogger(loggerName, prefixes); + return ESLoggerFactory.getLogger(formatPrefix(prefixes), loggerName); } public static Logger getLogger(Logger parentLogger, String s) { @@ -82,22 +82,24 @@ public class Loggers { return ESLoggerFactory.getLogger(prefix, parentLogger.getName() + s); } + /** + * Get or build a logger. + * @deprecated Prefer {@link LogManager#getLogger} + */ + @Deprecated public static Logger getLogger(String s) { return ESLoggerFactory.getLogger(s); } + /** + * Get or build a logger. + * @deprecated Prefer {@link LogManager#getLogger} + */ + @Deprecated public static Logger getLogger(Class clazz) { return ESLoggerFactory.getLogger(clazz); } - public static Logger getLogger(Class clazz, String... prefixes) { - return ESLoggerFactory.getLogger(formatPrefix(prefixes), clazz); - } - - public static Logger getLogger(String name, String... prefixes) { - return ESLoggerFactory.getLogger(formatPrefix(prefixes), name); - } - private static String formatPrefix(String... prefixes) { String prefix = null; if (prefixes != null && prefixes.length > 0) { diff --git a/server/src/main/java/org/elasticsearch/common/rounding/Rounding.java b/server/src/main/java/org/elasticsearch/common/rounding/Rounding.java index 93be4cc65e2..f8339079926 100644 --- a/server/src/main/java/org/elasticsearch/common/rounding/Rounding.java +++ b/server/src/main/java/org/elasticsearch/common/rounding/Rounding.java @@ -32,7 +32,10 @@ import java.util.Objects; /** * A strategy for rounding long values. 
+ * + * Use the java based Rounding class where applicable */ +@Deprecated public abstract class Rounding implements Writeable { public abstract byte id(); @@ -404,7 +407,7 @@ public abstract class Rounding implements Writeable { } public static Rounding read(StreamInput in) throws IOException { - Rounding rounding = null; + Rounding rounding; byte id = in.readByte(); switch (id) { case TimeUnitRounding.ID: rounding = new TimeUnitRounding(in); break; diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index a4f9cc6487b..acfecdf665a 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -299,13 +299,13 @@ public final class ClusterSettings extends AbstractScopedSettings { TcpTransport.TCP_REUSE_ADDRESS_PROFILE, TcpTransport.TCP_SEND_BUFFER_SIZE_PROFILE, TcpTransport.TCP_RECEIVE_BUFFER_SIZE_PROFILE, - TcpTransport.CONNECTIONS_PER_NODE_RECOVERY, - TcpTransport.CONNECTIONS_PER_NODE_BULK, - TcpTransport.CONNECTIONS_PER_NODE_REG, - TcpTransport.CONNECTIONS_PER_NODE_STATE, - TcpTransport.CONNECTIONS_PER_NODE_PING, + TransportService.CONNECTIONS_PER_NODE_RECOVERY, + TransportService.CONNECTIONS_PER_NODE_BULK, + TransportService.CONNECTIONS_PER_NODE_REG, + TransportService.CONNECTIONS_PER_NODE_STATE, + TransportService.CONNECTIONS_PER_NODE_PING, + TransportService.TCP_CONNECT_TIMEOUT, TcpTransport.PING_SCHEDULE, - TcpTransport.TCP_CONNECT_TIMEOUT, NetworkService.NETWORK_SERVER, TcpTransport.TCP_NO_DELAY, TcpTransport.TCP_KEEP_ALIVE, diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 6f45a5d462b..137378f509d 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -155,6 +155,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { EngineConfig.INDEX_CODEC_SETTING, EngineConfig.INDEX_OPTIMIZE_AUTO_GENERATED_IDS, IndexMetaData.SETTING_WAIT_FOR_ACTIVE_SHARDS, + IndexSettings.DEFAULT_PIPELINE, // validate that built-in similarities don't get redefined Setting.groupSetting("index.similarity.", (s) -> { diff --git a/server/src/main/java/org/elasticsearch/common/time/CompoundDateTimeFormatter.java b/server/src/main/java/org/elasticsearch/common/time/CompoundDateTimeFormatter.java index df459679c22..0332c03814d 100644 --- a/server/src/main/java/org/elasticsearch/common/time/CompoundDateTimeFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/time/CompoundDateTimeFormatter.java @@ -20,8 +20,14 @@ package org.elasticsearch.common.time; import java.time.ZoneId; import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; import java.time.format.DateTimeParseException; import java.time.temporal.TemporalAccessor; +import java.time.temporal.TemporalField; +import java.util.Arrays; +import java.util.Locale; +import java.util.Map; +import java.util.function.Consumer; /** * wrapper class around java.time.DateTimeFormatter that supports multiple formats for easier parsing, @@ -29,6 +35,13 @@ import java.time.temporal.TemporalAccessor; */ public class CompoundDateTimeFormatter { + private static final Consumer SAME_TIME_ZONE_VALIDATOR = (parsers) -> { + long distinctZones 
= Arrays.stream(parsers).map(DateTimeFormatter::getZone).distinct().count();
+        if (distinctZones > 1) {
+            throw new IllegalArgumentException("formatters must have the same time zone");
+        }
+    };
+
     final DateTimeFormatter printer;
     final DateTimeFormatter[] parsers;

@@ -36,6 +49,7 @@ public class CompoundDateTimeFormatter {
         if (parsers.length == 0) {
             throw new IllegalArgumentException("at least one date time formatter is required");
         }
+        SAME_TIME_ZONE_VALIDATOR.accept(parsers);
         this.printer = parsers[0];
         this.parsers = parsers;
     }
@@ -58,7 +72,18 @@ public class CompoundDateTimeFormatter {
         throw failure;
     }

+    /**
+     * Configure a specific time zone for a date formatter
+     *
+     * @param zoneId The zoneId this formatter should use
+     * @return The new formatter with all parsers switched to the specified timezone
+     */
     public CompoundDateTimeFormatter withZone(ZoneId zoneId) {
+        // shortcut to not create new objects unnecessarily
+        if (zoneId.equals(parsers[0].getZone())) {
+            return this;
+        }
+
         final DateTimeFormatter[] parsersWithZone = new DateTimeFormatter[parsers.length];
         for (int i = 0; i < parsers.length; i++) {
             parsersWithZone[i] = parsers[i].withZone(zoneId);
@@ -67,7 +92,22 @@ public class CompoundDateTimeFormatter {
         return new CompoundDateTimeFormatter(parsersWithZone);
     }

+    /**
+     * Configure defaults for missing values in a parser, then return a new compound date formatter
+     */
+    CompoundDateTimeFormatter parseDefaulting(Map<TemporalField, Long> fields) {
+        final DateTimeFormatter[] parsersWithDefaulting = new DateTimeFormatter[parsers.length];
+        for (int i = 0; i < parsers.length; i++) {
+            DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(parsers[i]);
+            fields.forEach(builder::parseDefaulting);
+            parsersWithDefaulting[i] = builder.toFormatter(Locale.ROOT);
+        }
+
+        return new CompoundDateTimeFormatter(parsersWithDefaulting);
+    }
+
     public String format(TemporalAccessor accessor) {
         return printer.format(accessor);
     }
+
 }
diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
index eef2ab55587..37efff5a0be 100644
--- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
+++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
@@ -52,30 +52,10 @@ import static java.time.temporal.ChronoField.SECOND_OF_MINUTE;

 public class DateFormatters {

-    private static final DateTimeFormatter TIME_ZONE_FORMATTER_ZONE_ID = new DateTimeFormatterBuilder()
-        .appendZoneId()
-        .toFormatter(Locale.ROOT);
-
-    private static final DateTimeFormatter TIME_ZONE_FORMATTER_WITHOUT_COLON = new DateTimeFormatterBuilder()
+    private static final DateTimeFormatter TIME_ZONE_FORMATTER_NO_COLON = new DateTimeFormatterBuilder()
         .appendOffset("+HHmm", "Z")
         .toFormatter(Locale.ROOT);

-    private static final DateTimeFormatter TIME_ZONE_FORMATTER_WITH_COLON = new DateTimeFormatterBuilder()
-        .appendOffset("+HH:mm", "Z")
-        .toFormatter(Locale.ROOT);
-
-    private static final DateTimeFormatter TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder()
-        .optionalStart().appendZoneId().optionalEnd()
-        .optionalStart().appendOffset("+HHmm", "Z").optionalEnd()
-        .optionalStart().appendOffset("+HH:mm", "Z").optionalEnd()
-        .toFormatter(Locale.ROOT);
-
-    private static final DateTimeFormatter OPTIONAL_TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder()
-        .optionalStart()
-        .append(TIME_ZONE_FORMATTER)
-        .optionalEnd()
-        .toFormatter(Locale.ROOT);
-
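A usage sketch of the compound formatter surface touched here. forPattern, withZone, and format all appear in hunks above; the parse(String) entry point (whose tail, throw failure, is visible in the hunk) is assumed:

    import java.time.Instant;
    import java.time.ZoneOffset;
    import java.time.temporal.TemporalAccessor;
    import org.elasticsearch.common.time.CompoundDateTimeFormatter;
    import org.elasticsearch.common.time.DateFormatters;

    class CompoundFormatterExample {
        public static void main(String[] args) {
            CompoundDateTimeFormatter formatter =
                DateFormatters.forPattern("strict_date_optional_time").withZone(ZoneOffset.UTC);
            TemporalAccessor dateOnly = formatter.parse("2018-08-20");               // first matching parser wins
            TemporalAccessor dateTime = formatter.parse("2018-08-20T10:15:30.123Z"); // date plus time and offset
            String printed = formatter.format(Instant.now());                        // always printed with parsers[0]
            System.out.println(dateOnly + " " + dateTime + " " + printed);
        }
    }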
STRICT_YEAR_MONTH_DAY_FORMATTER = new DateTimeFormatterBuilder() .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) .appendLiteral("-") @@ -101,7 +81,7 @@ public class DateFormatters { .appendFraction(MILLI_OF_SECOND, 3, 3, true) .optionalEnd() .optionalStart() - .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) + .appendOffset("+HHmm", "Z") .optionalEnd() .optionalEnd() .toFormatter(Locale.ROOT); @@ -115,89 +95,164 @@ public class DateFormatters { .appendFraction(MILLI_OF_SECOND, 3, 3, true) .optionalEnd() .optionalStart() - .append(TIME_ZONE_FORMATTER_WITH_COLON) - .optionalEnd() - .optionalEnd() - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_3 = new DateTimeFormatterBuilder() - .append(STRICT_YEAR_MONTH_DAY_FORMATTER) - .optionalStart() - .appendLiteral('T') - .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) - .optionalStart() - .appendFraction(MILLI_OF_SECOND, 3, 3, true) - .optionalEnd() - .optionalStart() - .append(TIME_ZONE_FORMATTER_ZONE_ID) + .appendZoneOrOffsetId() .optionalEnd() .optionalEnd() .toFormatter(Locale.ROOT); + /** + * Returns a generic ISO datetime parser where the date is mandatory and the time is optional. + */ private static final CompoundDateTimeFormatter STRICT_DATE_OPTIONAL_TIME = - new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME_FORMATTER_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_2, - STRICT_DATE_OPTIONAL_TIME_FORMATTER_3); + new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME_FORMATTER_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_2); - private static final DateTimeFormatter BASIC_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder() + ///////////////////////////////////////// + // + // BEGIN basic time formatters + // + // these formatters do not have any splitting characters between hours, minutes, seconds, milliseconds + // this means they have to be strict with the exception of the last element + // + ///////////////////////////////////////// + + private static final DateTimeFormatter BASIC_TIME_NO_MILLIS_BASE = new DateTimeFormatterBuilder() .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) - .append(OPTIONAL_TIME_ZONE_FORMATTER) .toFormatter(Locale.ROOT); - private static final CompoundDateTimeFormatter BASIC_TIME_NO_MILLIS = new CompoundDateTimeFormatter(BASIC_TIME_NO_MILLIS_FORMATTER); + /* + * Returns a basic formatter for a two digit hour of day, two digit minute + * of hour, two digit second of minute, and time zone offset (HHmmssZ).
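+ * For example, inputs such as '093025Z' or '093025+01:00' should match the offset-id parser variant, while + * '093025+0100' needs the no-colon variant (illustrative examples, not an exhaustive list).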
+ */ + private static final CompoundDateTimeFormatter BASIC_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(BASIC_TIME_NO_MILLIS_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(BASIC_TIME_NO_MILLIS_BASE).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); private static final DateTimeFormatter BASIC_TIME_FORMATTER = new DateTimeFormatterBuilder() .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .append(OPTIONAL_TIME_ZONE_FORMATTER) .toFormatter(Locale.ROOT); - private static final CompoundDateTimeFormatter BASIC_TIME = new CompoundDateTimeFormatter(BASIC_TIME_FORMATTER); - - private static final DateTimeFormatter BASIC_T_TIME_FORMATTER = new DateTimeFormatterBuilder() - .appendLiteral("T") - .append(BASIC_TIME_FORMATTER) + private static final DateTimeFormatter BASIC_TIME_PRINTER = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 3, 3, true) .toFormatter(Locale.ROOT); - private static final CompoundDateTimeFormatter BASIC_T_TIME = new CompoundDateTimeFormatter(BASIC_T_TIME_FORMATTER); + /* + * Returns a basic formatter for a two digit hour of day, two digit minute + * of hour, two digit second of minute, three digit millis, and time zone + * offset (HHmmss.SSSZ). + */ + private static final CompoundDateTimeFormatter BASIC_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(BASIC_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(BASIC_TIME_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(BASIC_TIME_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); - private static final CompoundDateTimeFormatter BASIC_T_TIME_NO_MILLIS = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendLiteral("T") - .append(BASIC_TIME_NO_MILLIS_FORMATTER) - .toFormatter(Locale.ROOT)); + private static final DateTimeFormatter BASIC_T_TIME_PRINTER = + new DateTimeFormatterBuilder().appendLiteral("T").append(BASIC_TIME_PRINTER).toFormatter(Locale.ROOT); - private static final CompoundDateTimeFormatter BASIC_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + private static final DateTimeFormatter BASIC_T_TIME_FORMATTER = + new DateTimeFormatterBuilder().appendLiteral("T").append(BASIC_TIME_FORMATTER).toFormatter(Locale.ROOT); + + /* + * Returns a basic formatter for a two digit hour of day, two digit minute + * of hour, two digit second of minute, three digit millis, and time zone + * offset prefixed by 'T' ('T'HHmmss.SSSZ). 
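+ * For example, 'T093025.123Z' and 'T093025.123+0100' should both parse (illustrative examples, assuming the + * parser variants above).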
+ */ + private static final CompoundDateTimeFormatter BASIC_T_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(BASIC_T_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(BASIC_T_TIME_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(BASIC_T_TIME_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a basic formatter for a two digit hour of day, two digit minute + * of hour, two digit second of minute, and time zone offset prefixed by 'T' + * ('T'HHmmssZ). + */ + private static final CompoundDateTimeFormatter BASIC_T_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + private static final DateTimeFormatter BASIC_YEAR_MONTH_DAY_FORMATTER = new DateTimeFormatterBuilder() .appendValue(ChronoField.YEAR, 4, 4, SignStyle.NORMAL) .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE) .appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter BASIC_DATE_TIME_FORMATTER = new DateTimeFormatterBuilder() + .append(BASIC_YEAR_MONTH_DAY_FORMATTER) .append(BASIC_T_TIME_FORMATTER) - .toFormatter(Locale.ROOT)); + .toFormatter(Locale.ROOT); - private static final CompoundDateTimeFormatter BASIC_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(ChronoField.YEAR, 4, 4, SignStyle.NORMAL) - .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE) - .appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral("T") - .append(BASIC_TIME_NO_MILLIS_FORMATTER) - .toFormatter(Locale.ROOT)); + private static final DateTimeFormatter BASIC_DATE_TIME_PRINTER = new DateTimeFormatterBuilder() + .append(BASIC_YEAR_MONTH_DAY_FORMATTER) + .append(BASIC_T_TIME_PRINTER) + .toFormatter(Locale.ROOT); + /* + * Returns a basic formatter that combines a basic date and time, separated + * by a 'T' (yyyyMMdd'T'HHmmss.SSSZ). + */ + private static final CompoundDateTimeFormatter BASIC_DATE_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(BASIC_DATE_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(BASIC_DATE_TIME_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(BASIC_DATE_TIME_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + private static final DateTimeFormatter BASIC_DATE_T = + new DateTimeFormatterBuilder().append(BASIC_YEAR_MONTH_DAY_FORMATTER).appendLiteral("T").toFormatter(Locale.ROOT); + + /* + * Returns a basic formatter that combines a basic date and time without millis, + * separated by a 'T' (yyyyMMdd'T'HHmmssZ). 
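+ * For example, '20180723T093025Z' and '20180723T093025+0100' should both parse (illustrative examples).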
+ */ + private static final CompoundDateTimeFormatter BASIC_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(BASIC_DATE_T).append(BASIC_TIME_NO_MILLIS_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(BASIC_DATE_T).append(BASIC_TIME_NO_MILLIS_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter for a full ordinal date, using a four + * digit year and three digit dayOfYear (yyyyDDD). + */ private static final CompoundDateTimeFormatter BASIC_ORDINAL_DATE = new CompoundDateTimeFormatter( DateTimeFormatter.ofPattern("yyyyDDD", Locale.ROOT)); - private static final CompoundDateTimeFormatter BASIC_ORDINAL_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendPattern("yyyyDDD") - .append(BASIC_T_TIME_FORMATTER) - .toFormatter(Locale.ROOT)); + /* + * Returns a formatter for a full ordinal date and time, using a four + * digit year and three digit dayOfYear (yyyyDDD'T'HHmmss.SSSZ). + */ + private static final CompoundDateTimeFormatter BASIC_ORDINAL_DATE_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().appendPattern("yyyyDDD").append(BASIC_T_TIME_PRINTER) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().appendPattern("yyyyDDD").append(BASIC_T_TIME_FORMATTER) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter for a full ordinal date and time without millis, + * using a four digit year and three digit dayOfYear (yyyyDDD'T'HHmmssZ). + */ private static final CompoundDateTimeFormatter BASIC_ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( - new DateTimeFormatterBuilder() - .appendPattern("yyyyDDD") - .appendLiteral("T") - .append(BASIC_TIME_NO_MILLIS_FORMATTER) - .toFormatter(Locale.ROOT)); + new DateTimeFormatterBuilder().appendPattern("yyyyDDD").appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().appendPattern("yyyyDDD").appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); private static final DateTimeFormatter BASIC_WEEK_DATE_FORMATTER = new DateTimeFormatterBuilder() .appendValue(IsoFields.WEEK_BASED_YEAR) @@ -206,47 +261,694 @@ public class DateFormatters { .appendValue(ChronoField.DAY_OF_WEEK) .toFormatter(Locale.ROOT); - private static final CompoundDateTimeFormatter BASIC_WEEK_DATE = new CompoundDateTimeFormatter(BASIC_WEEK_DATE_FORMATTER); + ///////////////////////////////////////// + // + // END basic time formatters + // + ///////////////////////////////////////// - private static final CompoundDateTimeFormatter BASIC_WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( - new DateTimeFormatterBuilder() - .append(BASIC_WEEK_DATE_FORMATTER) - .appendLiteral("T") - .append(BASIC_TIME_NO_MILLIS_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter BASIC_WEEK_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(BASIC_WEEK_DATE_FORMATTER) - .append(BASIC_T_TIME_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final DateTimeFormatter DATE_FORMATTER = new DateTimeFormatterBuilder() - .appendValue(ChronoField.YEAR, 1, 4, SignStyle.NORMAL) - .appendLiteral('-') - .appendValue(MONTH_OF_YEAR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral('-') - .appendValue(DAY_OF_MONTH, 1, 2, 
SignStyle.NOT_NEGATIVE) + ///////////////////////////////////////// + // + // start strict formatters + // + ///////////////////////////////////////// + private static final DateTimeFormatter STRICT_BASIC_WEEK_DATE_FORMATTER = new DateTimeFormatterBuilder() + .parseStrict() + .appendValue(IsoFields.WEEK_BASED_YEAR, 4) + .appendLiteral("W") + .appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 1, 2, SignStyle.NEVER) + .appendValue(ChronoField.DAY_OF_WEEK) .toFormatter(Locale.ROOT); - private static final CompoundDateTimeFormatter DATE = new CompoundDateTimeFormatter(DATE_FORMATTER); + private static final DateTimeFormatter STRICT_BASIC_WEEK_DATE_PRINTER = new DateTimeFormatterBuilder() + .parseStrict() + .appendValue(IsoFields.WEEK_BASED_YEAR, 4) + .appendLiteral("W") + .appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 2, 2, SignStyle.NEVER) + .appendValue(ChronoField.DAY_OF_WEEK) + .toFormatter(Locale.ROOT); - private static final CompoundDateTimeFormatter HOUR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + /* + * Returns a basic formatter for a full date as four digit weekyear, two + * digit week of weekyear, and one digit day of week (xxxx'W'wwe). + */ + private static final CompoundDateTimeFormatter STRICT_BASIC_WEEK_DATE = + new CompoundDateTimeFormatter(STRICT_BASIC_WEEK_DATE_PRINTER, STRICT_BASIC_WEEK_DATE_FORMATTER); + + /* + * Returns a basic formatter that combines a basic weekyear date and time + * without millis, separated by a 'T' (xxxx'W'wwe'T'HHmmssX). + */ + private static final CompoundDateTimeFormatter STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder() + .append(STRICT_BASIC_WEEK_DATE_PRINTER).append(DateTimeFormatter.ofPattern("'T'HHmmssX", Locale.ROOT)) + .toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder() + .append(STRICT_BASIC_WEEK_DATE_FORMATTER).append(DateTimeFormatter.ofPattern("'T'HHmmssX", Locale.ROOT)) + .toFormatter(Locale.ROOT) + ); + + /* + * Returns a basic formatter that combines a basic weekyear date and time, + * separated by a 'T' (xxxx'W'wwe'T'HHmmss.SSSX). + */ + private static final CompoundDateTimeFormatter STRICT_BASIC_WEEK_DATE_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder() + .append(STRICT_BASIC_WEEK_DATE_PRINTER) + .append(DateTimeFormatter.ofPattern("'T'HHmmss.SSSX", Locale.ROOT)) + .toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder() + .append(STRICT_BASIC_WEEK_DATE_FORMATTER) + .append(DateTimeFormatter.ofPattern("'T'HHmmss.SSSX", Locale.ROOT)) + .toFormatter(Locale.ROOT) + ); + + /* + * An ISO date formatter that formats or parses a date without an offset, such as '2011-12-03'. + */ + private static final CompoundDateTimeFormatter STRICT_DATE = new CompoundDateTimeFormatter( + DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT)); + + /* + * A date formatter that formats or parses a date plus an hour without an offset, such as '2011-12-03T01'. + */ + private static final CompoundDateTimeFormatter STRICT_DATE_HOUR = new CompoundDateTimeFormatter( + DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH", Locale.ROOT)); + + /* + * A date formatter that formats or parses a date plus an hour/minute without an offset, such as '2011-12-03T01:10'. 
+ */ + private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE = new CompoundDateTimeFormatter( + DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm", Locale.ROOT)); + + /* + * A strict date formatter that formats or parses a date without an offset, such as '2011-12-03'. + */ + private static final CompoundDateTimeFormatter STRICT_YEAR_MONTH_DAY = new CompoundDateTimeFormatter(STRICT_YEAR_MONTH_DAY_FORMATTER); + + /* + * A strict formatter that formats or parses a year and a month, such as '2011-12'. + */ + private static final CompoundDateTimeFormatter STRICT_YEAR_MONTH = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral("-") + .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE) .toFormatter(Locale.ROOT)); - private static final CompoundDateTimeFormatter DATE_HOUR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .appendLiteral("T") - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + /* + * A strict formatter that formats or parses a year, such as '2011'. + */ + private static final CompoundDateTimeFormatter STRICT_YEAR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) .toFormatter(Locale.ROOT)); + /* + * A strict formatter that formats or parses an hour, minute and second, such as '09:43:25'. + */ + private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND = + new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_FORMATTER); + + private static final DateTimeFormatter STRICT_DATE_FORMATTER = new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .appendLiteral('T') + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + .optionalStart() + .appendFraction(MILLI_OF_SECOND, 3, 3, true) + .optionalEnd() + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter that combines a full date and time, separated by a 'T' + * (yyyy-MM-dd'T'HH:mm:ss.SSSZZ). + */ + private static final CompoundDateTimeFormatter STRICT_DATE_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(STRICT_DATE_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(STRICT_DATE_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + private static final DateTimeFormatter STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE = new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(DAY_OF_YEAR, 3, 3, SignStyle.NOT_NEGATIVE) + .appendLiteral('T') + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a full ordinal date and time without millis, + * using a four digit year and three digit dayOfYear (yyyy-DDD'T'HH:mm:ssZZ).
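+ * For example, day 204 of 2018 at 09:30:25 UTC would be written as '2018-204T09:30:25Z' (illustrative example).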
+ */ + private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + private static final DateTimeFormatter STRICT_DATE_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .appendLiteral('T') + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter that combines a full date and time without millis, + * separated by a 'T' (yyyy-MM-dd'T'HH:mm:ssZZ). + */ + private static final CompoundDateTimeFormatter STRICT_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(STRICT_DATE_TIME_NO_MILLIS_FORMATTER) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(STRICT_DATE_TIME_NO_MILLIS_FORMATTER) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + // NOTE: this is not a strict formatter to retain the joda time based behaviour, even though it's named like this + private static final DateTimeFormatter STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER = new DateTimeFormatterBuilder() + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER = new DateTimeFormatterBuilder() + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + .appendFraction(MILLI_OF_SECOND, 3, 3, true) + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, and three digit fraction of + * second (HH:mm:ss.SSS). + * + * NOTE: this is not a strict formatter to retain the joda time based behaviour, + * even though it's named like this + */ + private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND_MILLIS = + new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER); + + private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND_FRACTION = STRICT_HOUR_MINUTE_SECOND_MILLIS; + + /* + * Returns a formatter that combines a full date, two digit hour of day, + * two digit minute of hour, two digit second of minute, and three digit + * fraction of second (yyyy-MM-dd'T'HH:mm:ss.SSS). + */ + private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .appendLiteral("T") + .append(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER) + .toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .appendLiteral("T") + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + // this one here is lenient as well to retain joda time based bwc compatibility + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .toFormatter(Locale.ROOT) + ); + + private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS = STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION; + + /* + * Returns a formatter for a two digit hour of day. 
(HH) + */ + private static final CompoundDateTimeFormatter STRICT_HOUR = + new CompoundDateTimeFormatter(DateTimeFormatter.ofPattern("HH", Locale.ROOT)); + + /* + * Returns a formatter for a two digit hour of day and two digit minute of + * hour. (HH:mm) + */ + private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE = + new CompoundDateTimeFormatter(DateTimeFormatter.ofPattern("HH:mm", Locale.ROOT)); + + private static final DateTimeFormatter STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE = new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(DAY_OF_YEAR, 3, 3, SignStyle.NOT_NEGATIVE) + .appendLiteral('T') + .appendPattern("HH:mm") + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 3, 3, true) + .optionalEnd() + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a full ordinal date and time, using a four + * digit year and three digit dayOfYear (yyyy-DDD'T'HH:mm:ss.SSSZZ). + */ + private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + // Note: milliseconds parsing is not strict, others are + private static final DateTimeFormatter STRICT_TIME_FORMATTER_BASE = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter STRICT_TIME_PRINTER = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 3, 3, true) + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, three digit fraction of second, and + * time zone offset (HH:mm:ss.SSSZZ). + */ + private static final CompoundDateTimeFormatter STRICT_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(STRICT_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(STRICT_TIME_FORMATTER_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(STRICT_TIME_FORMATTER_BASE).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, three digit fraction of second, and + * time zone offset prefixed by 'T' ('T'HH:mm:ss.SSSZZ). 
+ */ + private static final CompoundDateTimeFormatter STRICT_T_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().appendLiteral('T').append(STRICT_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().appendLiteral('T').append(STRICT_TIME_FORMATTER_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().appendLiteral('T').append(STRICT_TIME_FORMATTER_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + private static final DateTimeFormatter STRICT_TIME_NO_MILLIS_BASE = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, and time zone offset (HH:mm:ssZZ). + */ + private static final CompoundDateTimeFormatter STRICT_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(STRICT_TIME_NO_MILLIS_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(STRICT_TIME_NO_MILLIS_BASE).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, and time zone offset prefixed + * by 'T' ('T'HH:mm:ssZZ). + */ + private static final CompoundDateTimeFormatter STRICT_T_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().appendLiteral("T").append(STRICT_TIME_NO_MILLIS_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().appendLiteral("T").append(STRICT_TIME_NO_MILLIS_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + private static final DateTimeFormatter ISO_WEEK_DATE = new DateTimeFormatterBuilder() + .parseCaseInsensitive() + .appendValue(IsoFields.WEEK_BASED_YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral("-W") + .appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 2) + .appendLiteral('-') + .appendValue(DAY_OF_WEEK, 1) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter ISO_WEEK_DATE_T = new DateTimeFormatterBuilder() + .append(ISO_WEEK_DATE) + .appendLiteral('T') + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a full date as four digit weekyear, two digit + * week of weekyear, and one digit day of week (xxxx-'W'ww-e). + */ + private static final CompoundDateTimeFormatter STRICT_WEEK_DATE = new CompoundDateTimeFormatter(ISO_WEEK_DATE); + + /* + * Returns a formatter that combines a full weekyear date and time without millis, + * separated by a 'T' (xxxx-'W'ww-e'T'HH:mm:ssZZ). + */ + private static final CompoundDateTimeFormatter STRICT_WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T) + .append(STRICT_TIME_NO_MILLIS_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T) + .append(STRICT_TIME_NO_MILLIS_BASE).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter that combines a full weekyear date and time, + * separated by a 'T' (xxxx-'W'ww-e'T'HH:mm:ss.SSSZZ). 
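+ * For example, Monday of week 30 in 2018 (2018-07-23) would be written as '2018-W30-1T09:30:25.123Z' + * (illustrative example, assuming ISO week numbering).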
+ */ + private static final CompoundDateTimeFormatter STRICT_WEEK_DATE_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T).append(STRICT_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T).append(STRICT_TIME_FORMATTER_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T).append(STRICT_TIME_FORMATTER_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter for a four digit weekyear + */ + private static final CompoundDateTimeFormatter STRICT_WEEKYEAR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(WeekFields.ISO.weekBasedYear(), 4, 10, SignStyle.EXCEEDS_PAD) + .toFormatter(Locale.ROOT)); + + private static final DateTimeFormatter STRICT_WEEKYEAR_WEEK_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(WeekFields.ISO.weekBasedYear(), 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral("-W") + .appendValue(WeekFields.ISO.weekOfWeekBasedYear(), 2, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a four digit weekyear and two digit week of + * weekyear. (xxxx-'W'ww) + */ + private static final CompoundDateTimeFormatter STRICT_WEEKYEAR_WEEK = new CompoundDateTimeFormatter(STRICT_WEEKYEAR_WEEK_FORMATTER); + + /* + * Returns a formatter for a four digit weekyear, two digit week of + * weekyear, and one digit day of week. (xxxx-'W'ww-e) + */ + private static final CompoundDateTimeFormatter STRICT_WEEKYEAR_WEEK_DAY = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .append(STRICT_WEEKYEAR_WEEK_FORMATTER) + .appendLiteral("-") + .appendValue(WeekFields.ISO.dayOfWeek()) + .toFormatter(Locale.ROOT)); + + /* + * Returns a formatter that combines a full date, two digit hour of day, + * two digit minute of hour, and two digit second of + * minute. (yyyy-MM-dd'T'HH:mm:ss) + */ + private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE_SECOND = + new CompoundDateTimeFormatter(DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss", Locale.ROOT)); + + /* + * A basic formatter for a full date as four digit year, two digit + * month of year, and two digit day of month (yyyyMMdd). + */ + private static final CompoundDateTimeFormatter BASIC_DATE = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 4, SignStyle.NORMAL) + .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT).withZone(ZoneOffset.UTC), + new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 1, 4, SignStyle.NORMAL) + .appendValue(MONTH_OF_YEAR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendValue(DAY_OF_MONTH, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT).withZone(ZoneOffset.UTC) + ); + + private static final DateTimeFormatter STRICT_ORDINAL_DATE_FORMATTER = new DateTimeFormatterBuilder() + .parseCaseInsensitive() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(DAY_OF_YEAR, 3) + .optionalStart() + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a full ordinal date, using a four + * digit year and three digit dayOfYear (yyyy-DDD). 
+ */ + private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE = new CompoundDateTimeFormatter(STRICT_ORDINAL_DATE_FORMATTER); + + ///////////////////////////////////////// + // + // end strict formatters + // + ///////////////////////////////////////// + + ///////////////////////////////////////// + // + // start lenient formatters + // + ///////////////////////////////////////// + + private static final DateTimeFormatter DATE_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 1, 5, SignStyle.NORMAL) + .optionalStart() + .appendLiteral('-') + .appendValue(MONTH_OF_YEAR, 1, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral('-') + .appendValue(DAY_OF_MONTH, 1, 2, SignStyle.NOT_NEGATIVE) + .optionalEnd() + .optionalEnd() + .toFormatter(Locale.ROOT); + private static final DateTimeFormatter HOUR_MINUTE_FORMATTER = new DateTimeFormatterBuilder() .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) .appendLiteral(':') .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) .toFormatter(Locale.ROOT); - private static final CompoundDateTimeFormatter HOUR_MINUTE = new CompoundDateTimeFormatter(HOUR_MINUTE_FORMATTER); + /* + * A date formatter with optional time, being very lenient; the format is + * yyyy-MM-dd'T'HH:mm:ss.SSSZ + */ + private static final CompoundDateTimeFormatter DATE_OPTIONAL_TIME = new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME.printer, + new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .optionalStart() + .appendLiteral('T') + .optionalStart() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .optionalEnd() + .optionalStart() + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .optionalEnd() + .optionalStart().appendZoneOrOffsetId().optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalEnd() + .toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .optionalStart() + .appendLiteral('T') + .optionalStart() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .optionalEnd() + .optionalStart() + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .optionalEnd() + .optionalStart().appendOffset("+HHmm", "Z").optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalEnd() + .toFormatter(Locale.ROOT)); + + private static final DateTimeFormatter HOUR_MINUTE_SECOND_FORMATTER = new DateTimeFormatterBuilder() + .append(HOUR_MINUTE_FORMATTER) + .appendLiteral(":") + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter HOUR_MINUTE_SECOND_MILLIS_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter ORDINAL_DATE_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(DAY_OF_YEAR, 1,
3, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter ORDINAL_DATE_PRINTER = new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(DAY_OF_YEAR, 3, 3, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a full ordinal date, using a four + * digit year and three digit dayOfYear (yyyy-DDD). + */ + private static final CompoundDateTimeFormatter ORDINAL_DATE = + new CompoundDateTimeFormatter(ORDINAL_DATE_PRINTER, ORDINAL_DATE_FORMATTER); + + private static final DateTimeFormatter TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter T_TIME_NO_MILLIS_FORMATTER = + new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_NO_MILLIS_FORMATTER).toFormatter(Locale.ROOT); + + private static final DateTimeFormatter TIME_PREFIX = new DateTimeFormatterBuilder() + .append(TIME_NO_MILLIS_FORMATTER) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter WEEK_DATE_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(IsoFields.WEEK_BASED_YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral("-W") + .appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral('-') + .appendValue(DAY_OF_WEEK, 1) + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a four digit weekyear. (YYYY) + */ + private static final CompoundDateTimeFormatter WEEK_YEAR = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().appendValue(WeekFields.ISO.weekBasedYear()).toFormatter(Locale.ROOT)); + + /* + * Returns a formatter for a four digit year. (uuuu) + */ + private static final CompoundDateTimeFormatter YEAR = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().appendValue(ChronoField.YEAR).toFormatter(Locale.ROOT)); + + /* + * Returns a formatter for parsing the seconds since the epoch + */ + private static final CompoundDateTimeFormatter EPOCH_SECOND = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().appendValue(ChronoField.INSTANT_SECONDS).toFormatter(Locale.ROOT)); + + /* + * Returns a formatter for parsing the milliseconds since the epoch + */ + private static final CompoundDateTimeFormatter EPOCH_MILLIS = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(ChronoField.INSTANT_SECONDS, 1, 19, SignStyle.NEVER) + .appendValue(ChronoField.MILLI_OF_SECOND, 3) + .toFormatter(Locale.ROOT)); + + /* + * Returns a formatter that combines a full date and two digit hour of + * day. (yyyy-MM-dd'T'HH) + */ + private static final CompoundDateTimeFormatter DATE_HOUR = new CompoundDateTimeFormatter(STRICT_DATE_HOUR.printer, + new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral("T") + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT)); + + /* + * Returns a formatter that combines a full date, two digit hour of day, + * two digit minute of hour, two digit second of minute, and three digit + * fraction of second (yyyy-MM-dd'T'HH:mm:ss.SSS). Parsing will parse up + * to 3 fractional second digits.
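+ * For example, the printer always emits three fractional digits, as in '2018-07-23T09:30:25.100', while the lenient + * parser should also accept shortened inputs such as '2018-7-23T9:30:25.1' (illustrative examples).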
+ */ + private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND_MILLIS = + new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .appendLiteral("T") + .append(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER) + .toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral("T") + .append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND_FRACTION = DATE_HOUR_MINUTE_SECOND_MILLIS; + + /* + * Returns a formatter that combines a full date, two digit hour of day, + * and two digit minute of hour. (yyyy-MM-dd'T'HH:mm) + */ + private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE = new CompoundDateTimeFormatter(STRICT_DATE_HOUR_MINUTE.printer, + new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral("T") + .append(HOUR_MINUTE_FORMATTER) + .toFormatter(Locale.ROOT)); + + /* + * Returns a formatter that combines a full date, two digit hour of day, + * two digit minute of hour, and two digit second of + * minute. (yyyy-MM-dd'T'HH:mm:ss) + */ + private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND = new CompoundDateTimeFormatter( + STRICT_DATE_HOUR_MINUTE_SECOND.printer, + new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral("T") + .append(HOUR_MINUTE_SECOND_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final DateTimeFormatter DATE_TIME_FORMATTER = new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral('T') + .append(HOUR_MINUTE_FORMATTER) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .optionalEnd() + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter that combines a full date and time, separated by a 'T' + * (yyyy-MM-dd'T'HH:mm:ss.SSSZZ). + */ + private static final CompoundDateTimeFormatter DATE_TIME = new CompoundDateTimeFormatter( + STRICT_DATE_TIME.printer, + new DateTimeFormatterBuilder().append(DATE_TIME_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(DATE_TIME_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a basic formatter for a full date as four digit weekyear, two + * digit week of weekyear, and one digit day of week (YYYY'W'wwe). + */ + private static final CompoundDateTimeFormatter BASIC_WEEK_DATE = + new CompoundDateTimeFormatter(STRICT_BASIC_WEEK_DATE.printer, BASIC_WEEK_DATE_FORMATTER); + + /* + * Returns a formatter for a full date as four digit year, two digit month + * of year, and two digit day of month (yyyy-MM-dd). 
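+ * The lenient parser should also accept shortened inputs such as '2018-7-3', '2018-7', or a bare year like '2018' + * (illustrative examples, based on the optional sections of DATE_FORMATTER).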
+ */ + private static final CompoundDateTimeFormatter DATE = new CompoundDateTimeFormatter(STRICT_DATE.printer, DATE_FORMATTER); + + // only the printer, nothing optional here + private static final DateTimeFormatter DATE_TIME_NO_MILLIS_PRINTER = new DateTimeFormatterBuilder() + .append(STRICT_DATE.printer) + .appendLiteral('T') + .append(STRICT_HOUR_MINUTE.printer) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .appendZoneId() + .toFormatter(Locale.ROOT); private static final DateTimeFormatter DATE_TIME_PREFIX = new DateTimeFormatterBuilder() .append(DATE_FORMATTER) @@ -258,151 +960,55 @@ public class DateFormatters { .optionalEnd() .toFormatter(Locale.ROOT); - // only the formatter, nothing optional here - private static final DateTimeFormatter DATE_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .appendLiteral('T') - .append(HOUR_MINUTE_FORMATTER) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .appendZoneId() - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter DATE_TIME_NO_MILLIS_1 = new DateTimeFormatterBuilder() - .append(DATE_TIME_PREFIX) - .append(TIME_ZONE_FORMATTER_WITH_COLON) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter DATE_TIME_NO_MILLIS_2 = new DateTimeFormatterBuilder() - .append(DATE_TIME_PREFIX) - .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter DATE_TIME_NO_MILLIS_3 = new DateTimeFormatterBuilder() - .append(DATE_TIME_PREFIX) - .append(TIME_ZONE_FORMATTER_ZONE_ID) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter DATE_TIME_NO_MILLIS_4 = new DateTimeFormatterBuilder() - .append(DATE_TIME_PREFIX) - .optionalStart() - .append(TIME_ZONE_FORMATTER_WITH_COLON) - .optionalEnd() - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter DATE_TIME_NO_MILLIS_5 = new DateTimeFormatterBuilder() - .append(DATE_TIME_PREFIX) - .optionalStart() - .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) - .optionalEnd() - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter DATE_TIME_NO_MILLIS_6 = new DateTimeFormatterBuilder() - .append(DATE_TIME_PREFIX) - .optionalStart() - .append(TIME_ZONE_FORMATTER_ZONE_ID) - .optionalEnd() - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(DATE_TIME_NO_MILLIS_FORMATTER, - DATE_TIME_NO_MILLIS_1, DATE_TIME_NO_MILLIS_2, DATE_TIME_NO_MILLIS_3, DATE_TIME_NO_MILLIS_4, DATE_TIME_NO_MILLIS_5, - DATE_TIME_NO_MILLIS_6); - - private static final CompoundDateTimeFormatter DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .appendLiteral('T') - .append(HOUR_MINUTE_FORMATTER) - .optionalStart() - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .optionalEnd() - .append(OPTIONAL_TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter DATE_OPTIONAL_TIME = new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME.printer, - new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .parseLenient() - .optionalStart() - .appendLiteral('T') - .append(HOUR_MINUTE_FORMATTER) - .optionalStart() - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .optionalEnd() -
.append(OPTIONAL_TIME_ZONE_FORMATTER) - .optionalEnd() - .toFormatter(Locale.ROOT)); - - private static final DateTimeFormatter HOUR_MINUTE_SECOND_FORMATTER = new DateTimeFormatterBuilder() - .append(HOUR_MINUTE_FORMATTER) - .appendLiteral(":") - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter HOUR_MINUTE_SECOND = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(HOUR_MINUTE_FORMATTER) - .appendLiteral(":") - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .appendLiteral("T") - .append(HOUR_MINUTE_SECOND_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .appendLiteral("T") - .append(HOUR_MINUTE_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final DateTimeFormatter HOUR_MINUTE_SECOND_MILLIS_FORMATTER = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .toFormatter(Locale.ROOT); + /* + * Returns a formatter that combines a full date and time without millis, with an optional timezone, + * separated by a 'T' (yyyy-MM-dd'T'HH:mm:ssZ). + */ + private static final CompoundDateTimeFormatter DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(DATE_TIME_NO_MILLIS_PRINTER, + new DateTimeFormatterBuilder().append(DATE_TIME_PREFIX).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(DATE_TIME_PREFIX).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(DATE_TIME_PREFIX) + .optionalStart().appendZoneOrOffsetId().optionalEnd().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(DATE_TIME_PREFIX) + .optionalStart().append(TIME_ZONE_FORMATTER_NO_COLON).optionalEnd().toFormatter(Locale.ROOT) + ); + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, and three digit fraction of + * second (HH:mm:ss.SSS). + */ private static final CompoundDateTimeFormatter HOUR_MINUTE_SECOND_MILLIS = - new CompoundDateTimeFormatter(HOUR_MINUTE_SECOND_MILLIS_FORMATTER); + new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_FRACTION.printer, HOUR_MINUTE_SECOND_MILLIS_FORMATTER); - private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND_MILLIS = - new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .appendLiteral("T") - .append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER) - .toFormatter(Locale.ROOT)); + /* + * Returns a formatter for a two digit hour of day and two digit minute of + * hour.
(HH:mm) + */ + private static final CompoundDateTimeFormatter HOUR_MINUTE = + new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE.printer, HOUR_MINUTE_FORMATTER); - private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND_FRACTION = - new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .appendLiteral("T") - .append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final DateTimeFormatter ORDINAL_DATE_FORMATTER = new DateTimeFormatterBuilder() - .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) - .appendLiteral('-') - .appendValue(DAY_OF_YEAR, 1, 3, SignStyle.NOT_NEGATIVE) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter ORDINAL_DATE = new CompoundDateTimeFormatter(ORDINAL_DATE_FORMATTER); - - private static final CompoundDateTimeFormatter ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + /* + * A strict formatter that formats or parses an hour, minute and second, such as '09:43:25'. + */ + private static final CompoundDateTimeFormatter HOUR_MINUTE_SECOND = new CompoundDateTimeFormatter( + STRICT_HOUR_MINUTE_SECOND.printer, new DateTimeFormatterBuilder() - .append(ORDINAL_DATE_FORMATTER) - .appendLiteral('T') - .append(HOUR_MINUTE_SECOND_FORMATTER) - .append(OPTIONAL_TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT)); + .append(HOUR_MINUTE_FORMATTER) + .appendLiteral(":") + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT) + ); - private static final CompoundDateTimeFormatter ORDINAL_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + /* + * Returns a formatter for a two digit hour of day. (HH) + */ + private static final CompoundDateTimeFormatter HOUR = new CompoundDateTimeFormatter( + STRICT_HOUR.printer, + new DateTimeFormatterBuilder().appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE).toFormatter(Locale.ROOT) + ); + + private static final DateTimeFormatter ORDINAL_DATE_TIME_FORMATTER_BASE = new DateTimeFormatterBuilder() .append(ORDINAL_DATE_FORMATTER) .appendLiteral('T') .append(HOUR_MINUTE_FORMATTER) @@ -411,372 +1017,191 @@ public class DateFormatters { .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) .appendFraction(MILLI_OF_SECOND, 1, 3, true) .optionalEnd() - .append(TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final DateTimeFormatter TIME_FORMATTER_1 = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .append(TIME_ZONE_FORMATTER_ZONE_ID) .toFormatter(Locale.ROOT); - private static final DateTimeFormatter TIME_FORMATTER_2 = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .append(TIME_ZONE_FORMATTER_WITH_COLON) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter TIME_FORMATTER_3 = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2,
SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter TIME_PREFIX = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter TIME_ZONE_ID = new DateTimeFormatterBuilder() - .append(TIME_PREFIX) - .append(TIME_ZONE_FORMATTER_ZONE_ID) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter TIME_ZONE_WITH_COLON = new DateTimeFormatterBuilder() - .append(TIME_PREFIX) - .append(TIME_ZONE_FORMATTER_WITH_COLON) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter TIME_ZONE_WITHOUT_COLON = new DateTimeFormatterBuilder() - .append(TIME_PREFIX) - .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter T_TIME = new CompoundDateTimeFormatter( - new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_FORMATTER_1).toFormatter(Locale.ROOT), - new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_FORMATTER_2).toFormatter(Locale.ROOT), - new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_FORMATTER_3).toFormatter(Locale.ROOT) + /* + * Returns a formatter for a full ordinal date and time, using a four + * digit year and three digit dayOfYear (yyyy-DDD'T'HH:mm:ss.SSSZZ). + */ + private static final CompoundDateTimeFormatter ORDINAL_DATE_TIME = new CompoundDateTimeFormatter( + STRICT_ORDINAL_DATE_TIME.printer, + new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_FORMATTER_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_FORMATTER_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) ); - private static final DateTimeFormatter TIME_NO_MILLIS_FORMATTER_1 = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .append(TIME_ZONE_FORMATTER_ZONE_ID) + private static final DateTimeFormatter ORDINAL_DATE_TIME_NO_MILLIS_BASE = new DateTimeFormatterBuilder() + .append(ORDINAL_DATE_FORMATTER) + .appendLiteral('T') + .append(HOUR_MINUTE_SECOND_FORMATTER) .toFormatter(Locale.ROOT); - private static final DateTimeFormatter TIME_NO_MILLIS_FORMATTER_2 = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .append(TIME_ZONE_FORMATTER_WITH_COLON) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter TIME_NO_MILLIS_FORMATTER_3 = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter TIME = new 
CompoundDateTimeFormatter(TIME_ZONE_ID, TIME_ZONE_WITH_COLON, - TIME_ZONE_WITHOUT_COLON); - - private static final CompoundDateTimeFormatter TIME_NO_MILLIS = - new CompoundDateTimeFormatter(TIME_NO_MILLIS_FORMATTER_1, TIME_NO_MILLIS_FORMATTER_2, TIME_NO_MILLIS_FORMATTER_3); - - private static final DateTimeFormatter T_TIME_NO_MILLIS_FORMATTER_1 = new DateTimeFormatterBuilder() - .appendLiteral("T") - .append(TIME_NO_MILLIS_FORMATTER_1) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter T_TIME_NO_MILLIS_FORMATTER_2 = new DateTimeFormatterBuilder() - .appendLiteral("T") - .append(TIME_NO_MILLIS_FORMATTER_2) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter T_TIME_NO_MILLIS_FORMATTER_3 = new DateTimeFormatterBuilder() - .appendLiteral("T") - .append(TIME_NO_MILLIS_FORMATTER_3) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter T_TIME_NO_MILLIS = - new CompoundDateTimeFormatter(T_TIME_NO_MILLIS_FORMATTER_1, T_TIME_NO_MILLIS_FORMATTER_2, T_TIME_NO_MILLIS_FORMATTER_3); - - private static final DateTimeFormatter WEEK_DATE_FORMATTER = new DateTimeFormatterBuilder() - .appendValue(IsoFields.WEEK_BASED_YEAR, 4, 10, SignStyle.EXCEEDS_PAD) - .appendLiteral("-W") - .appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral('-') - .appendValue(DAY_OF_WEEK, 1) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter WEEK_DATE = new CompoundDateTimeFormatter(WEEK_DATE_FORMATTER); - - private static final CompoundDateTimeFormatter WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( - new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).append(T_TIME_NO_MILLIS_FORMATTER_1).toFormatter(Locale.ROOT), - new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).append(T_TIME_NO_MILLIS_FORMATTER_2).toFormatter(Locale.ROOT), - new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).append(T_TIME_NO_MILLIS_FORMATTER_3).toFormatter(Locale.ROOT) - ); + /* + * Returns a formatter for a full ordinal date and time without millis, + * using a four digit year and three digit dayOfYear (yyyy-DDD'T'HH:mm:ssZZ). + */ + private static final CompoundDateTimeFormatter ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + STRICT_ORDINAL_DATE_TIME_NO_MILLIS.printer, + new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_NO_MILLIS_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_NO_MILLIS_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + /* + * Returns a formatter that combines a full weekyear date and time, + * separated by a 'T' (xxxx-'W'ww-e'T'HH:mm:ss.SSSZZ). 
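+ * Unlike the strict variant, the lenient parser should also accept a single digit week of weekyear, for example + * '2018-W5-1T09:30:25.123Z' (illustrative example).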
+ */ private static final CompoundDateTimeFormatter WEEK_DATE_TIME = new CompoundDateTimeFormatter( - new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).appendLiteral("T").append(TIME_FORMATTER_1).toFormatter(Locale.ROOT), - new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).appendLiteral("T").append(TIME_FORMATTER_2).toFormatter(Locale.ROOT), - new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).appendLiteral("T").append(TIME_FORMATTER_3).toFormatter(Locale.ROOT) + STRICT_WEEK_DATE_TIME.printer, + new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).appendLiteral("T").append(TIME_PREFIX) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).appendLiteral("T").append(TIME_PREFIX) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) ); - private static final CompoundDateTimeFormatter WEEK_YEAR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(WeekFields.ISO.weekBasedYear()) - .toFormatter(Locale.ROOT)); + /* + * Returns a formatter that combines a full weekyear date and time, + * separated by a 'T' (xxxx-'W'ww-e'T'HH:mm:ssZZ). + */ + private static final CompoundDateTimeFormatter WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + STRICT_WEEK_DATE_TIME_NO_MILLIS.printer, + new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).append(T_TIME_NO_MILLIS_FORMATTER) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).append(T_TIME_NO_MILLIS_FORMATTER) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); - private static final CompoundDateTimeFormatter WEEKYEAR_WEEK = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(WeekFields.ISO.weekBasedYear()) - .appendLiteral("-W") - .appendValue(WeekFields.ISO.weekOfWeekBasedYear()) - .toFormatter(Locale.ROOT)); + /* + * Returns a basic formatter that combines a basic weekyear date and time, + * separated by a 'T' (xxxx'W'wwe'T'HHmmss.SSSX). + */ + private static final CompoundDateTimeFormatter BASIC_WEEK_DATE_TIME = new CompoundDateTimeFormatter( + STRICT_BASIC_WEEK_DATE_TIME.printer, + new DateTimeFormatterBuilder().append(BASIC_WEEK_DATE_FORMATTER).append(BASIC_T_TIME_FORMATTER) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(BASIC_WEEK_DATE_FORMATTER).append(BASIC_T_TIME_FORMATTER) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); - private static final CompoundDateTimeFormatter WEEKYEAR_WEEK_DAY = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(WeekFields.ISO.weekBasedYear()) - .appendLiteral("-W") - .appendValue(WeekFields.ISO.weekOfWeekBasedYear()) - .appendLiteral("-") - .appendValue(WeekFields.ISO.dayOfWeek()) - .toFormatter(Locale.ROOT)); + /* + * Returns a basic formatter that combines a basic weekyear date and time, + * separated by a 'T' (xxxx'W'wwe'T'HHmmssX). 
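+ * For example, "2018W273T101530Z" is a value this formatter accepts.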
+ */ + private static final CompoundDateTimeFormatter BASIC_WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS.printer, + new DateTimeFormatterBuilder().append(BASIC_WEEK_DATE_FORMATTER).appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(BASIC_WEEK_DATE_FORMATTER).appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); - private static final CompoundDateTimeFormatter YEAR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(ChronoField.YEAR) - .toFormatter(Locale.ROOT)); + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, three digit fraction of second, and + * time zone offset (HH:mm:ss.SSSZZ). + */ + private static final CompoundDateTimeFormatter TIME = new CompoundDateTimeFormatter( + STRICT_TIME.printer, + new DateTimeFormatterBuilder().append(TIME_PREFIX).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(TIME_PREFIX).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); - private static final CompoundDateTimeFormatter YEAR_MONTH = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(ChronoField.YEAR) - .appendLiteral("-") - .appendValue(MONTH_OF_YEAR) - .toFormatter(Locale.ROOT)); + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, and time zone offset (HH:mm:ssZZ). + */ + private static final CompoundDateTimeFormatter TIME_NO_MILLIS = new CompoundDateTimeFormatter( + STRICT_TIME_NO_MILLIS.printer, + new DateTimeFormatterBuilder().append(TIME_NO_MILLIS_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(TIME_NO_MILLIS_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); - private static final CompoundDateTimeFormatter YEAR_MONTH_DAY = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(ChronoField.YEAR) - .appendLiteral("-") - .appendValue(MONTH_OF_YEAR) - .appendLiteral("-") - .appendValue(DAY_OF_MONTH) - .toFormatter(Locale.ROOT)); + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, three digit fraction of second, and + * time zone offset prefixed by 'T' ('T'HH:mm:ss.SSSZZ). + */ + private static final CompoundDateTimeFormatter T_TIME = new CompoundDateTimeFormatter( + STRICT_T_TIME.printer, + new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_PREFIX) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_PREFIX) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); - private static final CompoundDateTimeFormatter EPOCH_SECOND = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(ChronoField.INSTANT_SECONDS) - .toFormatter(Locale.ROOT)); + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, and time zone offset prefixed + * by 'T' ('T'HH:mm:ssZZ).
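+ * For example, "T10:15:30Z" and "T10:15:30+01:00" both parse with this formatter.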
+ */ + private static final CompoundDateTimeFormatter T_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + STRICT_T_TIME_NO_MILLIS.printer, + new DateTimeFormatterBuilder().append(T_TIME_NO_MILLIS_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(T_TIME_NO_MILLIS_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); - private static final CompoundDateTimeFormatter EPOCH_MILLIS = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(ChronoField.INSTANT_SECONDS, 1, 19, SignStyle.NEVER) - .appendValue(ChronoField.MILLI_OF_SECOND, 3) - .toFormatter(Locale.ROOT)); + /* + * A formatter that formats or parses a year and a month, such as '2011-12'. + */ + private static final CompoundDateTimeFormatter YEAR_MONTH = new CompoundDateTimeFormatter( + STRICT_YEAR_MONTH.printer, + new DateTimeFormatterBuilder().appendValue(ChronoField.YEAR).appendLiteral("-").appendValue(MONTH_OF_YEAR).toFormatter(Locale.ROOT) + ); - private static final DateTimeFormatter STRICT_BASIC_WEEK_DATE_FORMATTER = new DateTimeFormatterBuilder() - .parseStrict() - .appendValue(IsoFields.WEEK_BASED_YEAR, 4) - .appendLiteral("W") - .appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 1, 2, SignStyle.NEVER) - .appendValue(ChronoField.DAY_OF_WEEK) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter STRICT_BASIC_WEEK_DATE = new CompoundDateTimeFormatter(STRICT_BASIC_WEEK_DATE_FORMATTER); - - private static final CompoundDateTimeFormatter STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + /* + * A date formatter that formats or parses a date without an offset, such as '2011-12-03'. + */ + private static final CompoundDateTimeFormatter YEAR_MONTH_DAY = new CompoundDateTimeFormatter( + STRICT_YEAR_MONTH_DAY.printer, new DateTimeFormatterBuilder() - .append(STRICT_BASIC_WEEK_DATE_FORMATTER) - .append(DateTimeFormatter.ofPattern("'T'HHmmssX", Locale.ROOT)) - .toFormatter(Locale.ROOT)); + .appendValue(ChronoField.YEAR) + .appendLiteral("-") + .appendValue(MONTH_OF_YEAR) + .appendLiteral("-") + .appendValue(DAY_OF_MONTH) + .toFormatter(Locale.ROOT) + ); - private static final CompoundDateTimeFormatter STRICT_BASIC_WEEK_DATE_TIME = new CompoundDateTimeFormatter( + /* + * Returns a formatter for a full date as four digit weekyear, two digit + * week of weekyear, and one digit day of week (xxxx-'W'ww-e). + */ + private static final CompoundDateTimeFormatter WEEK_DATE = new CompoundDateTimeFormatter(STRICT_WEEK_DATE.printer, WEEK_DATE_FORMATTER); + + /* + * Returns a formatter for a four digit weekyear and two digit week of + * weekyear.
(xxxx-'W'ww) + */ + private static final CompoundDateTimeFormatter WEEKYEAR_WEEK = new CompoundDateTimeFormatter(STRICT_WEEKYEAR_WEEK.printer, new DateTimeFormatterBuilder() - .append(STRICT_BASIC_WEEK_DATE_FORMATTER) - .append(DateTimeFormatter.ofPattern("'T'HHmmss.SSSX", Locale.ROOT)) - .toFormatter(Locale.ROOT)); + .appendValue(WeekFields.ISO.weekBasedYear()) + .appendLiteral("-W") + .appendValue(WeekFields.ISO.weekOfWeekBasedYear()) + .toFormatter(Locale.ROOT) + ); - private static final CompoundDateTimeFormatter STRICT_DATE = new CompoundDateTimeFormatter( - DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT)); - - private static final CompoundDateTimeFormatter STRICT_DATE_HOUR = new CompoundDateTimeFormatter( - DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH", Locale.ROOT)); - - private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE = new CompoundDateTimeFormatter( - DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm", Locale.ROOT)); - - private static final CompoundDateTimeFormatter STRICT_YEAR_MONTH_DAY = new CompoundDateTimeFormatter(STRICT_YEAR_MONTH_DAY_FORMATTER); - - private static final CompoundDateTimeFormatter STRICT_YEAR_MONTH = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) - .appendLiteral("-") - .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter STRICT_YEAR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND = - new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_FORMATTER); - - private static final CompoundDateTimeFormatter STRICT_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(STRICT_YEAR_MONTH_DAY_FORMATTER) - .appendLiteral('T') - .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) - .optionalStart() - .appendFraction(MILLI_OF_SECOND, 3, 3, true) - .optionalEnd() - .append(OPTIONAL_TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + /* + * Returns a formatter for a four digit weekyear, two digit week of + * weekyear, and one digit day of week. 
(xxxx-'W'ww-e) + */ + private static final CompoundDateTimeFormatter WEEKYEAR_WEEK_DAY = new CompoundDateTimeFormatter( + STRICT_WEEKYEAR_WEEK_DAY.printer, new DateTimeFormatterBuilder() - .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) - .appendLiteral('-') - .appendValue(DAY_OF_YEAR, 3, 3, SignStyle.NOT_NEGATIVE) - .appendLiteral('T') - .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) - .append(OPTIONAL_TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT)); + .appendValue(WeekFields.ISO.weekBasedYear()) + .appendLiteral("-W") + .appendValue(WeekFields.ISO.weekOfWeekBasedYear()) + .appendLiteral("-") + .appendValue(WeekFields.ISO.dayOfWeek()) + .toFormatter(Locale.ROOT) + ); - private static final CompoundDateTimeFormatter STRICT_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(STRICT_YEAR_MONTH_DAY_FORMATTER) - .appendLiteral('T') - .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) - .append(OPTIONAL_TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final DateTimeFormatter STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER = new DateTimeFormatterBuilder() - .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND_MILLIS = - new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER); - - private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND_FRACTION = STRICT_HOUR_MINUTE_SECOND_MILLIS; - - private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION = new CompoundDateTimeFormatter( - new DateTimeFormatterBuilder() - .append(STRICT_YEAR_MONTH_DAY_FORMATTER) - .appendLiteral("T") - .append(STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS = STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION; - - private static final CompoundDateTimeFormatter STRICT_HOUR = - new CompoundDateTimeFormatter(DateTimeFormatter.ofPattern("HH", Locale.ROOT)); - - private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE = - new CompoundDateTimeFormatter(DateTimeFormatter.ofPattern("HH:mm", Locale.ROOT)); - - private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) - .appendLiteral('-') - .appendValue(DAY_OF_YEAR, 3, 3, SignStyle.NOT_NEGATIVE) - .appendLiteral('T') - .appendPattern("HH:mm") - .optionalStart() - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .optionalEnd() - .append(OPTIONAL_TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final DateTimeFormatter STRICT_TIME_FORMATTER = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .append(TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter STRICT_TIME = new CompoundDateTimeFormatter(STRICT_TIME_FORMATTER); - - private static final DateTimeFormatter STRICT_T_TIME_FORMATTER = new DateTimeFormatterBuilder() - .appendLiteral("T") - .append(STRICT_TIME_FORMATTER) - 
.toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter STRICT_T_TIME = new CompoundDateTimeFormatter(STRICT_T_TIME_FORMATTER); - - private static final DateTimeFormatter STRICT_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) - .append(TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter STRICT_TIME_NO_MILLIS = new CompoundDateTimeFormatter(STRICT_TIME_NO_MILLIS_FORMATTER); - - private static final DateTimeFormatter STRICT_T_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder() - .appendLiteral("T") - .append(STRICT_TIME_NO_MILLIS_FORMATTER) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter STRICT_T_TIME_NO_MILLIS = - new CompoundDateTimeFormatter(STRICT_T_TIME_NO_MILLIS_FORMATTER); - - private static final CompoundDateTimeFormatter STRICT_WEEK_DATE = new CompoundDateTimeFormatter(DateTimeFormatter.ISO_WEEK_DATE); - - private static final CompoundDateTimeFormatter STRICT_WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( - new DateTimeFormatterBuilder() - .append(DateTimeFormatter.ISO_WEEK_DATE) - .append(STRICT_T_TIME_NO_MILLIS_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter STRICT_WEEK_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(DateTimeFormatter.ISO_WEEK_DATE) - .append(STRICT_T_TIME_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter STRICT_WEEKYEAR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(WeekFields.ISO.weekBasedYear(), 4, 10, SignStyle.EXCEEDS_PAD) - .toFormatter(Locale.ROOT)); - - private static final DateTimeFormatter STRICT_WEEKYEAR_WEEK_FORMATTER = new DateTimeFormatterBuilder() - .appendValue(WeekFields.ISO.weekBasedYear(), 4, 10, SignStyle.EXCEEDS_PAD) - .appendLiteral("-W") - .appendValue(WeekFields.ISO.weekOfWeekBasedYear(), 2, 2, SignStyle.NOT_NEGATIVE) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter STRICT_WEEKYEAR_WEEK = new CompoundDateTimeFormatter(STRICT_WEEKYEAR_WEEK_FORMATTER); - - private static final CompoundDateTimeFormatter STRICT_WEEKYEAR_WEEK_DAY = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(STRICT_WEEKYEAR_WEEK_FORMATTER) - .appendLiteral("-") - .appendValue(WeekFields.ISO.dayOfWeek()) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter BASIC_ISO_DATE = new CompoundDateTimeFormatter(DateTimeFormatter.BASIC_ISO_DATE); - private static final CompoundDateTimeFormatter ISO_ORDINAL_DATE = new CompoundDateTimeFormatter(DateTimeFormatter.ISO_ORDINAL_DATE); - private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE_SECOND = - new CompoundDateTimeFormatter(DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss", Locale.ROOT)); + ///////////////////////////////////////// + // + // end lenient formatters + // + ///////////////////////////////////////// public static CompoundDateTimeFormatter forPattern(String input) { return forPattern(input, Locale.ROOT); @@ -791,7 +1216,7 @@ public class DateFormatters { } if ("basicDate".equals(input) || "basic_date".equals(input)) { - return BASIC_ISO_DATE; + return BASIC_DATE; } else if ("basicDateTime".equals(input) || 
"basic_date_time".equals(input)) { return BASIC_DATE_TIME; } else if ("basicDateTimeNoMillis".equals(input) || "basic_date_time_no_millis".equals(input)) { @@ -916,7 +1341,7 @@ public class DateFormatters { } else if ("strictHourMinuteSecondMillis".equals(input) || "strict_hour_minute_second_millis".equals(input)) { return STRICT_HOUR_MINUTE_SECOND_MILLIS; } else if ("strictOrdinalDate".equals(input) || "strict_ordinal_date".equals(input)) { - return ISO_ORDINAL_DATE; + return STRICT_ORDINAL_DATE; } else if ("strictOrdinalDateTime".equals(input) || "strict_ordinal_date_time".equals(input)) { return STRICT_ORDINAL_DATE_TIME; } else if ("strictOrdinalDateTimeNoMillis".equals(input) || "strict_ordinal_date_time_no_millis".equals(input)) { diff --git a/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java b/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java new file mode 100644 index 00000000000..39f6dabbdb2 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java @@ -0,0 +1,267 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.time; + +import org.elasticsearch.ElasticsearchParseException; + +import java.time.DateTimeException; +import java.time.DayOfWeek; +import java.time.Instant; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.temporal.ChronoField; +import java.time.temporal.TemporalAccessor; +import java.time.temporal.TemporalAdjusters; +import java.time.temporal.TemporalField; +import java.time.temporal.TemporalQueries; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import java.util.function.LongSupplier; + +/** + * A parser for date/time formatted text with optional date math. + * + * The format of the datetime is configurable, and unix timestamps can also be used. Datemath + * is appended to a datetime with the following syntax: + * ||[+-/](\d+)?[yMwdhHms]. 
+ */ +public class DateMathParser { + + // base fields which should be used for default parsing, when we round up + private static final Map<TemporalField, Long> ROUND_UP_BASE_FIELDS = new HashMap<>(6); + static { + ROUND_UP_BASE_FIELDS.put(ChronoField.MONTH_OF_YEAR, 1L); + ROUND_UP_BASE_FIELDS.put(ChronoField.DAY_OF_MONTH, 1L); + ROUND_UP_BASE_FIELDS.put(ChronoField.HOUR_OF_DAY, 23L); + ROUND_UP_BASE_FIELDS.put(ChronoField.MINUTE_OF_HOUR, 59L); + ROUND_UP_BASE_FIELDS.put(ChronoField.SECOND_OF_MINUTE, 59L); + ROUND_UP_BASE_FIELDS.put(ChronoField.MILLI_OF_SECOND, 999L); + } + + private final CompoundDateTimeFormatter formatter; + private final CompoundDateTimeFormatter roundUpFormatter; + + public DateMathParser(CompoundDateTimeFormatter formatter) { + Objects.requireNonNull(formatter); + this.formatter = formatter; + this.roundUpFormatter = formatter.parseDefaulting(ROUND_UP_BASE_FIELDS); + } + + public long parse(String text, LongSupplier now) { + return parse(text, now, false, null); + } + + /** + * Parses text that potentially contains date math into milliseconds since the epoch. + * + * Examples are + * + * 2014-11-18||-2y subtracts two years from the input date + * now/m rounds the current time to minute granularity + * + * Supported rounding units are + * y year + * M month + * w week (beginning on a Monday) + * d day + * h/H hour + * m minute + * s second + * + * + * @param text the input + * @param now a supplier to retrieve the current date in milliseconds, if needed for additions + * @param roundUp should the result be rounded up + * @param timeZone an optional timezone that should be applied before returning the milliseconds since the epoch + * @return the parsed date in milliseconds since the epoch + */ + public long parse(String text, LongSupplier now, boolean roundUp, ZoneId timeZone) { + long time; + String mathString; + if (text.startsWith("now")) { + try { + time = now.getAsLong(); + } catch (Exception e) { + throw new ElasticsearchParseException("could not read the current timestamp", e); + } + mathString = text.substring("now".length()); + } else { + int index = text.indexOf("||"); + if (index == -1) { + return parseDateTime(text, timeZone, roundUp); + } + time = parseDateTime(text.substring(0, index), timeZone, false); + mathString = text.substring(index + 2); + } + + return parseMath(mathString, time, roundUp, timeZone); + } + + private long parseMath(final String mathString, final long time, final boolean roundUp, + ZoneId timeZone) throws ElasticsearchParseException { + if (timeZone == null) { + timeZone = ZoneOffset.UTC; + } + ZonedDateTime dateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(time), timeZone); + for (int i = 0; i < mathString.length(); ) { + char c = mathString.charAt(i++); + final boolean round; + final int sign; + if (c == '/') { + round = true; + sign = 1; + } else { + round = false; + if (c == '+') { + sign = 1; + } else if (c == '-') { + sign = -1; + } else { + throw new ElasticsearchParseException("operator not supported for date math [{}]", mathString); + } + } + + if (i >= mathString.length()) { + throw new ElasticsearchParseException("truncated date math [{}]", mathString); + } + + final int num; + if (!Character.isDigit(mathString.charAt(i))) { + num = 1; + } else { + int numFrom = i; + while (i < mathString.length() && Character.isDigit(mathString.charAt(i))) { + i++; + } + if (i >= mathString.length()) { + throw new ElasticsearchParseException("truncated date math [{}]", mathString); + } + num = Integer.parseInt(mathString.substring(numFrom, i)); + } + if
(round) { + if (num != 1) { + throw new ElasticsearchParseException("rounding `/` can only be used on single unit types [{}]", mathString); + } + } + char unit = mathString.charAt(i++); + switch (unit) { + case 'y': + if (round) { + dateTime = dateTime.withDayOfYear(1).with(LocalTime.MIN); + } else { + dateTime = dateTime.plusYears(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusYears(1); + } + break; + case 'M': + if (round) { + dateTime = dateTime.withDayOfMonth(1).with(LocalTime.MIN); + } else { + dateTime = dateTime.plusMonths(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusMonths(1); + } + break; + case 'w': + if (round) { + dateTime = dateTime.with(TemporalAdjusters.previousOrSame(DayOfWeek.MONDAY)).with(LocalTime.MIN); + } else { + dateTime = dateTime.plusWeeks(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusWeeks(1); + } + break; + case 'd': + if (round) { + dateTime = dateTime.with(LocalTime.MIN); + } else { + dateTime = dateTime.plusDays(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusDays(1); + } + break; + case 'h': + case 'H': + if (round) { + dateTime = dateTime.withMinute(0).withSecond(0).withNano(0); + } else { + dateTime = dateTime.plusHours(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusHours(1); + } + break; + case 'm': + if (round) { + dateTime = dateTime.withSecond(0).withNano(0); + } else { + dateTime = dateTime.plusMinutes(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusMinutes(1); + } + break; + case 's': + if (round) { + dateTime = dateTime.withNano(0); + } else { + dateTime = dateTime.plusSeconds(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusSeconds(1); + } + break; + default: + throw new ElasticsearchParseException("unit [{}] not supported for date math [{}]", unit, mathString); + } + if (roundUp) { + dateTime = dateTime.minus(1, ChronoField.MILLI_OF_SECOND.getBaseUnit()); + } + } + return dateTime.toInstant().toEpochMilli(); + } + + private long parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTime) { + CompoundDateTimeFormatter formatter = roundUpIfNoTime ? 
this.roundUpFormatter : this.formatter; + try { + if (timeZone == null) { + return DateFormatters.toZonedDateTime(formatter.parse(value)).toInstant().toEpochMilli(); + } else { + TemporalAccessor accessor = formatter.parse(value); + ZoneId zoneId = TemporalQueries.zone().queryFrom(accessor); + if (zoneId != null) { + timeZone = zoneId; + } + + return DateFormatters.toZonedDateTime(accessor).withZoneSameLocal(timeZone).toInstant().toEpochMilli(); + } + } catch (IllegalArgumentException | DateTimeException e) { + throw new ElasticsearchParseException("failed to parse date field [{}]: [{}]", e, value, e.getMessage()); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/discovery/single/SingleNodeDiscovery.java b/server/src/main/java/org/elasticsearch/discovery/single/SingleNodeDiscovery.java index 84718186824..a5a41522272 100644 --- a/server/src/main/java/org/elasticsearch/discovery/single/SingleNodeDiscovery.java +++ b/server/src/main/java/org/elasticsearch/discovery/single/SingleNodeDiscovery.java @@ -22,6 +22,7 @@ package org.elasticsearch.discovery.single; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -104,7 +105,7 @@ public class SingleNodeDiscovery extends AbstractLifecycleComponent implements D } protected ClusterState createInitialState(DiscoveryNode localNode) { - ClusterState.Builder builder = clusterApplier.newClusterStateBuilder(); + ClusterState.Builder builder = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.get(settings)); return builder.nodes(DiscoveryNodes.builder().add(localNode) .localNodeId(localNode.getId()) .masterNodeId(localNode.getId()) diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index d042f0b35de..ede3eb95cbd 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -253,7 +253,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover // set initial state assert committedState.get() == null; assert localNode != null; - ClusterState.Builder builder = clusterApplier.newClusterStateBuilder(); + ClusterState.Builder builder = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.get(settings)); ClusterState initialState = builder .blocks(ClusterBlocks.builder() .addGlobalBlock(STATE_NOT_RECOVERED_BLOCK) diff --git a/server/src/main/java/org/elasticsearch/gateway/Gateway.java b/server/src/main/java/org/elasticsearch/gateway/Gateway.java index 0a6e54ea6e1..d2261e5d1b4 100644 --- a/server/src/main/java/org/elasticsearch/gateway/Gateway.java +++ b/server/src/main/java/org/elasticsearch/gateway/Gateway.java @@ -23,6 +23,7 @@ import com.carrotsearch.hppc.ObjectFloatHashMap; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -147,7 +148,7 @@ public class Gateway extends AbstractComponent 
{ metaDataBuilder.transientSettings(), e -> logUnknownSetting("transient", e), (e, ex) -> logInvalidSetting("transient", e, ex))); - ClusterState.Builder builder = clusterService.getClusterApplierService().newClusterStateBuilder(); + ClusterState.Builder builder = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.get(settings)); builder.metaData(metaDataBuilder); listener.onSuccess(builder.build()); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index 486515e6755..44cd743bbd4 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.translog.Translog; +import org.elasticsearch.ingest.IngestService; import org.elasticsearch.node.Node; import java.util.Collections; @@ -254,6 +255,14 @@ public final class IndexSettings { public static final Setting<Integer> MAX_REGEX_LENGTH_SETTING = Setting.intSetting("index.max_regex_length", 1000, 1, Property.Dynamic, Property.IndexScope); + public static final Setting<String> DEFAULT_PIPELINE = + new Setting<>("index.default_pipeline", IngestService.NOOP_PIPELINE_NAME, s -> { + if (s == null || s.isEmpty()) { + throw new IllegalArgumentException("Value for [index.default_pipeline] must be a non-empty string."); + } + return s; + }, Property.Dynamic, Property.IndexScope); + private final Index index; private final Version version; private final Logger logger; @@ -293,6 +302,7 @@ public final class IndexSettings { private volatile TimeValue searchIdleAfter; private volatile int maxAnalyzedOffset; private volatile int maxTermsCount; + private volatile String defaultPipeline; /** * The maximum number of refresh listeners allowed on this shard.
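The index.default_pipeline setting above follows the standard dynamic index-setting shape: a parser that rejects empty values, and a settings-update consumer that publishes the new value through a volatile field. A minimal self-contained sketch of that validate-then-publish pattern (the class below is illustrative, not part of this change; "_none" is assumed to mirror IngestService.NOOP_PIPELINE_NAME):

    // Illustrative sketch of the validate-then-publish pattern used for
    // index.default_pipeline; not Elasticsearch code.
    final class DefaultPipelineHolder {
        static final String NOOP_PIPELINE_NAME = "_none"; // assumed default, mirroring IngestService

        private volatile String defaultPipeline = NOOP_PIPELINE_NAME;

        // Mirrors the setting's parser: empty values are rejected before any
        // consumer can observe them.
        static String parse(String s) {
            if (s == null || s.isEmpty()) {
                throw new IllegalArgumentException("Value for [index.default_pipeline] must be a non-empty string.");
            }
            return s;
        }

        // Registered as the update consumer; the volatile write publishes the
        // new default to concurrently indexing threads without locking.
        void update(String newValue) {
            this.defaultPipeline = parse(newValue);
        }

        String getDefaultPipeline() {
            return defaultPipeline;
        }

        public static void main(String[] args) {
            DefaultPipelineHolder holder = new DefaultPipelineHolder();
            holder.update("my-pipeline");
            System.out.println(holder.getDefaultPipeline()); // prints my-pipeline
        }
    }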
@@ -408,6 +418,7 @@ public final class IndexSettings { this.mergePolicyConfig = new MergePolicyConfig(logger, this); this.indexSortConfig = new IndexSortConfig(this); searchIdleAfter = scopedSettings.get(INDEX_SEARCH_IDLE_AFTER); + defaultPipeline = scopedSettings.get(DEFAULT_PIPELINE); scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING, mergePolicyConfig::setNoCFSRatio); scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING, mergePolicyConfig::setExpungeDeletesAllowed); @@ -446,6 +457,7 @@ public final class IndexSettings { scopedSettings.addSettingsUpdateConsumer(DEFAULT_FIELD_SETTING, this::setDefaultFields); scopedSettings.addSettingsUpdateConsumer(INDEX_SEARCH_IDLE_AFTER, this::setSearchIdleAfter); scopedSettings.addSettingsUpdateConsumer(MAX_REGEX_LENGTH_SETTING, this::setMaxRegexLength); + scopedSettings.addSettingsUpdateConsumer(DEFAULT_PIPELINE, this::setDefaultPipeline); } private void setSearchIdleAfter(TimeValue searchIdleAfter) { this.searchIdleAfter = searchIdleAfter; } @@ -821,4 +833,12 @@ public final class IndexSettings { * Returns the time that an index shard becomes search idle unless it's accessed in between */ public TimeValue getSearchIdleAfter() { return searchIdleAfter; } + + public String getDefaultPipeline() { + return defaultPipeline; + } + + public void setDefaultPipeline(String defaultPipeline) { + this.defaultPipeline = defaultPipeline; + } } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java b/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java index bcc249f8a8a..006973dd9b6 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java @@ -20,19 +20,18 @@ package org.elasticsearch.index.analysis; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.synonym.SolrSynonymParser; import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.CharsRefBuilder; -import org.elasticsearch.common.logging.Loggers; import java.io.IOException; public class ESSolrSynonymParser extends SolrSynonymParser { + private static final Logger logger = LogManager.getLogger(ESSolrSynonymParser.class); private final boolean lenient; - private static final Logger logger = - Loggers.getLogger(ESSolrSynonymParser.class, "ESSolrSynonymParser"); public ESSolrSynonymParser(boolean dedup, boolean expand, boolean lenient, Analyzer analyzer) { super(dedup, expand, analyzer); diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java b/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java index 3764820c434..ebcd84e39d7 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java @@ -20,19 +20,18 @@ package org.elasticsearch.index.analysis; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.synonym.WordnetSynonymParser; import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.CharsRefBuilder; -import org.elasticsearch.common.logging.Loggers; import java.io.IOException; public class ESWordnetSynonymParser 
extends WordnetSynonymParser { + private static final Logger logger = LogManager.getLogger(ESWordnetSynonymParser.class); private final boolean lenient; - private static final Logger logger = - Loggers.getLogger(ESSolrSynonymParser.class, "ESWordnetSynonymParser"); public ESWordnetSynonymParser(boolean dedup, boolean expand, boolean lenient, Analyzer analyzer) { super(dedup, expand, analyzer); diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index b7c938b469f..31da7afc51a 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -304,6 +304,7 @@ public abstract class Engine implements Closeable { private final Operation.TYPE operationType; private final Result.Type resultType; private final long version; + private final long term; private final long seqNo; private final Exception failure; private final SetOnce<Boolean> freeze = new SetOnce<>(); @@ -311,19 +312,21 @@ public abstract class Engine implements Closeable { private Translog.Location translogLocation; private long took; - protected Result(Operation.TYPE operationType, Exception failure, long version, long seqNo) { + protected Result(Operation.TYPE operationType, Exception failure, long version, long term, long seqNo) { this.operationType = operationType; this.failure = Objects.requireNonNull(failure); this.version = version; + this.term = term; this.seqNo = seqNo; this.requiredMappingUpdate = null; this.resultType = Type.FAILURE; } - protected Result(Operation.TYPE operationType, long version, long seqNo) { + protected Result(Operation.TYPE operationType, long version, long term, long seqNo) { this.operationType = operationType; this.version = version; this.seqNo = seqNo; + this.term = term; this.failure = null; this.requiredMappingUpdate = null; this.resultType = Type.SUCCESS; @@ -333,6 +336,7 @@ public abstract class Engine implements Closeable { this.operationType = operationType; this.version = Versions.NOT_FOUND; this.seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; + this.term = 0L; this.failure = null; this.requiredMappingUpdate = requiredMappingUpdate; this.resultType = Type.MAPPING_UPDATE_REQUIRED; @@ -357,6 +361,10 @@ public abstract class Engine implements Closeable { return seqNo; } + public long getTerm() { + return term; + } + /** * If the operation was aborted due to missing mappings, this method will return the mappings * that are required to complete the operation.
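With the primary term stored on every result, a successful operation now carries the (term, seqNo) pair that sequence-number-based replication uses to identify operations. A small illustrative sketch of why both values travel together (the classes below are stand-ins, not Elasticsearch code):

    import java.util.Comparator;

    // Stand-in for the (term, seqNo) pair now exposed via Engine.Result.
    final class OpId {
        final long term;   // primary term the operation was indexed under
        final long seqNo;  // sequence number assigned by that primary

        OpId(long term, long seqNo) {
            this.term = term;
            this.seqNo = seqNo;
        }
    }

    final class OpOrdering {
        // Assumed ordering for illustration: compare by seqNo, and when two
        // operations claim the same seqNo (e.g. across a primary failover),
        // prefer the one written under the higher term.
        static final Comparator<OpId> ORDER =
            Comparator.<OpId>comparingLong(op -> op.seqNo).thenComparingLong(op -> op.term);

        public static void main(String[] args) {
            OpId fromOldPrimary = new OpId(1, 42);
            OpId fromNewPrimary = new OpId(2, 42);
            System.out.println(ORDER.compare(fromOldPrimary, fromNewPrimary) < 0); // true
        }
    }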
@@ -415,20 +423,20 @@ public abstract class Engine implements Closeable { private final boolean created; - public IndexResult(long version, long seqNo, boolean created) { - super(Operation.TYPE.INDEX, version, seqNo); + public IndexResult(long version, long term, long seqNo, boolean created) { + super(Operation.TYPE.INDEX, version, term, seqNo); this.created = created; } /** * use in case of the index operation failed before getting to internal engine **/ - public IndexResult(Exception failure, long version) { - this(failure, version, SequenceNumbers.UNASSIGNED_SEQ_NO); + public IndexResult(Exception failure, long version, long term) { + this(failure, version, term, SequenceNumbers.UNASSIGNED_SEQ_NO); } - public IndexResult(Exception failure, long version, long seqNo) { - super(Operation.TYPE.INDEX, failure, version, seqNo); + public IndexResult(Exception failure, long version, long term, long seqNo) { + super(Operation.TYPE.INDEX, failure, version, term, seqNo); this.created = false; } @@ -447,20 +455,20 @@ public abstract class Engine implements Closeable { private final boolean found; - public DeleteResult(long version, long seqNo, boolean found) { - super(Operation.TYPE.DELETE, version, seqNo); + public DeleteResult(long version, long term, long seqNo, boolean found) { + super(Operation.TYPE.DELETE, version, term, seqNo); this.found = found; } /** * use in case of the delete operation failed before getting to internal engine **/ - public DeleteResult(Exception failure, long version) { - this(failure, version, SequenceNumbers.UNASSIGNED_SEQ_NO, false); + public DeleteResult(Exception failure, long version, long term) { + this(failure, version, term, SequenceNumbers.UNASSIGNED_SEQ_NO, false); } - public DeleteResult(Exception failure, long version, long seqNo, boolean found) { - super(Operation.TYPE.DELETE, failure, version, seqNo); + public DeleteResult(Exception failure, long version, long term, long seqNo, boolean found) { + super(Operation.TYPE.DELETE, failure, version, term, seqNo); this.found = found; } @@ -477,12 +485,12 @@ public abstract class Engine implements Closeable { public static class NoOpResult extends Result { - NoOpResult(long seqNo) { - super(Operation.TYPE.NO_OP, 0, seqNo); + NoOpResult(long term, long seqNo) { + super(Operation.TYPE.NO_OP, 0, term, seqNo); } - NoOpResult(long seqNo, Exception failure) { - super(Operation.TYPE.NO_OP, failure, 0, seqNo); + NoOpResult(long term, long seqNo, Exception failure) { + super(Operation.TYPE.NO_OP, failure, 0, term, seqNo); } } diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index bdcfb2fc731..a30127a24ae 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -736,6 +736,10 @@ public class InternalEngine extends Engine { return localCheckpointTracker.generateSeqNo(); } + private long getPrimaryTerm() { + return engineConfig.getPrimaryTermSupplier().getAsLong(); + } + @Override public IndexResult index(Index index) throws IOException { assert Objects.equals(index.uid().field(), IdFieldMapper.NAME) : index.uid().field(); @@ -788,7 +792,7 @@ public class InternalEngine extends Engine { indexResult = indexIntoLucene(index, plan); } else { indexResult = new IndexResult( - plan.versionForIndexing, plan.seqNoForIndexing, plan.currentNotFoundOrDeleted); + plan.versionForIndexing, getPrimaryTerm(),
plan.seqNoForIndexing, plan.currentNotFoundOrDeleted); } if (index.origin() != Operation.Origin.LOCAL_TRANSLOG_RECOVERY) { final Translog.Location location; @@ -900,7 +904,7 @@ public class InternalEngine extends Engine { currentVersion, index.version(), currentNotFoundOrDeleted)) { final VersionConflictEngineException e = new VersionConflictEngineException(shardId, index, currentVersion, currentNotFoundOrDeleted); - plan = IndexingStrategy.skipDueToVersionConflict(e, currentNotFoundOrDeleted, currentVersion); + plan = IndexingStrategy.skipDueToVersionConflict(e, currentNotFoundOrDeleted, currentVersion, getPrimaryTerm()); } else { plan = IndexingStrategy.processNormally(currentNotFoundOrDeleted, generateSeqNoForOperation(index), @@ -930,7 +934,7 @@ public class InternalEngine extends Engine { assert assertDocDoesNotExist(index, canOptimizeAddDocument(index) == false); addDocs(index.docs(), indexWriter); } - return new IndexResult(plan.versionForIndexing, plan.seqNoForIndexing, plan.currentNotFoundOrDeleted); + return new IndexResult(plan.versionForIndexing, getPrimaryTerm(), plan.seqNoForIndexing, plan.currentNotFoundOrDeleted); } catch (Exception ex) { if (indexWriter.getTragicException() == null) { /* There is no tragic event recorded so this must be a document failure. @@ -946,7 +950,7 @@ public class InternalEngine extends Engine { * we return a `MATCH_ANY` version to indicate no document was index. The value is * not used anyway */ - return new IndexResult(ex, Versions.MATCH_ANY, plan.seqNoForIndexing); + return new IndexResult(ex, Versions.MATCH_ANY, getPrimaryTerm(), plan.seqNoForIndexing); } else { throw ex; } @@ -1019,8 +1023,8 @@ public class InternalEngine extends Engine { } static IndexingStrategy skipDueToVersionConflict( - VersionConflictEngineException e, boolean currentNotFoundOrDeleted, long currentVersion) { - final IndexResult result = new IndexResult(e, currentVersion); + VersionConflictEngineException e, boolean currentNotFoundOrDeleted, long currentVersion, long term) { + final IndexResult result = new IndexResult(e, currentVersion, term); return new IndexingStrategy( currentNotFoundOrDeleted, false, false, SequenceNumbers.UNASSIGNED_SEQ_NO, Versions.NOT_FOUND, result); } @@ -1097,7 +1101,7 @@ public class InternalEngine extends Engine { deleteResult = deleteInLucene(delete, plan); } else { deleteResult = new DeleteResult( - plan.versionOfDeletion, plan.seqNoOfDeletion, plan.currentlyDeleted == false); + plan.versionOfDeletion, getPrimaryTerm(), plan.seqNoOfDeletion, plan.currentlyDeleted == false); } if (delete.origin() != Operation.Origin.LOCAL_TRANSLOG_RECOVERY) { final Translog.Location location; @@ -1178,7 +1182,7 @@ public class InternalEngine extends Engine { final DeletionStrategy plan; if (delete.versionType().isVersionConflictForWrites(currentVersion, delete.version(), currentlyDeleted)) { final VersionConflictEngineException e = new VersionConflictEngineException(shardId, delete, currentVersion, currentlyDeleted); - plan = DeletionStrategy.skipDueToVersionConflict(e, currentVersion, currentlyDeleted); + plan = DeletionStrategy.skipDueToVersionConflict(e, currentVersion, getPrimaryTerm(), currentlyDeleted); } else { plan = DeletionStrategy.processNormally( currentlyDeleted, @@ -1201,12 +1205,12 @@ public class InternalEngine extends Engine { new DeleteVersionValue(plan.versionOfDeletion, plan.seqNoOfDeletion, delete.primaryTerm(), engineConfig.getThreadPool().relativeTimeInMillis())); return new DeleteResult( - plan.versionOfDeletion, plan.seqNoOfDeletion, 
plan.currentlyDeleted == false); + plan.versionOfDeletion, getPrimaryTerm(), plan.seqNoOfDeletion, plan.currentlyDeleted == false); } catch (Exception ex) { if (indexWriter.getTragicException() == null) { // there is no tragic event and such it must be a document level failure return new DeleteResult( - ex, plan.versionOfDeletion, plan.seqNoOfDeletion, plan.currentlyDeleted == false); + ex, plan.versionOfDeletion, getPrimaryTerm(), plan.seqNoOfDeletion, plan.currentlyDeleted == false); } else { throw ex; } @@ -1237,9 +1241,9 @@ public class InternalEngine extends Engine { } static DeletionStrategy skipDueToVersionConflict( - VersionConflictEngineException e, long currentVersion, boolean currentlyDeleted) { + VersionConflictEngineException e, long currentVersion, long term, boolean currentlyDeleted) { final long unassignedSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; - final DeleteResult deleteResult = new DeleteResult(e, currentVersion, unassignedSeqNo, currentlyDeleted == false); + final DeleteResult deleteResult = new DeleteResult(e, currentVersion, term, unassignedSeqNo, currentlyDeleted == false); return new DeletionStrategy(false, currentlyDeleted, unassignedSeqNo, Versions.NOT_FOUND, deleteResult); } @@ -1268,7 +1272,7 @@ public class InternalEngine extends Engine { try (ReleasableLock ignored = readLock.acquire()) { noOpResult = innerNoOp(noOp); } catch (final Exception e) { - noOpResult = new NoOpResult(noOp.seqNo(), e); + noOpResult = new NoOpResult(getPrimaryTerm(), noOp.seqNo(), e); } return noOpResult; } @@ -1278,7 +1282,7 @@ public class InternalEngine extends Engine { assert noOp.seqNo() > SequenceNumbers.NO_OPS_PERFORMED; final long seqNo = noOp.seqNo(); try { - final NoOpResult noOpResult = new NoOpResult(noOp.seqNo()); + final NoOpResult noOpResult = new NoOpResult(getPrimaryTerm(), noOp.seqNo()); if (noOp.origin() != Operation.Origin.LOCAL_TRANSLOG_RECOVERY) { final Translog.Location location = translog.add(new Translog.NoOp(noOp.seqNo(), noOp.primaryTerm(), noOp.reason())); noOpResult.setTranslogLocation(location); diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java b/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java index fedad6e134b..35284cb655d 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.fielddata; - import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; @@ -29,18 +28,23 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLoggerFactory; -import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.MutableDateTime; -import org.joda.time.ReadableDateTime; import java.io.IOException; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.AbstractList; import java.util.Arrays; import java.util.Comparator; import java.util.List; +import java.util.function.Consumer; import java.util.function.UnaryOperator; +import static org.elasticsearch.common.Booleans.parseBoolean; /** * Script level doc values, the assumption is that any implementation will @@ -52,6 +56,7 @@ 
import java.util.function.UnaryOperator; * values form multiple documents. */ public abstract class ScriptDocValues<T> extends AbstractList<T> { + /** * Set the current doc ID. */ @@ -142,31 +147,55 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> { } } - public static final class Dates extends ScriptDocValues<ReadableDateTime> { - protected static final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(Dates.class)); + public static final class Dates extends ScriptDocValues<Object> { - private static final ReadableDateTime EPOCH = new DateTime(0, DateTimeZone.UTC); + /** Whether scripts should expose dates as java time objects instead of joda time. */ + private static final boolean USE_JAVA_TIME = parseBoolean(System.getProperty("es.scripting.use_java_time"), false); + + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(Dates.class)); private final SortedNumericDocValues in; + /** - * Values wrapped in {@link MutableDateTime}. Null by default an allocated on first usage so we allocate a reasonably size. We keep - * this array so we don't have allocate new {@link MutableDateTime}s on every usage. Instead we reuse them for every document. + * Method call to add deprecation message. Normally this is + * {@link #deprecationLogger} but tests override. */ - private MutableDateTime[] dates; + private final Consumer<String> deprecationCallback; + + /** + * Whether java time or joda time should be used. This is normally {@link #USE_JAVA_TIME} but tests override it. + */ + private final boolean useJavaTime; + + /** + * Values wrapped in a date time object. The concrete type depends on the system property {@code es.scripting.use_java_time}. + * When that system property is {@code false}, the date time objects are of type {@link MutableDateTime}. When the system + * property is {@code true}, the date time objects are of type {@link java.time.ZonedDateTime}. + */ + private Object[] dates; private int count; /** * Standard constructor. */ public Dates(SortedNumericDocValues in) { + this(in, message -> deprecationLogger.deprecatedAndMaybeLog("scripting_joda_time_deprecation", message), USE_JAVA_TIME); + } + + /** + * Constructor for testing with a deprecation callback. + */ + Dates(SortedNumericDocValues in, Consumer<String> deprecationCallback, boolean useJavaTime) { this.in = in; + this.deprecationCallback = deprecationCallback; + this.useJavaTime = useJavaTime; } /** * Fetch the first field value or 0 millis after epoch if there are no * in. */ - public ReadableDateTime getValue() { + public Object getValue() { if (count == 0) { throw new IllegalStateException("A document doesn't have a value for a field! " + "Use doc[].size()==0 to check if a document is missing a field!"); @@ -175,7 +204,7 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> { } @Override - public ReadableDateTime get(int index) { + public Object get(int index) { if (index >= count) { throw new IndexOutOfBoundsException( "attempted to fetch the [" + index + "] date when there are only [" @@ -206,30 +235,41 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> { if (count == 0) { return; } - if (dates == null) { - // Happens for the document. We delay allocating dates so we can allocate it with a reasonable size. - dates = new MutableDateTime[count]; - for (int i = 0; i < dates.length; i++) { + if (useJavaTime) { + if (dates == null || count > dates.length) { + // Happens for the document.
We delay allocating dates so we can allocate it with a reasonable size. + dates = new ZonedDateTime[count]; + } + for (int i = 0; i < count; ++i) { + dates[i] = ZonedDateTime.ofInstant(Instant.ofEpochMilli(in.nextValue()), ZoneOffset.UTC); + } + } else { + deprecated("The joda time api for doc values is deprecated. Use -Des.scripting.use_java_time=true" + + " to use the java time api for date field doc values"); + if (dates == null || count > dates.length) { + // Happens for the document. We delay allocating dates so we can allocate it with a reasonable size. + dates = new MutableDateTime[count]; + } + for (int i = 0; i < count; i++) { dates[i] = new MutableDateTime(in.nextValue(), DateTimeZone.UTC); } - return; } - if (count > dates.length) { - // Happens when we move to a new document and it has more dates than any documents before it. - MutableDateTime[] backup = dates; - dates = new MutableDateTime[count]; - System.arraycopy(backup, 0, dates, 0, backup.length); - for (int i = 0; i < backup.length; i++) { - dates[i].setMillis(in.nextValue()); + } + + /** + * Log a deprecation log, with the server's permissions, not the permissions of the + * script calling this method. We need to do this to prevent errors when rolling + * the log file. + */ + private void deprecated(String message) { + // Intentionally not calling SpecialPermission.check because this is supposed to be called by scripts + AccessController.doPrivileged(new PrivilegedAction<Void>() { + @Override + public Void run() { + deprecationCallback.accept(message); + return null; } - for (int i = backup.length; i < dates.length; i++) { - dates[i] = new MutableDateTime(in.nextValue(), DateTimeZone.UTC); - } - return; - } - for (int i = 0; i < count; i++) { - dates[i] = new MutableDateTime(in.nextValue(), DateTimeZone.UTC); - } + }); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index cbb008c9d00..2e949f027d1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -276,7 +276,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable { context.doc().add(field); } } catch (Exception e) { - throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e); + throw new MapperParsingException("failed to parse field [{}] of type [{}]", e, fieldType().name(), + fieldType().typeName()); } multiFields.parse(this, context); return null; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java index 318d9cfc6fa..fb9e16cbe13 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java @@ -510,7 +510,8 @@ public class GeoShapeFieldMapper extends FieldMapper { indexShape(context, shape); } catch (Exception e) { if (ignoreMalformed.value() == false) { - throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e); + throw new MapperParsingException("failed to parse field [{}] of type [{}]", e, fieldType().name(), + fieldType().typeName()); } context.addIgnoredField(fieldType.name()); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java index
4061303416b..bb048ab9afa 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java @@ -126,7 +126,7 @@ public class IndexFieldMapper extends MetadataFieldMapper { */ @Override public Query termQuery(Object value, @Nullable QueryShardContext context) { - if (isSameIndex(value, context.getFullyQualifiedIndexName())) { + if (isSameIndex(value, context.getFullyQualifiedIndex().getName())) { return Queries.newMatchAllQuery(); } else { return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.index().getName() + " vs. " + value); @@ -139,14 +139,14 @@ public class IndexFieldMapper extends MetadataFieldMapper { return super.termsQuery(values, context); } for (Object value : values) { - if (isSameIndex(value, context.getFullyQualifiedIndexName())) { + if (isSameIndex(value, context.getFullyQualifiedIndex().getName())) { // No need to OR these clauses - we can only logically be // running in the context of just one of these index names. return Queries.newMatchAllQuery(); } } // None of the listed index names are this one - return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.getFullyQualifiedIndexName() + return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.getFullyQualifiedIndex().getName() + " vs. " + values); } @@ -189,5 +189,4 @@ public class IndexFieldMapper extends MetadataFieldMapper { protected void doMerge(Mapper mergeWith) { // nothing to do } - } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 921e472c94f..9cd8ef1f6ac 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -52,6 +52,7 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.InvalidTypeNameException; import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.search.suggest.completion.context.ContextMapping; import java.io.Closeable; import java.io.IOException; @@ -421,6 +422,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable { MapperMergeValidator.validateFieldReferences(fieldMappers, fieldAliasMappers, fullPathObjectMappers, fieldTypes); + ContextMapping.validateContextPaths(indexSettings.getIndexVersionCreated(), fieldMappers, fieldTypes::get); + if (reason == MergeReason.MAPPING_UPDATE) { // this check will only be performed on the master node when there is // a call to the update mapping API. For all other cases like diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index 598a6f38a2e..ac19298ae32 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -83,7 +83,7 @@ public class QueryShardContext extends QueryRewriteContext { private String[] types = Strings.EMPTY_ARRAY; private boolean cachable = true; private final SetOnce<Boolean> frozen = new SetOnce<>(); - private final String fullyQualifiedIndexName; + private final Index fullyQualifiedIndex; public void setTypes(String...
types) { this.types = types; @@ -116,7 +116,8 @@ public class QueryShardContext extends QueryRewriteContext { this.indexSettings = indexSettings; this.reader = reader; this.clusterAlias = clusterAlias; - this.fullyQualifiedIndexName = RemoteClusterAware.buildRemoteIndexName(clusterAlias, indexSettings.getIndex().getName()); + this.fullyQualifiedIndex = new Index(RemoteClusterAware.buildRemoteIndexName(clusterAlias, indexSettings.getIndex().getName()), + indexSettings.getIndex().getUUID()); } public QueryShardContext(QueryShardContext source) { @@ -163,7 +164,7 @@ public class QueryShardContext extends QueryRewriteContext { } public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType) { - return (IFD) indexFieldDataService.apply(fieldType, fullyQualifiedIndexName); + return (IFD) indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName()); } public void addNamedQuery(String name, Query query) { @@ -275,7 +276,7 @@ public class QueryShardContext extends QueryRewriteContext { public SearchLookup lookup() { if (lookup == null) { lookup = new SearchLookup(getMapperService(), - mappedFieldType -> indexFieldDataService.apply(mappedFieldType, fullyQualifiedIndexName), types); + mappedFieldType -> indexFieldDataService.apply(mappedFieldType, fullyQualifiedIndex.getName()), types); } return lookup; } @@ -426,9 +427,9 @@ public class QueryShardContext extends QueryRewriteContext { } /** - * Returns the fully qualified index name including a remote cluster alias if applicable + * Returns the fully qualified index including a remote cluster alias if applicable, and the index uuid */ - public String getFullyQualifiedIndexName() { - return fullyQualifiedIndexName; + public Index getFullyQualifiedIndex() { + return fullyQualifiedIndex; } } diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryShardException.java b/server/src/main/java/org/elasticsearch/index/query/QueryShardException.java index 9b6ce3a6e4b..843ff931d4f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryShardException.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryShardException.java @@ -37,16 +37,15 @@ public class QueryShardException extends ElasticsearchException { } public QueryShardException(QueryShardContext context, String msg, Throwable cause, Object... args) { - super(msg, cause, args); - setIndex(context.index()); + this(context.getFullyQualifiedIndex(), msg, cause, args); } /** * This constructor is provided for use in unit tests where a * {@link QueryShardContext} may not be available */ - public QueryShardException(Index index, String msg, Throwable cause) { - super(msg, cause); + public QueryShardException(Index index, String msg, Throwable cause, Object... args) { + super(msg, cause, args); setIndex(index); } diff --git a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java index 6548aad7670..b406621e978 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java @@ -39,6 +39,7 @@ import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Map; +import java.util.Objects; import java.util.OptionalLong; import java.util.Set; import java.util.function.Function; @@ -85,6 +86,7 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L * computation from that point on.
*/ volatile boolean primaryMode; + /** + * Boolean flag that indicates if a relocation handoff is in progress. A handoff is started by calling {@link #startRelocationHandoff} + * and is finished by either calling {@link #completeRelocationHandoff} or {@link #abortRelocationHandoff}, depending on whether the @@ -102,6 +104,11 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L */ boolean handoffInProgress; + /** + * Boolean flag that indicates whether a relocation handoff completed (see {@link #completeRelocationHandoff}). + */ + volatile boolean relocated; + /** + * The global checkpoint tracker relies on the property that cluster state updates are applied in-order. After transferring a primary + * context from the primary relocation source to the target and initializing the target, it is possible for the target to apply a @@ -121,6 +128,13 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L */ final Map<String, CheckpointState> checkpoints; + /** + * A callback invoked when the global checkpoint is updated. For primary mode this occurs if the computed global checkpoint advances on + * the basis of state changes tracked here. For non-primary mode this occurs if the local knowledge of the global checkpoint advances + * due to an update from the primary. + */ + private final LongConsumer onGlobalCheckpointUpdated; + /** + * This set contains allocation IDs for which there is a thread actively waiting for the local checkpoint to advance to at least the + * current global checkpoint. @@ -260,6 +274,13 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L return primaryMode; } + /** + * Returns whether the replication tracker has relocated away to another shard copy. + */ + public boolean isRelocated() { + return relocated; + } + /** + * Class invariant that should hold before and after every invocation of public methods on this class. As Java lacks implication + * as a logical operator, many of the invariants are written under the form (!A || B), they should be read as (A implies B) however.
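To make the new notification path concrete: the tracker takes a LongConsumer at construction and invokes it whenever the global checkpoint it tracks advances; IndexShard later wires this to GlobalCheckpointListeners::globalCheckpointUpdated. A minimal sketch of that contract, under illustrative names (only java.util.function.LongConsumer is taken from the diff):

import java.util.function.LongConsumer;

class CheckpointTrackerSketch {
    private long globalCheckpoint = -1; // stands in for SequenceNumbers.NO_OPS_PERFORMED
    private final LongConsumer onGlobalCheckpointUpdated;

    CheckpointTrackerSketch(final LongConsumer onGlobalCheckpointUpdated) {
        this.onGlobalCheckpointUpdated = onGlobalCheckpointUpdated;
    }

    void updateGlobalCheckpoint(final long computed) {
        if (computed > globalCheckpoint) {
            globalCheckpoint = computed; // advance the tracked state first ...
            onGlobalCheckpointUpdated.accept(computed); // ... then notify, matching the reordering in the hunks below
        }
    }
}

Updating state before invoking the callback is exactly what the reordered assignment/accept lines in the following hunks guarantee.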
@@ -287,6 +308,9 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L // relocation handoff can only occur in primary mode assert !handoffInProgress || primaryMode; + // a relocated copy is not in primary mode + assert !relocated || !primaryMode; + // the current shard is marked as in-sync when the global checkpoint tracker operates in primary mode assert !primaryMode || checkpoints.get(shardAllocationId).inSync; @@ -375,7 +399,8 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L final ShardId shardId, final String allocationId, final IndexSettings indexSettings, - final long globalCheckpoint) { + final long globalCheckpoint, + final LongConsumer onGlobalCheckpointUpdated) { super(shardId, indexSettings); assert globalCheckpoint >= SequenceNumbers.UNASSIGNED_SEQ_NO : "illegal initial global checkpoint: " + globalCheckpoint; this.shardAllocationId = allocationId; @@ -384,6 +409,7 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L this.appliedClusterStateVersion = -1L; this.checkpoints = new HashMap<>(1 + indexSettings.getNumberOfReplicas()); checkpoints.put(allocationId, new CheckpointState(SequenceNumbers.UNASSIGNED_SEQ_NO, globalCheckpoint, false, false)); + this.onGlobalCheckpointUpdated = Objects.requireNonNull(onGlobalCheckpointUpdated); this.pendingInSync = new HashSet<>(); this.routingTable = null; this.replicationGroup = null; @@ -440,7 +466,10 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L updateGlobalCheckpoint( shardAllocationId, globalCheckpoint, - current -> logger.trace("updating global checkpoint from [{}] to [{}] due to [{}]", current, globalCheckpoint, reason)); + current -> { + logger.trace("updated global checkpoint from [{}] to [{}] due to [{}]", current, globalCheckpoint, reason); + onGlobalCheckpointUpdated.accept(globalCheckpoint); + }); assert invariant(); } @@ -458,7 +487,7 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L allocationId, globalCheckpoint, current -> logger.trace( - "updating local knowledge for [{}] on the primary of the global checkpoint from [{}] to [{}]", + "updated local knowledge for [{}] on the primary of the global checkpoint from [{}] to [{}]", allocationId, current, globalCheckpoint)); @@ -469,8 +498,8 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L final CheckpointState cps = checkpoints.get(allocationId); assert !this.shardAllocationId.equals(allocationId) || cps != null; if (cps != null && globalCheckpoint > cps.globalCheckpoint) { - ifUpdated.accept(cps.globalCheckpoint); cps.globalCheckpoint = globalCheckpoint; + ifUpdated.accept(cps.globalCheckpoint); } } @@ -721,8 +750,9 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L assert computedGlobalCheckpoint >= globalCheckpoint : "new global checkpoint [" + computedGlobalCheckpoint + "] is lower than previous one [" + globalCheckpoint + "]"; if (globalCheckpoint != computedGlobalCheckpoint) { - logger.trace("global checkpoint updated to [{}]", computedGlobalCheckpoint); cps.globalCheckpoint = computedGlobalCheckpoint; + logger.trace("updated global checkpoint to [{}]", computedGlobalCheckpoint); + onGlobalCheckpointUpdated.accept(computedGlobalCheckpoint); } } @@ -766,8 +796,10 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L assert invariant(); assert primaryMode; assert handoffInProgress; + assert relocated == false; 
primaryMode = false; handoffInProgress = false; + relocated = true; // forget all checkpoint information except for global checkpoint of current shard checkpoints.entrySet().stream().forEach(e -> { final CheckpointState cps = e.getValue(); diff --git a/server/src/main/java/org/elasticsearch/index/shard/GlobalCheckpointListeners.java b/server/src/main/java/org/elasticsearch/index/shard/GlobalCheckpointListeners.java new file mode 100644 index 00000000000..e279badec4a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/shard/GlobalCheckpointListeners.java @@ -0,0 +1,166 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.shard; + +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; + +import java.io.Closeable; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.Executor; + +import static org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; + +/** + * Represents a collection of global checkpoint listeners. This collection can be added to, and all listeners present at the time of an + * update will be notified together. All listeners will be notified when the shard is closed. + */ +public class GlobalCheckpointListeners implements Closeable { + + /** + * A global checkpoint listener consisting of a callback that is notified when the global checkpoint is updated or the shard is closed. + */ + @FunctionalInterface + public interface GlobalCheckpointListener { + /** + * Callback when the global checkpoint is updated or the shard is closed. If the shard is closed, the value of the global checkpoint + * will be set to {@link org.elasticsearch.index.seqno.SequenceNumbers#UNASSIGNED_SEQ_NO} and the exception will be non-null. If the + * global checkpoint is updated, the exception will be null. + * + * @param globalCheckpoint the updated global checkpoint + * @param e if non-null, the shard is closed + */ + void accept(long globalCheckpoint, IndexShardClosedException e); + } + + // guarded by this + private boolean closed; + private volatile List<GlobalCheckpointListener> listeners; + private long lastKnownGlobalCheckpoint = UNASSIGNED_SEQ_NO; + + private final ShardId shardId; + private final Executor executor; + private final Logger logger; + + /** + * Construct a global checkpoint listeners collection.
+ * + * @param shardId the shard ID on which global checkpoint updates can be listened to + * @param executor the executor for listener notifications + * @param logger a shard-level logger + */ + GlobalCheckpointListeners( + final ShardId shardId, + final Executor executor, + final Logger logger) { + this.shardId = Objects.requireNonNull(shardId); + this.executor = Objects.requireNonNull(executor); + this.logger = Objects.requireNonNull(logger); + } + + /** + * Add a global checkpoint listener. If the global checkpoint is above the current global checkpoint known to the listener then the + * listener will be asynchronously notified on the executor used to construct this collection of global checkpoint listeners. If the + * shard is closed then the listener will be asynchronously notified on the executor used to construct this collection of global + * checkpoint listeners. The listener will only be notified of at most one event, either the global checkpoint is updated or the shard + * is closed. A listener must re-register after one of these events to receive subsequent events. + * + * @param currentGlobalCheckpoint the current global checkpoint known to the listener + * @param listener the listener + */ + synchronized void add(final long currentGlobalCheckpoint, final GlobalCheckpointListener listener) { + if (closed) { + executor.execute(() -> notifyListener(listener, UNASSIGNED_SEQ_NO, new IndexShardClosedException(shardId))); + return; + } + if (lastKnownGlobalCheckpoint > currentGlobalCheckpoint) { + // notify directly + executor.execute(() -> notifyListener(listener, lastKnownGlobalCheckpoint, null)); + return; + } else { + if (listeners == null) { + listeners = new ArrayList<>(); + } + listeners.add(listener); + } + } + + @Override + public synchronized void close() throws IOException { + closed = true; + notifyListeners(UNASSIGNED_SEQ_NO, new IndexShardClosedException(shardId)); + } + + synchronized int pendingListeners() { + return listeners == null ? 0 : listeners.size(); + } + + /** + * Invoke to notify all registered listeners of an updated global checkpoint. 
+ * + * @param globalCheckpoint the updated global checkpoint + */ + synchronized void globalCheckpointUpdated(final long globalCheckpoint) { + assert globalCheckpoint >= NO_OPS_PERFORMED; + assert globalCheckpoint > lastKnownGlobalCheckpoint + : "updated global checkpoint [" + globalCheckpoint + "]" + + " is not more than the last known global checkpoint [" + lastKnownGlobalCheckpoint + "]"; + lastKnownGlobalCheckpoint = globalCheckpoint; + notifyListeners(globalCheckpoint, null); + } + + private void notifyListeners(final long globalCheckpoint, final IndexShardClosedException e) { + assert Thread.holdsLock(this); + assert (globalCheckpoint == UNASSIGNED_SEQ_NO && e != null) || (globalCheckpoint >= NO_OPS_PERFORMED && e == null); + if (listeners != null) { + // capture the current listeners + final List<GlobalCheckpointListener> currentListeners = listeners; + listeners = null; + if (currentListeners != null) { + executor.execute(() -> { + for (final GlobalCheckpointListener listener : currentListeners) { + notifyListener(listener, globalCheckpoint, e); + } + }); + } + } + } + + private void notifyListener(final GlobalCheckpointListener listener, final long globalCheckpoint, final IndexShardClosedException e) { + try { + listener.accept(globalCheckpoint, e); + } catch (final Exception caught) { + if (globalCheckpoint != UNASSIGNED_SEQ_NO) { + logger.warn( + new ParameterizedMessage( + "error notifying global checkpoint listener of updated global checkpoint [{}]", + globalCheckpoint), + caught); + } else { + logger.warn("error notifying global checkpoint listener of closed shard", caught); + } + } + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index d4a1d0502d0..ffce0e6ea8b 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -52,6 +52,7 @@ import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RecoverySource.SnapshotRecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.CheckedRunnable; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -160,6 +161,8 @@ import java.util.stream.Collectors; import java.util.stream.StreamSupport; import static org.elasticsearch.index.mapper.SourceToParse.source; +import static org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; public class IndexShard extends AbstractIndexShardComponent implements IndicesClusterStateService.Shard { @@ -188,11 +191,13 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl private final SearchOperationListener searchOperationListener; + private final GlobalCheckpointListeners globalCheckpointListeners; private final ReplicationTracker replicationTracker; protected volatile ShardRouting shardRouting; protected volatile IndexShardState state; - protected volatile long primaryTerm; + protected volatile long pendingPrimaryTerm; // see JavaDocs for getPendingPrimaryTerm + protected volatile long operationPrimaryTerm; protected final AtomicReference<Engine> currentEngineReference = new AtomicReference<>(); final EngineFactory engineFactory; @@ -295,8 +300,11 @@ public class IndexShard extends
AbstractIndexShardComponent implements IndicesCl this.checkIndexOnStartup = indexSettings.getValue(IndexSettings.INDEX_CHECK_ON_STARTUP); this.translogConfig = new TranslogConfig(shardId, shardPath().resolveTranslog(), indexSettings, bigArrays); - this.replicationTracker = new ReplicationTracker(shardId, shardRouting.allocationId().getId(), indexSettings, - SequenceNumbers.UNASSIGNED_SEQ_NO); + final String aId = shardRouting.allocationId().getId(); + this.globalCheckpointListeners = new GlobalCheckpointListeners(shardId, threadPool.executor(ThreadPool.Names.LISTENER), logger); + this.replicationTracker = + new ReplicationTracker(shardId, aId, indexSettings, UNASSIGNED_SEQ_NO, globalCheckpointListeners::globalCheckpointUpdated); + // the query cache is a node-level thing, however we want the most popular filters // to be computed on a per-shard basis if (IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.get(settings)) { @@ -315,7 +323,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } indexShardOperationPermits = new IndexShardOperationPermits(shardId, threadPool); searcherWrapper = indexSearcherWrapper; - primaryTerm = indexSettings.getIndexMetaData().primaryTerm(shardId.id()); + pendingPrimaryTerm = indexSettings.getIndexMetaData().primaryTerm(shardId.id()); + operationPrimaryTerm = pendingPrimaryTerm; refreshListeners = buildRefreshListeners(); lastSearcherAccess.set(threadPool.relativeTimeInMillis()); persistMetadata(path, indexSettings, shardRouting, null, logger); @@ -365,10 +374,14 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } /** - * Returns the primary term the index shard is on. See {@link org.elasticsearch.cluster.metadata.IndexMetaData#primaryTerm(int)} + * USE THIS METHOD WITH CARE! + * Returns the primary term the index shard is supposed to be on. In case of primary promotion or when a replica learns about + * a new term due to a new primary, the term that's exposed here will not be the term that the shard internally uses to assign + * to operations. The shard will auto-correct its internal operation term, but this might take time. + * See {@link org.elasticsearch.cluster.metadata.IndexMetaData#primaryTerm(int)} */ - public long getPrimaryTerm() { - return this.primaryTerm; + public long getPendingPrimaryTerm() { + return this.pendingPrimaryTerm; } /** @@ -418,7 +431,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl "a primary relocation is completed by the master, but primary mode is not active " + currentRouting; changeState(IndexShardState.STARTED, "global state is [" + newRouting.state() + "]"); - } else if (currentRouting.primary() && currentRouting.relocating() && replicationTracker.isPrimaryMode() == false && + } else if (currentRouting.primary() && currentRouting.relocating() && replicationTracker.isRelocated() && (newRouting.relocating() == false || newRouting.equalsIgnoringMetaData(currentRouting) == false)) { // if the shard is not in primary mode anymore (after primary relocation) we have to fail when any changes in shard routing occur (e.g. due to recovery // failure / cancellation). 
The reason is that at the moment we cannot safely reactivate primary mode without risking two @@ -431,7 +444,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl final CountDownLatch shardStateUpdated = new CountDownLatch(1); if (newRouting.primary()) { - if (newPrimaryTerm == primaryTerm) { + if (newPrimaryTerm == pendingPrimaryTerm) { if (currentRouting.initializing() && currentRouting.isRelocationTarget() == false && newRouting.active()) { // the master started a recovering primary, activate primary mode. replicationTracker.activatePrimaryMode(getLocalCheckpoint()); @@ -454,10 +467,10 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl assert newRouting.initializing() == false : "a started primary shard should never update its term; " + "shard " + newRouting + ", " - + "current term [" + primaryTerm + "], " + + "current term [" + pendingPrimaryTerm + "], " + "new term [" + newPrimaryTerm + "]"; - assert newPrimaryTerm > primaryTerm : - "primary terms can only go up; current term [" + primaryTerm + "], new term [" + newPrimaryTerm + "]"; + assert newPrimaryTerm > pendingPrimaryTerm : + "primary terms can only go up; current term [" + pendingPrimaryTerm + "], new term [" + newPrimaryTerm + "]"; /* * Before this call returns, we are guaranteed that all future operations are delayed and so this happens before we * increment the primary term. The latch is needed to ensure that we do not unblock operations before the primary term is @@ -468,12 +481,15 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl if (resyncStarted == false) { throw new IllegalStateException("cannot start resync while it's already in progress"); } - indexShardOperationPermits.asyncBlockOperations( - 30, - TimeUnit.MINUTES, + bumpPrimaryTerm(newPrimaryTerm, () -> { shardStateUpdated.await(); + assert pendingPrimaryTerm == newPrimaryTerm : + "shard term changed on primary. expected [" + newPrimaryTerm + "] but was [" + pendingPrimaryTerm + "]" + + ", current routing: " + currentRouting + ", new routing: " + newRouting; + assert operationPrimaryTerm == newPrimaryTerm; try { + replicationTracker.activatePrimaryMode(getLocalCheckpoint()); /* * If this shard was serving as a replica shard when another shard was promoted to primary then the state of * its local checkpoint tracker was reset during the primary term transition. In particular, the local @@ -517,10 +533,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } catch (final AlreadyClosedException e) { // okay, the index was deleted } - }, - e -> failShard("exception during primary term transition", e)); - replicationTracker.activatePrimaryMode(getLocalCheckpoint()); - primaryTerm = newPrimaryTerm; + }); } } // set this last, once we finished updating all internal state. 
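The pendingPrimaryTerm/operationPrimaryTerm handshake above reduces to a latch dance: the pending term moves immediately under the mutex, while the operation term only catches up once all in-flight operation permits are blocked. A toy reduction (hypothetical class; a plain Executor stands in for indexShardOperationPermits.asyncBlockOperations):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;

class TermBumpSketch {
    volatile long pendingPrimaryTerm;
    volatile long operationPrimaryTerm;

    synchronized void bump(final long newTerm, final Runnable onBlocked, final Executor blockExecutor) {
        assert newTerm > pendingPrimaryTerm;
        final CountDownLatch termUpdated = new CountDownLatch(1);
        blockExecutor.execute(() -> { // stands in for asyncBlockOperations(30, TimeUnit.MINUTES, ...)
            try {
                termUpdated.await();
                if (operationPrimaryTerm < newTerm) { // guard against out-of-order async blocks
                    operationPrimaryTerm = newTerm;
                    onBlocked.run();
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        });
        pendingPrimaryTerm = newTerm; // the pending term is visible before the operation term catches up
        termUpdated.countDown();
    }
}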
@@ -528,8 +541,9 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl assert this.shardRouting.primary() == false || this.shardRouting.started() == false || // note that we use started and not active to avoid relocating shards + this.indexShardOperationPermits.isBlocked() || // if permits are blocked, we are still transitioning this.replicationTracker.isPrimaryMode() - : "an started primary must be in primary mode " + this.shardRouting; + : "a started primary with non-pending operation term must be in primary mode " + this.shardRouting; shardStateUpdated.countDown(); } if (currentRouting != null && currentRouting.active() == false && newRouting.active()) { @@ -590,7 +604,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl consumer.accept(primaryContext); synchronized (mutex) { verifyRelocatingState(); - replicationTracker.completeRelocationHandoff(); // make changes to primaryMode flag only under mutex + replicationTracker.completeRelocationHandoff(); // make changes to primaryMode and relocated flag only under mutex } } catch (final Exception e) { try { @@ -655,21 +669,22 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl public Engine.IndexResult applyIndexOperationOnPrimary(long version, VersionType versionType, SourceToParse sourceToParse, long autoGeneratedTimestamp, boolean isRetry) throws IOException { assert versionType.validateVersionForWrites(version); - return applyIndexOperation(SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm, version, versionType, autoGeneratedTimestamp, + return applyIndexOperation(UNASSIGNED_SEQ_NO, operationPrimaryTerm, version, versionType, autoGeneratedTimestamp, isRetry, Engine.Operation.Origin.PRIMARY, sourceToParse); } public Engine.IndexResult applyIndexOperationOnReplica(long seqNo, long version, long autoGeneratedTimeStamp, boolean isRetry, SourceToParse sourceToParse) throws IOException { - return applyIndexOperation(seqNo, primaryTerm, version, null, autoGeneratedTimeStamp, isRetry, + return applyIndexOperation(seqNo, operationPrimaryTerm, version, null, autoGeneratedTimeStamp, isRetry, Engine.Operation.Origin.REPLICA, sourceToParse); } private Engine.IndexResult applyIndexOperation(long seqNo, long opPrimaryTerm, long version, @Nullable VersionType versionType, long autoGeneratedTimeStamp, boolean isRetry, Engine.Operation.Origin origin, SourceToParse sourceToParse) throws IOException { - assert opPrimaryTerm <= this.primaryTerm : "op term [ " + opPrimaryTerm + " ] > shard term [" + this.primaryTerm + "]"; + assert opPrimaryTerm <= this.operationPrimaryTerm: "op term [ " + opPrimaryTerm + " ] > shard term [" + this.operationPrimaryTerm + + "]"; ensureWriteAllowed(origin); Engine.Index operation; try { @@ -686,7 +701,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl // can not raise an exception that may block any replication of previous operations to the // replicas verifyNotClosed(e); - return new Engine.IndexResult(e, version, seqNo); + return new Engine.IndexResult(e, version, opPrimaryTerm, seqNo); } return index(getEngine(), operation); @@ -723,12 +738,13 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } public Engine.NoOpResult markSeqNoAsNoop(long seqNo, String reason) throws IOException { - return markSeqNoAsNoop(seqNo, primaryTerm, reason, Engine.Operation.Origin.REPLICA); + return markSeqNoAsNoop(seqNo, operationPrimaryTerm, reason, Engine.Operation.Origin.REPLICA); } 
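Stepping back to the GlobalCheckpointListeners collection that IndexShard now owns: its add/notify contract reduces to the model below, a sketch that uses LongConsumer in place of the two-argument listener and omits the executor and shard-closed paths. A caller that is already behind is notified immediately; everyone else parks until globalCheckpointUpdated fires, and each listener fires at most once.

import java.util.ArrayList;
import java.util.List;
import java.util.function.LongConsumer;

class GcpListenersSketch {
    private long lastKnown = -2; // stands in for SequenceNumbers.UNASSIGNED_SEQ_NO
    private List<LongConsumer> listeners = new ArrayList<>();

    synchronized void add(final long knownToCaller, final LongConsumer listener) {
        if (lastKnown > knownToCaller) {
            listener.accept(lastKnown); // caller is already behind: notify immediately
        } else {
            listeners.add(listener); // park until the checkpoint advances
        }
    }

    synchronized void globalCheckpointUpdated(final long globalCheckpoint) {
        lastKnown = globalCheckpoint;
        final List<LongConsumer> current = listeners;
        listeners = new ArrayList<>(); // fire-once semantics: listeners must re-register
        for (final LongConsumer listener : current) {
            listener.accept(globalCheckpoint);
        }
    }
}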
private Engine.NoOpResult markSeqNoAsNoop(long seqNo, long opPrimaryTerm, String reason, Engine.Operation.Origin origin) throws IOException { - assert opPrimaryTerm <= this.primaryTerm : "op term [ " + opPrimaryTerm + " ] > shard term [" + this.primaryTerm + "]"; + assert opPrimaryTerm <= this.operationPrimaryTerm : "op term [ " + opPrimaryTerm + " ] > shard term [" + this.operationPrimaryTerm + + "]"; long startTime = System.nanoTime(); ensureWriteAllowed(origin); final Engine.NoOp noOp = new Engine.NoOp(seqNo, opPrimaryTerm, origin, startTime, reason); @@ -743,20 +759,29 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl return engine.noOp(noOp); } + public Engine.IndexResult getFailedIndexResult(Exception e, long version) { + return new Engine.IndexResult(e, version, operationPrimaryTerm); + } + + public Engine.DeleteResult getFailedDeleteResult(Exception e, long version) { + return new Engine.DeleteResult(e, version, operationPrimaryTerm); + } + public Engine.DeleteResult applyDeleteOperationOnPrimary(long version, String type, String id, VersionType versionType) throws IOException { assert versionType.validateVersionForWrites(version); - return applyDeleteOperation(SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm, version, type, id, versionType, + return applyDeleteOperation(UNASSIGNED_SEQ_NO, operationPrimaryTerm, version, type, id, versionType, Engine.Operation.Origin.PRIMARY); } public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, long version, String type, String id) throws IOException { - return applyDeleteOperation(seqNo, primaryTerm, version, type, id, null, Engine.Operation.Origin.REPLICA); + return applyDeleteOperation(seqNo, operationPrimaryTerm, version, type, id, null, Engine.Operation.Origin.REPLICA); } private Engine.DeleteResult applyDeleteOperation(long seqNo, long opPrimaryTerm, long version, String type, String id, @Nullable VersionType versionType, Engine.Operation.Origin origin) throws IOException { - assert opPrimaryTerm <= this.primaryTerm : "op term [ " + opPrimaryTerm + " ] > shard term [" + this.primaryTerm + "]"; + assert opPrimaryTerm <= this.operationPrimaryTerm : "op term [ " + opPrimaryTerm + " ] > shard term [" + this.operationPrimaryTerm + + "]"; ensureWriteAllowed(origin); // When there is a single type, the unique identifier is only composed of the _id, // so there is no way to differenciate foo#1 from bar#1. 
This is especially an issue @@ -772,7 +797,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl return new Engine.DeleteResult(update); } } catch (MapperParsingException | IllegalArgumentException | TypeMissingException e) { - return new Engine.DeleteResult(e, version, seqNo, false); + return new Engine.DeleteResult(e, version, operationPrimaryTerm, seqNo, false); } final Term uid = extractUidForDelete(type, id); final Engine.Delete delete = prepareDelete(type, id, uid, seqNo, opPrimaryTerm, version, @@ -1172,7 +1197,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } finally { // playing safe here and close the engine even if the above succeeds - close can be called multiple times // Also closing refreshListeners to prevent us from accumulating any more listeners - IOUtils.close(engine, refreshListeners); + IOUtils.close(engine, globalCheckpointListeners, refreshListeners); indexShardOperationPermits.close(); } } @@ -1209,7 +1234,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } public void trimOperationOfPreviousPrimaryTerms(long aboveSeqNo) { - getEngine().trimOperationsFromTranslog(primaryTerm, aboveSeqNo); + getEngine().trimOperationsFromTranslog(operationPrimaryTerm, aboveSeqNo); } public Engine.Result applyTranslogOperation(Translog.Operation operation, Engine.Operation.Origin origin) throws IOException { @@ -1427,10 +1452,10 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } } else { if (origin == Engine.Operation.Origin.PRIMARY) { - verifyPrimary(); + assert assertPrimaryMode(); } else { assert origin == Engine.Operation.Origin.REPLICA; - verifyReplicationTarget(); + assert assertReplicationTarget(); } if (writeAllowedStates.contains(state) == false) { throw new IllegalIndexShardStateException(shardId, state, "operation only allowed when shard state is one of " + writeAllowedStates + ", origin [" + origin + "]"); @@ -1438,19 +1463,14 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } } - private void verifyPrimary() { - if (shardRouting.primary() == false) { - throw new IllegalStateException("shard " + shardRouting + " is not a primary"); - } + private boolean assertPrimaryMode() { + assert shardRouting.primary() && replicationTracker.isPrimaryMode() : "shard " + shardRouting + " is not a primary shard in primary mode"; + return true; } - private void verifyReplicationTarget() { - final IndexShardState state = state(); - if (shardRouting.primary() && shardRouting.active() && replicationTracker.isPrimaryMode()) { - // must use exception that is not ignored by replication logic. 
See TransportActions.isShardNotAvailableException - throw new IllegalStateException("active primary shard " + shardRouting + " cannot be a replication target before " + - "relocation hand off, state is [" + state + "]"); - } + private boolean assertReplicationTarget() { + assert replicationTracker.isPrimaryMode() == false : "shard " + shardRouting + " in primary mode cannot be a replication target"; + return true; } private void verifyNotClosed() throws IllegalIndexShardStateException { @@ -1697,7 +1717,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * @param checkpoint the local checkpoint for the shard */ public void updateLocalCheckpointForShard(final String allocationId, final long checkpoint) { - verifyPrimary(); + assert assertPrimaryMode(); verifyNotClosed(); replicationTracker.updateLocalCheckpoint(allocationId, checkpoint); } @@ -1709,11 +1729,24 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * @param globalCheckpoint the global checkpoint */ public void updateGlobalCheckpointForShard(final String allocationId, final long globalCheckpoint) { - verifyPrimary(); + assert assertPrimaryMode(); verifyNotClosed(); replicationTracker.updateGlobalCheckpointForShard(allocationId, globalCheckpoint); } + /** + * Add a global checkpoint listener. If the global checkpoint is above the current global checkpoint known to the listener then the + * listener will fire immediately on the calling thread. + * + * @param currentGlobalCheckpoint the current global checkpoint known to the listener + * @param listener the listener + */ + public void addGlobalCheckpointListener( + final long currentGlobalCheckpoint, + final GlobalCheckpointListeners.GlobalCheckpointListener listener) { + this.globalCheckpointListeners.add(currentGlobalCheckpoint, listener); + } + /** * Waits for all operations up to the provided sequence number to complete. * @@ -1731,7 +1764,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * @param allocationId the allocation ID of the shard for which recovery was initiated */ public void initiateTracking(final String allocationId) { - verifyPrimary(); + assert assertPrimaryMode(); replicationTracker.initiateTracking(allocationId); } @@ -1744,7 +1777,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * @param localCheckpoint the current local checkpoint on the shard */ public void markAllocationIdAsInSync(final String allocationId, final long localCheckpoint) throws InterruptedException { - verifyPrimary(); + assert assertPrimaryMode(); replicationTracker.markAllocationIdAsInSync(allocationId, localCheckpoint); } @@ -1779,7 +1812,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * @return a map from allocation ID to the local knowledge of the global checkpoint for that allocation ID */ public ObjectLongMap getInSyncGlobalCheckpoints() { - verifyPrimary(); + assert assertPrimaryMode(); verifyNotClosed(); return replicationTracker.getInSyncGlobalCheckpoints(); } @@ -1789,11 +1822,12 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * primary. 
*/ public void maybeSyncGlobalCheckpoint(final String reason) { - verifyPrimary(); verifyNotClosed(); + assert shardRouting.primary() : "only call maybeSyncGlobalCheckpoint on primary shard"; if (replicationTracker.isPrimaryMode() == false) { return; } + assert assertPrimaryMode(); // only sync if there are not operations in flight final SeqNoStats stats = getEngine().getSeqNoStats(replicationTracker.getGlobalCheckpoint()); if (stats.getMaxSeqNo() == stats.getGlobalCheckpoint()) { @@ -1819,7 +1853,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * @return the replication group */ public ReplicationGroup getReplicationGroup() { - verifyPrimary(); + assert assertPrimaryMode(); verifyNotClosed(); return replicationTracker.getReplicationGroup(); } @@ -1831,7 +1865,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * @param reason the reason the global checkpoint was updated */ public void updateGlobalCheckpointOnReplica(final long globalCheckpoint, final String reason) { - verifyReplicationTarget(); + assert assertReplicationTarget(); final long localCheckpoint = getLocalCheckpoint(); if (globalCheckpoint > localCheckpoint) { /* @@ -1858,8 +1892,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * @param primaryContext the sequence number context */ public void activateWithPrimaryContext(final ReplicationTracker.PrimaryContext primaryContext) { - verifyPrimary(); - assert shardRouting.isRelocationTarget() : "only relocation target can update allocation IDs from primary context: " + shardRouting; + assert shardRouting.primary() && shardRouting.isRelocationTarget() : "only primary relocation target can update allocation IDs from primary context: " + shardRouting; assert primaryContext.getCheckpointStates().containsKey(routingEntry().allocationId().getId()) && getLocalCheckpoint() == primaryContext.getCheckpointStates().get(routingEntry().allocationId().getId()).getLocalCheckpoint(); synchronized (mutex) { @@ -1873,7 +1906,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * @return {@code true} if there is at least one shard pending in-sync, otherwise false */ public boolean pendingInSync() { - verifyPrimary(); + assert assertPrimaryMode(); return replicationTracker.pendingInSync(); } @@ -2082,10 +2115,11 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } /** - * Returns whether the shard is in primary mode, i.e., in charge of replicating changes (see {@link ReplicationTracker}). + * Returns whether the shard is a relocated primary, i.e. not in charge anymore of replicating changes (see {@link ReplicationTracker}). 
*/ - public boolean isPrimaryMode() { - return replicationTracker.isPrimaryMode(); + public boolean isRelocatedPrimary() { + assert shardRouting.primary() : "only call isRelocatedPrimary on primary shard"; + return replicationTracker.isRelocated(); } class ShardEventListener implements Engine.EventListener { @@ -2175,7 +2209,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING.get(indexSettings.getSettings()), Collections.singletonList(refreshListeners), Collections.singletonList(new RefreshMetricUpdater(refreshMetric)), - indexSort, this::runTranslogRecovery, circuitBreakerService, replicationTracker, this::getPrimaryTerm); + indexSort, this::runTranslogRecovery, circuitBreakerService, replicationTracker, () -> operationPrimaryTerm); } /** @@ -2189,12 +2223,36 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl */ public void acquirePrimaryOperationPermit(ActionListener onPermitAcquired, String executorOnDelay, Object debugInfo) { verifyNotClosed(); - verifyPrimary(); + assert shardRouting.primary() : "acquirePrimaryOperationPermit should only be called on primary shard: " + shardRouting; indexShardOperationPermits.acquire(onPermitAcquired, executorOnDelay, false, debugInfo); } - private final Object primaryTermMutex = new Object(); + private void bumpPrimaryTerm(long newPrimaryTerm, final CheckedRunnable onBlocked) { + assert Thread.holdsLock(mutex); + assert newPrimaryTerm > pendingPrimaryTerm; + assert operationPrimaryTerm <= pendingPrimaryTerm; + final CountDownLatch termUpdated = new CountDownLatch(1); + indexShardOperationPermits.asyncBlockOperations(30, TimeUnit.MINUTES, () -> { + assert operationPrimaryTerm <= pendingPrimaryTerm; + termUpdated.await(); + // indexShardOperationPermits doesn't guarantee that async submissions are executed + // in the order submitted. We need to guard against another term bump + if (operationPrimaryTerm < newPrimaryTerm) { + operationPrimaryTerm = newPrimaryTerm; + onBlocked.run(); + } + }, + e -> { + try { + failShard("exception during primary term transition", e); + } catch (AlreadyClosedException ace) { + // ignore, shard is already closed + } + }); + pendingPrimaryTerm = newPrimaryTerm; + termUpdated.countDown(); + } /** * Acquire a replica operation permit whenever the shard is ready for indexing (see @@ -2203,7 +2261,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * {@link IllegalStateException}. If permit acquisition is delayed, the listener will be invoked on the executor with the specified * name. * - * @param operationPrimaryTerm the operation primary term + * @param opPrimaryTerm the operation primary term * @param globalCheckpoint the global checkpoint associated with the request * @param onPermitAcquired the listener for permit acquisition * @param executorOnDelay the name of the executor to invoke the listener on if permit acquisition is delayed @@ -2211,15 +2269,13 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * the tracing will capture the supplied object's {@link Object#toString()} value. 
Otherwise the object * isn't used */ - public void acquireReplicaOperationPermit(final long operationPrimaryTerm, final long globalCheckpoint, + public void acquireReplicaOperationPermit(final long opPrimaryTerm, final long globalCheckpoint, final ActionListener onPermitAcquired, final String executorOnDelay, final Object debugInfo) { verifyNotClosed(); - verifyReplicationTarget(); - final boolean globalCheckpointUpdated; - if (operationPrimaryTerm > primaryTerm) { - synchronized (primaryTermMutex) { - if (operationPrimaryTerm > primaryTerm) { + if (opPrimaryTerm > pendingPrimaryTerm) { + synchronized (mutex) { + if (opPrimaryTerm > pendingPrimaryTerm) { IndexShardState shardState = state(); // only roll translog and update primary term if shard has made it past recovery // Having a new primary term here means that the old primary failed and that there is a new primary, which again @@ -2229,64 +2285,53 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl shardState != IndexShardState.STARTED) { throw new IndexShardNotStartedException(shardId, shardState); } - try { - indexShardOperationPermits.blockOperations(30, TimeUnit.MINUTES, () -> { - assert operationPrimaryTerm > primaryTerm : - "shard term already update. op term [" + operationPrimaryTerm + "], shardTerm [" + primaryTerm + "]"; - primaryTerm = operationPrimaryTerm; - updateGlobalCheckpointOnReplica(globalCheckpoint, "primary term transition"); - final long currentGlobalCheckpoint = getGlobalCheckpoint(); - final long localCheckpoint; - if (currentGlobalCheckpoint == SequenceNumbers.UNASSIGNED_SEQ_NO) { - localCheckpoint = SequenceNumbers.NO_OPS_PERFORMED; - } else { - localCheckpoint = currentGlobalCheckpoint; - } - logger.trace( + + if (opPrimaryTerm > pendingPrimaryTerm) { + bumpPrimaryTerm(opPrimaryTerm, () -> { + updateGlobalCheckpointOnReplica(globalCheckpoint, "primary term transition"); + final long currentGlobalCheckpoint = getGlobalCheckpoint(); + final long localCheckpoint; + if (currentGlobalCheckpoint == UNASSIGNED_SEQ_NO) { + localCheckpoint = NO_OPS_PERFORMED; + } else { + localCheckpoint = currentGlobalCheckpoint; + } + logger.trace( "detected new primary with primary term [{}], resetting local checkpoint from [{}] to [{}]", - operationPrimaryTerm, + opPrimaryTerm, getLocalCheckpoint(), localCheckpoint); - getEngine().resetLocalCheckpoint(localCheckpoint); - getEngine().rollTranslogGeneration(); + getEngine().resetLocalCheckpoint(localCheckpoint); + getEngine().rollTranslogGeneration(); }); - globalCheckpointUpdated = true; - } catch (final Exception e) { - onPermitAcquired.onFailure(e); - return; } - } else { - globalCheckpointUpdated = false; } } - } else { - globalCheckpointUpdated = false; } - assert operationPrimaryTerm <= primaryTerm - : "operation primary term [" + operationPrimaryTerm + "] should be at most [" + primaryTerm + "]"; + assert opPrimaryTerm <= pendingPrimaryTerm + : "operation primary term [" + opPrimaryTerm + "] should be at most [" + pendingPrimaryTerm + "]"; indexShardOperationPermits.acquire( new ActionListener() { @Override public void onResponse(final Releasable releasable) { - if (operationPrimaryTerm < primaryTerm) { + if (opPrimaryTerm < operationPrimaryTerm) { releasable.close(); final String message = String.format( Locale.ROOT, "%s operation primary term [%d] is too old (current [%d])", shardId, - operationPrimaryTerm, - primaryTerm); + opPrimaryTerm, + operationPrimaryTerm); onPermitAcquired.onFailure(new IllegalStateException(message)); } else { - if 
(globalCheckpointUpdated == false) { - try { - updateGlobalCheckpointOnReplica(globalCheckpoint, "operation"); - } catch (Exception e) { - releasable.close(); - onPermitAcquired.onFailure(e); - return; - } + assert assertReplicationTarget(); + try { + updateGlobalCheckpointOnReplica(globalCheckpoint, "operation"); + } catch (Exception e) { + releasable.close(); + onPermitAcquired.onFailure(e); + return; } onPermitAcquired.onResponse(releasable); } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java index a0d46c8eb23..d4c3833b13a 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.shard; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Assertions; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; @@ -29,10 +28,12 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ThreadContext.StoredContext; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.threadpool.ThreadPool; import java.io.Closeable; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -59,7 +60,7 @@ final class IndexShardOperationPermits implements Closeable { final Semaphore semaphore = new Semaphore(TOTAL_PERMITS, true); // fair to ensure a blocking thread is not starved private final List delayedOperations = new ArrayList<>(); // operations that are delayed private volatile boolean closed; - private boolean delayed; // does not need to be volatile as all accesses are done under a lock on this + private int queuedBlockOperations; // does not need to be volatile as all accesses are done under a lock on this // only valid when assertions are enabled. Key is AtomicBoolean associated with each permit to ensure close once semantics. 
// Value is a tuple, with a some debug information supplied by the caller and a stack trace of the acquiring thread @@ -102,9 +103,6 @@ final class IndexShardOperationPermits implements Closeable { final long timeout, final TimeUnit timeUnit, final CheckedRunnable onBlocked) throws InterruptedException, TimeoutException, E { - if (closed) { - throw new IndexShardClosedException(shardId); - } delayOperations(); try { doBlockOperations(timeout, timeUnit, onBlocked); @@ -147,13 +145,12 @@ final class IndexShardOperationPermits implements Closeable { } private void delayOperations() { + if (closed) { + throw new IndexShardClosedException(shardId); + } synchronized (this) { - if (delayed) { - throw new IllegalStateException("operations are already delayed"); - } else { - assert delayedOperations.isEmpty(); - delayed = true; - } + assert queuedBlockOperations > 0 || delayedOperations.isEmpty(); + queuedBlockOperations++; } } @@ -164,7 +161,7 @@ final class IndexShardOperationPermits implements Closeable { if (Assertions.ENABLED) { // since delayed is not volatile, we have to synchronize even here for visibility synchronized (this) { - assert delayed; + assert queuedBlockOperations > 0; } } if (semaphore.tryAcquire(TOTAL_PERMITS, timeout, timeUnit)) { @@ -182,10 +179,14 @@ final class IndexShardOperationPermits implements Closeable { private void releaseDelayedOperations() { final List queuedActions; synchronized (this) { - assert delayed; - queuedActions = new ArrayList<>(delayedOperations); - delayedOperations.clear(); - delayed = false; + assert queuedBlockOperations > 0; + queuedBlockOperations--; + if (queuedBlockOperations == 0) { + queuedActions = new ArrayList<>(delayedOperations); + delayedOperations.clear(); + } else { + queuedActions = Collections.emptyList(); + } } if (!queuedActions.isEmpty()) { /* @@ -242,7 +243,7 @@ final class IndexShardOperationPermits implements Closeable { final Releasable releasable; try { synchronized (this) { - if (delayed) { + if (queuedBlockOperations > 0) { final Supplier contextSupplier = threadPool.getThreadContext().newRestorableContext(false); final ActionListener wrappedListener; if (executorOnDelay != null) { @@ -308,6 +309,11 @@ final class IndexShardOperationPermits implements Closeable { } } + + synchronized boolean isBlocked() { + return queuedBlockOperations > 0; + } + /** * @return a list of describing each permit that wasn't released yet. The description consist of the debugInfo supplied * when the permit was acquired plus a stack traces that was captured when the permit was request. 
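The switch above from a boolean delayed flag to a queuedBlockOperations counter is what allows several blocks (for example, back-to-back term bumps) to overlap: delayed operations are released only when the count returns to zero. A compact model under assumed names:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

class DelayCounterSketch {
    private int queuedBlockOperations; // guarded by this
    private final List<Runnable> delayedOperations = new ArrayList<>();

    synchronized void delayOperations() {
        queuedBlockOperations++;
    }

    synchronized List<Runnable> releaseDelayedOperations() {
        assert queuedBlockOperations > 0;
        queuedBlockOperations--;
        if (queuedBlockOperations == 0) {
            final List<Runnable> queued = new ArrayList<>(delayedOperations);
            delayedOperations.clear();
            return queued; // the caller runs these outside the lock, as the real code does
        }
        return Collections.emptyList();
    }

    synchronized void onOperation(final Runnable operation) {
        if (queuedBlockOperations > 0) {
            delayedOperations.add(operation); // a block is pending or active: defer
        } else {
            operation.run();
        }
    }
}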
diff --git a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java index e66d78f2e1a..1edc0eb5dca 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java +++ b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java @@ -136,7 +136,7 @@ public class PrimaryReplicaSyncer extends AbstractComponent { } }; - resync(shardId, indexShard.routingEntry().allocationId().getId(), indexShard.getPrimaryTerm(), wrappedSnapshot, + resync(shardId, indexShard.routingEntry().allocationId().getId(), indexShard.getPendingPrimaryTerm(), wrappedSnapshot, startingSeqNo, maxSeqNo, resyncListener); } catch (Exception e) { try { diff --git a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java index 54718c545a4..e9acfe3d8b0 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java @@ -394,7 +394,7 @@ final class StoreRecovery { final SegmentInfos segmentInfos = store.readLastCommittedSegmentsInfo(); final long maxSeqNo = Long.parseLong(segmentInfos.userData.get(SequenceNumbers.MAX_SEQ_NO)); final String translogUUID = Translog.createEmptyTranslog( - indexShard.shardPath().resolveTranslog(), maxSeqNo, shardId, indexShard.getPrimaryTerm()); + indexShard.shardPath().resolveTranslog(), maxSeqNo, shardId, indexShard.getPendingPrimaryTerm()); store.associateIndexWithNewTranslog(translogUUID); } else if (indexShouldExists) { // since we recover from local, just fill the files and size @@ -409,11 +409,12 @@ final class StoreRecovery { } else { store.createEmpty(); final String translogUUID = Translog.createEmptyTranslog( - indexShard.shardPath().resolveTranslog(), SequenceNumbers.NO_OPS_PERFORMED, shardId, indexShard.getPrimaryTerm()); + indexShard.shardPath().resolveTranslog(), SequenceNumbers.NO_OPS_PERFORMED, shardId, + indexShard.getPendingPrimaryTerm()); store.associateIndexWithNewTranslog(translogUUID); } indexShard.openEngineAndRecoverFromTranslog(); - indexShard.getEngine().fillSeqNoGaps(indexShard.getPrimaryTerm()); + indexShard.getEngine().fillSeqNoGaps(indexShard.getPendingPrimaryTerm()); indexShard.finalizeRecovery(); indexShard.postRecovery("post recovery from shard_store"); } catch (EngineException | IOException e) { @@ -458,11 +459,11 @@ final class StoreRecovery { final SegmentInfos segmentInfos = store.readLastCommittedSegmentsInfo(); final long maxSeqNo = Long.parseLong(segmentInfos.userData.get(SequenceNumbers.MAX_SEQ_NO)); final String translogUUID = Translog.createEmptyTranslog( - indexShard.shardPath().resolveTranslog(), maxSeqNo, shardId, indexShard.getPrimaryTerm()); + indexShard.shardPath().resolveTranslog(), maxSeqNo, shardId, indexShard.getPendingPrimaryTerm()); store.associateIndexWithNewTranslog(translogUUID); assert indexShard.shardRouting.primary() : "only primary shards can recover from store"; indexShard.openEngineAndRecoverFromTranslog(); - indexShard.getEngine().fillSeqNoGaps(indexShard.getPrimaryTerm()); + indexShard.getEngine().fillSeqNoGaps(indexShard.getPendingPrimaryTerm()); indexShard.finalizeRecovery(); indexShard.postRecovery("restore done"); } catch (Exception e) { diff --git a/server/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java b/server/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java index 
ff226ae00be..41c3252eab0 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java +++ b/server/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java @@ -79,7 +79,9 @@ public abstract class BaseTranslogReader implements Comparable maxSize) { - throw new TranslogCorruptedException("operation size is corrupted must be [0.." + maxSize + "] but was: " + size); + throw new TranslogCorruptedException( + path.toString(), + "operation size is corrupted must be [0.." + maxSize + "] but was: " + size); } return size; } @@ -103,14 +105,16 @@ public abstract class BaseTranslogReader implements Comparable getPrimaryTerm() && getPrimaryTerm() != TranslogHeader.UNKNOWN_PRIMARY_TERM) { - throw new TranslogCorruptedException("Operation's term is newer than translog header term; " + - "operation term[" + op.primaryTerm() + "], translog header term [" + getPrimaryTerm() + "]"); + throw new TranslogCorruptedException( + path.toString(), + "operation's term is newer than translog header term; " + + "operation term[" + op.primaryTerm() + "], translog header term [" + getPrimaryTerm() + "]"); } return op; } diff --git a/server/src/main/java/org/elasticsearch/index/translog/BufferedChecksumStreamInput.java b/server/src/main/java/org/elasticsearch/index/translog/BufferedChecksumStreamInput.java index 37740b460b7..8e815d3599a 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/BufferedChecksumStreamInput.java +++ b/server/src/main/java/org/elasticsearch/index/translog/BufferedChecksumStreamInput.java @@ -35,14 +35,11 @@ public final class BufferedChecksumStreamInput extends FilterStreamInput { private static final int SKIP_BUFFER_SIZE = 1024; private byte[] skipBuffer; private final Checksum digest; + private final String source; - public BufferedChecksumStreamInput(StreamInput in) { - super(in); - this.digest = new BufferedChecksum(new CRC32()); - } - - public BufferedChecksumStreamInput(StreamInput in, BufferedChecksumStreamInput reuse) { + public BufferedChecksumStreamInput(StreamInput in, String source, BufferedChecksumStreamInput reuse) { super(in); + this.source = source; if (reuse == null ) { this.digest = new BufferedChecksum(new CRC32()); } else { @@ -52,6 +49,10 @@ public final class BufferedChecksumStreamInput extends FilterStreamInput { } } + public BufferedChecksumStreamInput(StreamInput in, String source) { + this(in, source, null); + } + public long getChecksum() { return this.digest.getValue(); } @@ -85,7 +86,6 @@ public final class BufferedChecksumStreamInput extends FilterStreamInput { return delegate.markSupported(); } - @Override public long skip(long numBytes) throws IOException { if (numBytes < 0) { @@ -104,7 +104,6 @@ public final class BufferedChecksumStreamInput extends FilterStreamInput { return skipped; } - @Override public synchronized void mark(int readlimit) { delegate.mark(readlimit); @@ -114,4 +113,7 @@ public final class BufferedChecksumStreamInput extends FilterStreamInput { digest.reset(); } + public String getSource(){ + return source; + } } diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java index 31404b7874a..e426b3a7253 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -491,7 +491,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC try (ReleasableLock ignored = 
readLock.acquire()) { ensureOpen(); if (operation.primaryTerm() > current.getPrimaryTerm()) { - throw new IllegalArgumentException("Operation term is newer than the current term;" + assert false : + "Operation term is newer than the current term; " + + "current term[" + current.getPrimaryTerm() + "], operation term[" + operation + "]"; + throw new IllegalArgumentException("Operation term is newer than the current term; " + "current term[" + current.getPrimaryTerm() + "], operation term[" + operation + "]"); } return current.add(bytes, operation.seqNo()); @@ -1424,7 +1427,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC long expectedChecksum = in.getChecksum(); long readChecksum = Integer.toUnsignedLong(in.readInt()); if (readChecksum != expectedChecksum) { - throw new TranslogCorruptedException("translog stream is corrupted, expected: 0x" + + throw new TranslogCorruptedException(in.getSource(), "checksum verification failed - expected: 0x" + Long.toHexString(expectedChecksum) + ", got: 0x" + Long.toHexString(readChecksum)); } } @@ -1432,10 +1435,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC /** * Reads a list of operations written with {@link #writeOperations(StreamOutput, List)} */ - public static List readOperations(StreamInput input) throws IOException { + public static List readOperations(StreamInput input, String source) throws IOException { ArrayList operations = new ArrayList<>(); int numOps = input.readInt(); - final BufferedChecksumStreamInput checksumStreamInput = new BufferedChecksumStreamInput(input); + final BufferedChecksumStreamInput checksumStreamInput = new BufferedChecksumStreamInput(input, source); for (int i = 0; i < numOps; i++) { operations.add(readOperation(checksumStreamInput)); } @@ -1447,7 +1450,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC try { final int opSize = in.readInt(); if (opSize < 4) { // 4byte for the checksum - throw new TranslogCorruptedException("operation size must be at least 4 but was: " + opSize); + throw new TranslogCorruptedException(in.getSource(), "operation size must be at least 4 but was: " + opSize); } in.resetDigest(); // size is not part of the checksum! if (in.markSupported()) { // if we can we validate the checksum first @@ -1462,17 +1465,15 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } operation = Translog.Operation.readOperation(in); verifyChecksum(in); - } catch (TranslogCorruptedException e) { - throw e; } catch (EOFException e) { - throw new TruncatedTranslogException("reached premature end of file, translog is truncated", e); + throw new TruncatedTranslogException(in.getSource(), "reached premature end of file, translog is truncated", e); } return operation; } /** * Writes all operations in the given iterable to the given output stream including the size of the array - * use {@link #readOperations(StreamInput)} to read it back. + * use {@link #readOperations(StreamInput, String)} to read it back. */ public static void writeOperations(StreamOutput outStream, List toWrite) throws IOException { final ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(BigArrays.NON_RECYCLING_INSTANCE); @@ -1713,7 +1714,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } catch (TranslogCorruptedException ex) { throw ex; // just bubble up. 
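// A sketch of the message produced by the source-aware constructors introduced in this
// change (the path value is hypothetical): new TranslogCorruptedException(
//     "/data/nodes/0/indices/.../translog-5.tlog", "checksum verification failed")
// yields "translog from source [/data/nodes/0/indices/.../translog-5.tlog] is corrupted,
// checksum verification failed", while the (source, cause) variant used below omits the
// details suffix.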
} catch (Exception ex) { - throw new TranslogCorruptedException("Translog at [" + location + "] is corrupted", ex); + throw new TranslogCorruptedException(location.toString(), ex); } return checkpoint; } diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogCorruptedException.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogCorruptedException.java index 07700b3037c..ab1a48b2167 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogCorruptedException.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogCorruptedException.java @@ -25,15 +25,27 @@ import org.elasticsearch.common.io.stream.StreamInput; import java.io.IOException; public class TranslogCorruptedException extends ElasticsearchException { - public TranslogCorruptedException(String msg) { - super(msg); + public TranslogCorruptedException(String source, String details) { + super(corruptedMessage(source, details)); } - public TranslogCorruptedException(String msg, Throwable cause) { - super(msg, cause); + public TranslogCorruptedException(String source, Throwable cause) { + this(source, null, cause); } - public TranslogCorruptedException(StreamInput in) throws IOException{ + public TranslogCorruptedException(String source, String details, Throwable cause) { + super(corruptedMessage(source, details), cause); + } + + private static String corruptedMessage(String source, String details) { + String msg = "translog from source [" + source + "] is corrupted"; + if (details != null) { + msg += ", " + details; + } + return msg; + } + + public TranslogCorruptedException(StreamInput in) throws IOException { super(in); } } diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogHeader.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogHeader.java index 0fde24d8bb4..20aadf21bcb 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogHeader.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogHeader.java @@ -110,13 +110,15 @@ final class TranslogHeader { static TranslogHeader read(final String translogUUID, final Path path, final FileChannel channel) throws IOException { // This input is intentionally not closed because closing it will close the FileChannel. final BufferedChecksumStreamInput in = - new BufferedChecksumStreamInput(new InputStreamStreamInput(java.nio.channels.Channels.newInputStream(channel), channel.size())); + new BufferedChecksumStreamInput( + new InputStreamStreamInput(java.nio.channels.Channels.newInputStream(channel), channel.size()), + path.toString()); final int version; try { version = CodecUtil.checkHeader(new InputStreamDataInput(in), TRANSLOG_CODEC, VERSION_CHECKSUMS, VERSION_PRIMARY_TERM); } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException e) { tryReportOldVersionError(path, channel); - throw new TranslogCorruptedException("Translog header corrupted. 
path:" + path, e); + throw new TranslogCorruptedException(path.toString(), "translog header corrupted", e); } if (version == VERSION_CHECKSUMS) { throw new IllegalStateException("pre-2.0 translog found [" + path + "]"); @@ -124,15 +126,19 @@ final class TranslogHeader { // Read the translogUUID final int uuidLen = in.readInt(); if (uuidLen > channel.size()) { - throw new TranslogCorruptedException("uuid length can't be larger than the translog"); + throw new TranslogCorruptedException( + path.toString(), + "UUID length can't be larger than the translog"); } final BytesRef uuid = new BytesRef(uuidLen); uuid.length = uuidLen; in.read(uuid.bytes, uuid.offset, uuid.length); final BytesRef expectedUUID = new BytesRef(translogUUID); if (uuid.bytesEquals(expectedUUID) == false) { - throw new TranslogCorruptedException("expected shard UUID " + expectedUUID + " but got: " + uuid + - " this translog file belongs to a different translog. path:" + path); + throw new TranslogCorruptedException( + path.toString(), + "expected shard UUID " + expectedUUID + " but got: " + uuid + + " this translog file belongs to a different translog"); } // Read the primary term final long primaryTerm; @@ -164,7 +170,9 @@ final class TranslogHeader { // 0x00 => version 0 of the translog final byte b1 = Channels.readFromFileChannel(channel, 0, 1)[0]; if (b1 == 0x3f) { // LUCENE_CODEC_HEADER_BYTE - throw new TranslogCorruptedException("translog looks like version 1 or later, but has corrupted header. path:" + path); + throw new TranslogCorruptedException( + path.toString(), + "translog looks like version 1 or later, but has corrupted header" ); } else if (b1 == 0x00) { // UNVERSIONED_TRANSLOG_HEADER_BYTE throw new IllegalStateException("pre-1.4 translog found [" + path + "]"); } diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index c135facc67f..b779644cd5c 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -200,8 +200,10 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { } else if (seenSequenceNumbers.containsKey(seqNo)) { final Tuple previous = seenSequenceNumbers.get(seqNo); if (previous.v1().equals(data) == false) { - Translog.Operation newOp = Translog.readOperation(new BufferedChecksumStreamInput(data.streamInput())); - Translog.Operation prvOp = Translog.readOperation(new BufferedChecksumStreamInput(previous.v1().streamInput())); + Translog.Operation newOp = Translog.readOperation( + new BufferedChecksumStreamInput(data.streamInput(), "assertion")); + Translog.Operation prvOp = Translog.readOperation( + new BufferedChecksumStreamInput(previous.v1().streamInput(), "assertion")); if (newOp.equals(prvOp) == false) { throw new AssertionError( "seqNo [" + seqNo + "] was processed twice in generation [" + generation + "], with different data. 
" + @@ -220,7 +222,8 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { .forEach(e -> { final Translog.Operation op; try { - op = Translog.readOperation(new BufferedChecksumStreamInput(e.getValue().v1().streamInput())); + op = Translog.readOperation( + new BufferedChecksumStreamInput(e.getValue().v1().streamInput(), "assertion")); } catch (IOException ex) { throw new RuntimeException(ex); } diff --git a/server/src/main/java/org/elasticsearch/index/translog/TruncatedTranslogException.java b/server/src/main/java/org/elasticsearch/index/translog/TruncatedTranslogException.java index e04eb58068d..5e0be02b7fc 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TruncatedTranslogException.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TruncatedTranslogException.java @@ -25,11 +25,12 @@ import java.io.IOException; public class TruncatedTranslogException extends TranslogCorruptedException { - public TruncatedTranslogException(String msg, Throwable cause) { - super(msg, cause); - } - public TruncatedTranslogException(StreamInput in) throws IOException { super(in); } + + public TruncatedTranslogException(String source, String details, Throwable cause) { + super(source, details, cause); + } + } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 45500349865..352f07d5764 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -250,7 +250,7 @@ public class RecoverySourceHandler { try (Releasable ignored = FutureUtils.get(permit)) { // check that the IndexShard still has the primary authority. This needs to be checked under operation permit to prevent // races, as IndexShard will switch its authority only when it holds all operation permits, see IndexShard.relocated() - if (primary.isPrimaryMode() == false) { + if (primary.isRelocatedPrimary()) { throw new IndexShardRelocatedException(primary.shardId()); } runnable.run(); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java index 57deb4666da..1a772f0c3f8 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java @@ -443,7 +443,8 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget } // TODO: Assign the global checkpoint to the max_seqno of the safe commit if the index version >= 6.2 final String translogUUID = Translog.createEmptyTranslog( - indexShard.shardPath().resolveTranslog(), SequenceNumbers.UNASSIGNED_SEQ_NO, shardId, indexShard.getPrimaryTerm()); + indexShard.shardPath().resolveTranslog(), SequenceNumbers.UNASSIGNED_SEQ_NO, shardId, + indexShard.getPendingPrimaryTerm()); store.associateIndexWithNewTranslog(translogUUID); } catch (CorruptIndexException | IndexFormatTooNewException | IndexFormatTooOldException ex) { // this is a fatal exception at this stage. 
diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java index 46494626920..be399e0f81f 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java @@ -66,7 +66,7 @@ public class RecoveryTranslogOperationsRequest extends TransportRequest { super.readFrom(in); recoveryId = in.readLong(); shardId = ShardId.readShardId(in); - operations = Translog.readOperations(in); + operations = Translog.readOperations(in, "recovery"); totalTranslogOps = in.readVInt(); } diff --git a/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java b/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java index 2853842c646..54d06d11655 100644 --- a/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java +++ b/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java @@ -284,14 +284,14 @@ public final class ConfigurationUtils { msg = "[" + propertyName + "] " + reason; } ElasticsearchParseException exception = new ElasticsearchParseException(msg); - addHeadersToException(exception, processorType, processorTag, propertyName); + addMetadataToException(exception, processorType, processorTag, propertyName); return exception; } public static ElasticsearchException newConfigurationException(String processorType, String processorTag, String propertyName, Exception cause) { ElasticsearchException exception = ExceptionsHelper.convertToElastic(cause); - addHeadersToException(exception, processorType, processorTag, propertyName); + addMetadataToException(exception, processorType, processorTag, propertyName); return exception; } @@ -341,16 +341,16 @@ public final class ConfigurationUtils { } } - private static void addHeadersToException(ElasticsearchException exception, String processorType, - String processorTag, String propertyName) { + private static void addMetadataToException(ElasticsearchException exception, String processorType, + String processorTag, String propertyName) { if (processorType != null) { - exception.addHeader("processor_type", processorType); + exception.addMetadata("es.processor_type", processorType); } if (processorTag != null) { - exception.addHeader("processor_tag", processorTag); + exception.addMetadata("es.processor_tag", processorTag); } if (propertyName != null) { - exception.addHeader("property_name", propertyName); + exception.addMetadata("es.property_name", propertyName); } } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 46b11f7ac14..01bc402e43b 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -39,6 +39,9 @@ import org.elasticsearch.threadpool.ThreadPool; * Holder class for several ingest related services. 
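 * <p>For example, with the {@code NOOP_PIPELINE_NAME} constant added below, a request can
 * explicitly opt out of ingest instead of relying on a null pipeline (a sketch; index and
 * field values are illustrative):
 * <pre>
 * IndexRequest request = new IndexRequest("index", "_doc", "1").source("field", "value");
 * request.setPipeline(IngestService.NOOP_PIPELINE_NAME); // "_none": skip ingest entirely
 * </pre>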
*/ public class IngestService { + + public static final String NOOP_PIPELINE_NAME = "_none"; + private final PipelineStore pipelineStore; private final PipelineExecutionService pipelineExecutionService; diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index a8aca4fdfe5..56d44ee8881 100644 --- a/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateApplier; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -73,12 +72,16 @@ public class PipelineExecutionService implements ClusterStateApplier { UpdateRequest updateRequest = (UpdateRequest) actionRequest; indexRequest = updateRequest.docAsUpsert() ? updateRequest.doc() : updateRequest.upsertRequest(); } - if (indexRequest != null && Strings.hasText(indexRequest.getPipeline())) { + if (indexRequest == null) { + continue; + } + String pipeline = indexRequest.getPipeline(); + if (IngestService.NOOP_PIPELINE_NAME.equals(pipeline) == false) { try { innerExecute(indexRequest, getPipeline(indexRequest.getPipeline())); //this shouldn't be needed here but we do it for consistency with index api // which requires it to prevent double execution - indexRequest.setPipeline(null); + indexRequest.setPipeline(IngestService.NOOP_PIPELINE_NAME); } catch (Exception e) { itemFailureHandler.accept(indexRequest, e); } diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/server/src/main/java/org/elasticsearch/ingest/PipelineStore.java index c6dce0bd45b..9fceaf1a9a5 100644 --- a/server/src/main/java/org/elasticsearch/ingest/PipelineStore.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineStore.java @@ -25,7 +25,7 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.action.ingest.WritePipelineResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; @@ -121,13 +121,13 @@ public class PipelineStore extends AbstractComponent implements ClusterStateAppl /** * Deletes the pipeline specified by id in the request. 
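 * <p>Listeners now receive the generic acknowledged response rather than a dedicated
 * {@code WritePipelineResponse}; a sketch of an updated caller (handler bodies illustrative):
 * <pre>
 * store.delete(clusterService, request, ActionListener.wrap(
 *     response -> logger.info("pipeline deleted, acknowledged [{}]", response.isAcknowledged()),
 *     e -> logger.warn("pipeline delete failed", e)));
 * </pre>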
*/ - public void delete(ClusterService clusterService, DeletePipelineRequest request, ActionListener listener) { + public void delete(ClusterService clusterService, DeletePipelineRequest request, ActionListener listener) { clusterService.submitStateUpdateTask("delete-pipeline-" + request.getId(), - new AckedClusterStateUpdateTask(request, listener) { + new AckedClusterStateUpdateTask(request, listener) { @Override - protected WritePipelineResponse newResponse(boolean acknowledged) { - return new WritePipelineResponse(acknowledged); + protected AcknowledgedResponse newResponse(boolean acknowledged) { + return new AcknowledgedResponse(acknowledged); } @Override @@ -169,15 +169,15 @@ public class PipelineStore extends AbstractComponent implements ClusterStateAppl * Stores the specified pipeline definition in the request. */ public void put(ClusterService clusterService, Map ingestInfos, PutPipelineRequest request, - ActionListener listener) throws Exception { + ActionListener listener) throws Exception { // validates the pipeline and processor configuration before submitting a cluster update task: validatePipeline(ingestInfos, request); clusterService.submitStateUpdateTask("put-pipeline-" + request.getId(), - new AckedClusterStateUpdateTask(request, listener) { + new AckedClusterStateUpdateTask(request, listener) { @Override - protected WritePipelineResponse newResponse(boolean acknowledged) { - return new WritePipelineResponse(acknowledged); + protected AcknowledgedResponse newResponse(boolean acknowledged) { + return new AcknowledgedResponse(acknowledged); } @Override diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index b5cc7964085..c65488bd08e 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -349,8 +349,7 @@ public class Node implements Closeable { getCustomNameResolvers(pluginsService.filterPlugins(DiscoveryPlugin.class))); List clusterPlugins = pluginsService.filterPlugins(ClusterPlugin.class); - final ClusterService clusterService = new ClusterService(settings, settingsModule.getClusterSettings(), threadPool, - ClusterModule.getClusterStateCustomSuppliers(clusterPlugins)); + final ClusterService clusterService = new ClusterService(settings, settingsModule.getClusterSettings(), threadPool); clusterService.addStateApplier(scriptModule.getScriptService()); resourcesToClose.add(clusterService); final IngestService ingestService = new IngestService(settings, threadPool, this.environment, diff --git a/server/src/main/java/org/elasticsearch/plugins/ClusterPlugin.java b/server/src/main/java/org/elasticsearch/plugins/ClusterPlugin.java index 61145c7a1d7..a1274b9346c 100644 --- a/server/src/main/java/org/elasticsearch/plugins/ClusterPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/ClusterPlugin.java @@ -24,7 +24,6 @@ import java.util.Collections; import java.util.Map; import java.util.function.Supplier; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.common.settings.ClusterSettings; @@ -66,12 +65,4 @@ public interface ClusterPlugin { default void onNodeStarted() { } - /** - * Returns a map of {@link ClusterState.Custom} supplier that should be invoked to initialize the initial clusterstate. 
- * This allows custom clusterstate extensions to be always present and prevents invariants where clusterstates are published - * but customs are not initialized. - * - * TODO: Remove this whole concept of InitialClusterStateCustomSupplier, it's not used anymore - */ - default Map> getInitialClusterStateCustomSupplier() { return Collections.emptyMap(); } } diff --git a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java index 952aa76fd17..b134db44517 100644 --- a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java @@ -48,6 +48,7 @@ import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; import org.elasticsearch.search.rescore.RescorerBuilder; import org.elasticsearch.search.rescore.Rescorer; +import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.SuggestionBuilder; @@ -149,31 +150,61 @@ public interface SearchPlugin { * Specification for a {@link Suggester}. */ class SuggesterSpec> extends SearchExtensionSpec> { + + private Writeable.Reader suggestionReader; + /** * Specification of custom {@link Suggester}. * * @param name holds the names by which this suggester might be parsed. The {@link ParseField#getPreferredName()} is special as it - * is the name by under which the reader is registered. So it is the name that the query should use as its - * {@link NamedWriteable#getWriteableName()} too. - * @param reader the reader registered for this suggester's builder. Typically a reference to a constructor that takes a + * is the name under which the request builder and Suggestion response readers are registered. So it is the name that the + * query and Suggestion response should use as their {@link NamedWriteable#getWriteableName()} return values too. + * @param builderReader the reader registered for this suggester's builder. Typically a reference to a constructor that takes a * {@link StreamInput} - * @param parser the parser the reads the query suggester from xcontent + * @param builderParser a parser that reads the suggester's builder from xcontent + * @param suggestionReader the reader registered for this suggester's Suggestion response. Typically a reference to a constructor + * that takes a {@link StreamInput} */ - public SuggesterSpec(ParseField name, Writeable.Reader reader, CheckedFunction parser) { - super(name, reader, parser); + public SuggesterSpec( + ParseField name, + Writeable.Reader builderReader, + CheckedFunction builderParser, + Writeable.Reader suggestionReader) { + + super(name, builderReader, builderParser); + setSuggestionReader(suggestionReader); } /** * Specification of custom {@link Suggester}. * - * @param name the name by which this suggester might be parsed or deserialized. Make sure that the query builder returns this name - * for {@link NamedWriteable#getWriteableName()}. - * @param reader the reader registered for this suggester's builder. Typically a reference to a constructor that takes a + * @param name the name by which this suggester might be parsed or deserialized. Make sure that the query builder and Suggestion + * response reader return this name for {@link NamedWriteable#getWriteableName()}. + * @param builderReader the reader registered for this suggester's builder. 
Typically a reference to a constructor that takes a * {@link StreamInput} - * @param parser the parser the reads the suggester builder from xcontent + * @param builderParser a parser that reads the suggester's builder from xcontent + * @param suggestionReader the reader registered for this suggester's Suggestion response. Typically a reference to a constructor + * that takes a {@link StreamInput} */ - public SuggesterSpec(String name, Writeable.Reader reader, CheckedFunction parser) { - super(name, reader, parser); + public SuggesterSpec( + String name, + Writeable.Reader builderReader, + CheckedFunction builderParser, + Writeable.Reader suggestionReader) { + + super(name, builderReader, builderParser); + setSuggestionReader(suggestionReader); + } + + private void setSuggestionReader(Writeable.Reader reader) { + this.suggestionReader = reader; + } + + /** + * Returns the reader used to read the {@link Suggest.Suggestion} generated by this suggester + */ + public Writeable.Reader getSuggestionReader() { + return this.suggestionReader; } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/RestMainAction.java b/server/src/main/java/org/elasticsearch/rest/action/RestMainAction.java index 250ee209587..9efc8f526f3 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/RestMainAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/RestMainAction.java @@ -67,4 +67,9 @@ public class RestMainAction extends BaseRestHandler { response.toXContent(builder, request); return new BytesRestResponse(RestStatus.OK, builder); } + + @Override + public boolean canTripCircuitBreaker() { + return false; + } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java index 0697871ea5d..2251615d678 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java @@ -59,7 +59,6 @@ public final class RestReloadSecureSettingsAction extends BaseRestHandler { .cluster() .prepareReloadSecureSettings() .setTimeout(request.param("timeout")) - .source(request.requiredContent(), request.getXContentType()) .setNodesIds(nodesIds); final NodesReloadSecureSettingsRequest nodesRequest = nodesRequestBuilder.request(); return channel -> nodesRequestBuilder @@ -68,12 +67,12 @@ public final class RestReloadSecureSettingsAction extends BaseRestHandler { public RestResponse buildResponse(NodesReloadSecureSettingsResponse response, XContentBuilder builder) throws Exception { builder.startObject(); - RestActions.buildNodesHeader(builder, channel.request(), response); - builder.field("cluster_name", response.getClusterName().value()); - response.toXContent(builder, channel.request()); + { + RestActions.buildNodesHeader(builder, channel.request(), response); + builder.field("cluster_name", response.getClusterName().value()); + response.toXContent(builder, channel.request()); + } builder.endObject(); - // clear password for the original request - nodesRequest.secureSettingsPassword().close(); return new BytesRestResponse(RestStatus.OK, builder); } }); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 3a76c7ca0c9..1a859933ad3 100644 --- 
a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -39,6 +39,7 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.index.Index; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -380,7 +381,8 @@ public class RestIndicesAction extends AbstractCatAction { table.addCell(primaryStats.getDocs() == null ? null : primaryStats.getDocs().getDeleted()); table.addCell(indexMetaData.getCreationDate()); - table.addCell(ZonedDateTime.ofInstant(Instant.ofEpochMilli(indexMetaData.getCreationDate()), ZoneOffset.UTC)); + ZonedDateTime creationTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(indexMetaData.getCreationDate()), ZoneOffset.UTC); + table.addCell(DateFormatters.forPattern("strict_date_time").format(creationTime)); table.addCell(totalStats.getStore() == null ? null : totalStats.getStore().size()); table.addCell(primaryStats.getStore() == null ? null : primaryStats.getStore().size()); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java index d8f8c59caf6..a03fa2c059e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java @@ -40,6 +40,7 @@ import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.RestActionListener; import org.elasticsearch.rest.action.RestResponseListener; +import java.time.Instant; import java.util.Locale; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -230,7 +231,8 @@ public class RestShardsAction extends AbstractCatAction { if (shard.unassignedInfo() != null) { table.addCell(shard.unassignedInfo().getReason()); - table.addCell(UnassignedInfo.DATE_TIME_FORMATTER.printer().print(shard.unassignedInfo().getUnassignedTimeInMillis())); + Instant unassignedTime = Instant.ofEpochMilli(shard.unassignedInfo().getUnassignedTimeInMillis()); + table.addCell(UnassignedInfo.DATE_TIME_FORMATTER.format(unassignedTime)); table.addCell(TimeValue.timeValueMillis(System.currentTimeMillis() - shard.unassignedInfo().getUnassignedTimeInMillis())); table.addCell(shard.unassignedInfo().getDetails()); } else { diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java index 6d44e9aa856..2da5e432ca3 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java @@ -25,6 +25,8 @@ import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Table; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.CompoundDateTimeFormatter; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -32,9 +34,9 @@ import org.elasticsearch.rest.RestResponse; import 
org.elasticsearch.rest.action.RestResponseListener; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotState; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; +import java.time.Instant; +import java.time.ZoneOffset; import java.util.concurrent.TimeUnit; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -97,7 +99,7 @@ public class RestSnapshotAction extends AbstractCatAction { .endHeaders(); } - private DateTimeFormatter dateFormat = DateTimeFormat.forPattern("HH:mm:ss"); + private static final CompoundDateTimeFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC); private Table buildTable(RestRequest req, GetSnapshotsResponse getSnapshotsResponse) { Table table = getTableWithHeader(req); @@ -107,9 +109,9 @@ public class RestSnapshotAction extends AbstractCatAction { table.addCell(snapshotStatus.snapshotId().getName()); table.addCell(snapshotStatus.state()); table.addCell(TimeUnit.SECONDS.convert(snapshotStatus.startTime(), TimeUnit.MILLISECONDS)); - table.addCell(dateFormat.print(snapshotStatus.startTime())); + table.addCell(FORMATTER.format(Instant.ofEpochMilli(snapshotStatus.startTime()))); table.addCell(TimeUnit.SECONDS.convert(snapshotStatus.endTime(), TimeUnit.MILLISECONDS)); - table.addCell(dateFormat.print(snapshotStatus.endTime())); + table.addCell(FORMATTER.format(Instant.ofEpochMilli(snapshotStatus.endTime()))); final long durationMillis; if (snapshotStatus.state() == SnapshotState.IN_PROGRESS) { durationMillis = System.currentTimeMillis() - snapshotStatus.startTime(); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java index c0ebddc2908..7d14422b37c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java @@ -27,17 +27,20 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.CompoundDateTimeFormatter; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.RestResponseListener; import org.elasticsearch.tasks.TaskInfo; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; +import java.time.Instant; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -122,7 +125,7 @@ public class RestTasksAction extends AbstractCatAction { return table; } - private DateTimeFormatter dateFormat = DateTimeFormat.forPattern("HH:mm:ss"); + private static final CompoundDateTimeFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC); private void buildRow(Table table, boolean fullId, boolean detailed, DiscoveryNodes discoveryNodes, TaskInfo taskInfo) { table.startRow(); @@ -139,7 +142,7 @@ public class RestTasksAction extends AbstractCatAction { } table.addCell(taskInfo.getType()); table.addCell(taskInfo.getStartTime()); - 
table.addCell(dateFormat.print(taskInfo.getStartTime())); + table.addCell(FORMATTER.format(Instant.ofEpochMilli(taskInfo.getStartTime()))); table.addCell(taskInfo.getRunningTimeNanos()); table.addCell(TimeValue.timeValueNanos(taskInfo.getRunningTimeNanos()).toString()); @@ -159,7 +162,7 @@ public class RestTasksAction extends AbstractCatAction { private void buildGroups(Table table, boolean fullId, boolean detailed, List taskGroups) { DiscoveryNodes discoveryNodes = nodesInCluster.get(); List sortedGroups = new ArrayList<>(taskGroups); - sortedGroups.sort((o1, o2) -> Long.compare(o1.getTaskInfo().getStartTime(), o2.getTaskInfo().getStartTime())); + sortedGroups.sort(Comparator.comparingLong(o -> o.getTaskInfo().getStartTime())); for (TaskGroup taskGroup : sortedGroups) { buildRow(table, fullId, detailed, discoveryNodes, taskGroup.getTaskInfo()); buildGroups(table, fullId, detailed, taskGroup.getChildTasks()); diff --git a/server/src/main/java/org/elasticsearch/script/BucketAggregationScript.java b/server/src/main/java/org/elasticsearch/script/BucketAggregationScript.java new file mode 100644 index 00000000000..5fa8d1fbf94 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/BucketAggregationScript.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script; + +import java.util.Map; + +/** + * A script used in bucket aggregations that returns a {@code double} value. + */ +public abstract class BucketAggregationScript { + + public static final String[] PARAMETERS = {}; + + public static final ScriptContext CONTEXT = new ScriptContext<>("bucket_aggregation", Factory.class); + + /** + * The generic runtime parameters for the script. + */ + private final Map params; + + public BucketAggregationScript(Map params) { + this.params = params; + } + + /** + * Return the parameters for this script. + */ + public Map getParams() { + return params; + } + + public abstract Double execute(); + + public interface Factory { + BucketAggregationScript newInstance(Map params); + } +} diff --git a/server/src/main/java/org/elasticsearch/script/BucketAggregationSelectorScript.java b/server/src/main/java/org/elasticsearch/script/BucketAggregationSelectorScript.java new file mode 100644 index 00000000000..a8e2fad7cdc --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/BucketAggregationSelectorScript.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script; + +import java.util.Map; + +/** + * A script used in bucket aggregations that returns a {@code boolean} value. + */ +public abstract class BucketAggregationSelectorScript { + + public static final String[] PARAMETERS = {}; + + public static final ScriptContext CONTEXT = new ScriptContext<>("aggregation_selector", Factory.class); + + /** + * The generic runtime parameters for the script. + */ + private final Map params; + + public BucketAggregationSelectorScript(Map params) { + this.params = params; + } + + /** + * Return the parameters for this script. + */ + public Map getParams() { + return params; + } + + public abstract boolean execute(); + + public interface Factory { + BucketAggregationSelectorScript newInstance(Map params); + } +} diff --git a/server/src/main/java/org/elasticsearch/script/ExecutableScript.java b/server/src/main/java/org/elasticsearch/script/ExecutableScript.java index 2f7a01c3798..d0d8020371b 100644 --- a/server/src/main/java/org/elasticsearch/script/ExecutableScript.java +++ b/server/src/main/java/org/elasticsearch/script/ExecutableScript.java @@ -46,8 +46,4 @@ public interface ExecutableScript { } ScriptContext CONTEXT = new ScriptContext<>("executable", Factory.class); - - // TODO: remove these once each has its own script interface - ScriptContext AGGS_CONTEXT = new ScriptContext<>("aggs_executable", Factory.class); - ScriptContext UPDATE_CONTEXT = new ScriptContext<>("update", Factory.class); } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index a3da1dafe48..f04e690fa42 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -19,6 +19,11 @@ package org.elasticsearch.script; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctionScript; + import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -27,12 +32,6 @@ import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugins.ScriptPlugin; -import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctionScript; - - /** * Manages building {@link ScriptService}. 
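 * <p>With this change, dedicated contexts such as {@code UpdateScript.CONTEXT} and
 * {@code BucketAggregationScript.CONTEXT} (registered in the hunk below) replace the removed
 * {@code ExecutableScript.AGGS_CONTEXT}/{@code UPDATE_CONTEXT} catch-alls, so callers compile
 * against them directly, e.g. {@code scriptService.compile(script, UpdateScript.CONTEXT)}.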
*/ @@ -47,8 +46,10 @@ public class ScriptModule { SearchScript.SCRIPT_SORT_CONTEXT, SearchScript.TERMS_SET_QUERY_CONTEXT, ExecutableScript.CONTEXT, - ExecutableScript.AGGS_CONTEXT, - ExecutableScript.UPDATE_CONTEXT, + UpdateScript.CONTEXT, + BucketAggregationScript.CONTEXT, + BucketAggregationSelectorScript.CONTEXT, + SignificantTermsHeuristicScoreScript.CONTEXT, IngestScript.CONTEXT, FilterScript.CONTEXT, SimilarityScript.CONTEXT, diff --git a/server/src/main/java/org/elasticsearch/script/ScriptService.java b/server/src/main/java/org/elasticsearch/script/ScriptService.java index ca79e3b80fc..d37cefb3a01 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptService.java @@ -19,14 +19,12 @@ package org.elasticsearch.script; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; -import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptResponse; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; -import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; @@ -46,6 +44,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.core.internal.io.IOUtils; import java.io.Closeable; import java.io.IOException; @@ -285,7 +284,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust // TODO: fix this through some API or something, that's wrong // special exception to prevent expressions from compiling as update or mapping scripts boolean expression = "expression".equals(lang); - boolean notSupported = context.name.equals(ExecutableScript.UPDATE_CONTEXT.name); + boolean notSupported = context.name.equals(UpdateScript.CONTEXT.name); if (expression && notSupported) { throw new UnsupportedOperationException("scripts of type [" + script.getType() + "]," + " operation [" + context.name + "] and lang [" + lang + "] are not supported"); @@ -417,7 +416,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust } public void putStoredScript(ClusterService clusterService, PutStoredScriptRequest request, - ActionListener listener) { + ActionListener listener) { int max = SCRIPT_MAX_SIZE_IN_BYTES.get(settings); if (request.content().length() > max) { @@ -454,11 +453,11 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust } clusterService.submitStateUpdateTask("put-script-" + request.id(), - new AckedClusterStateUpdateTask(request, listener) { + new AckedClusterStateUpdateTask(request, listener) { @Override - protected PutStoredScriptResponse newResponse(boolean acknowledged) { - return new PutStoredScriptResponse(acknowledged); + protected AcknowledgedResponse newResponse(boolean acknowledged) { + return new AcknowledgedResponse(acknowledged); } @Override @@ -473,13 +472,13 @@ public class 
ScriptService extends AbstractComponent implements Closeable, Clust } public void deleteStoredScript(ClusterService clusterService, DeleteStoredScriptRequest request, - ActionListener listener) { + ActionListener listener) { clusterService.submitStateUpdateTask("delete-script-" + request.id(), - new AckedClusterStateUpdateTask(request, listener) { + new AckedClusterStateUpdateTask(request, listener) { @Override - protected DeleteStoredScriptResponse newResponse(boolean acknowledged) { - return new DeleteStoredScriptResponse(acknowledged); + protected AcknowledgedResponse newResponse(boolean acknowledged) { + return new AcknowledgedResponse(acknowledged); } @Override diff --git a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java index 774dc95d399..0c34c59b7be 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java @@ -22,6 +22,8 @@ package org.elasticsearch.script; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Scorer; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; @@ -31,6 +33,25 @@ import java.util.List; import java.util.Map; public class ScriptedMetricAggContexts { + private static final DeprecationLogger DEPRECATION_LOGGER = + new DeprecationLogger(Loggers.getLogger(ScriptedMetricAggContexts.class)); + + // Public for access from tests + public static final String AGG_PARAM_DEPRECATION_WARNING = + "params._agg/_aggs for scripted metric aggregations are deprecated, use state/states (not in params) instead. " + + "Use -Des.aggregations.enable_scripted_metric_agg_param=false to disable."; + + public static boolean deprecatedAggParamEnabled() { + boolean enabled = Boolean.parseBoolean( + System.getProperty("es.aggregations.enable_scripted_metric_agg_param", "true")); + + if (enabled) { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("enable_scripted_metric_agg_param", AGG_PARAM_DEPRECATION_WARNING); + } + + return enabled; + } + private abstract static class ParamsAndStateBase { private final Map params; private final Object state; diff --git a/server/src/main/java/org/elasticsearch/script/SignificantTermsHeuristicScoreScript.java b/server/src/main/java/org/elasticsearch/script/SignificantTermsHeuristicScoreScript.java new file mode 100644 index 00000000000..0296bc36ce1 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/SignificantTermsHeuristicScoreScript.java @@ -0,0 +1,38 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script; + +import java.util.Map; + +/** + * A script used in significant terms heuristic scoring. + */ +public abstract class SignificantTermsHeuristicScoreScript { + + public static final String[] PARAMETERS = { "params" }; + + public static final ScriptContext CONTEXT = new ScriptContext<>("script_heuristic", Factory.class); + + public abstract double execute(Map params); + + public interface Factory { + SignificantTermsHeuristicScoreScript newInstance(); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexResponse.java b/server/src/main/java/org/elasticsearch/script/UpdateScript.java similarity index 50% rename from server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexResponse.java rename to server/src/main/java/org/elasticsearch/script/UpdateScript.java index b86549f536d..c6a1d5dd9ea 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/script/UpdateScript.java @@ -1,3 +1,4 @@ + /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -17,24 +18,35 @@ * under the License. */ -package org.elasticsearch.action.admin.indices.delete; +package org.elasticsearch.script; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.common.xcontent.XContentParser; +import java.util.Map; /** - * A response for a delete index action. + * An update script. */ -public class DeleteIndexResponse extends AcknowledgedResponse { +public abstract class UpdateScript { - DeleteIndexResponse() { + public static final String[] PARAMETERS = { "ctx" }; + + /** The context used to compile {@link UpdateScript} factories. */ + public static final ScriptContext CONTEXT = new ScriptContext<>("update", Factory.class); + + /** The generic runtime parameters for the script. */ + private final Map params; + + public UpdateScript(Map params) { + this.params = params; } - DeleteIndexResponse(boolean acknowledged) { - super(acknowledged); + /** Return the parameters for this script. 
*/ + public Map getParams() { + return params; } - public static DeleteIndexResponse fromXContent(XContentParser parser) { - return new DeleteIndexResponse(parseAcknowledged(parser)); + public abstract void execute(Map ctx); + + public interface Factory { + UpdateScript newInstance(Map params); } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 99b47cf83e2..e2baad5d606 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -247,13 +247,17 @@ import org.elasticsearch.search.sort.GeoDistanceSortBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder; import org.elasticsearch.search.sort.ScriptSortBuilder; import org.elasticsearch.search.sort.SortBuilder; +import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.SuggestionBuilder; +import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; import org.elasticsearch.search.suggest.phrase.Laplace; import org.elasticsearch.search.suggest.phrase.LinearInterpolation; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestion; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; import org.elasticsearch.search.suggest.phrase.SmoothingModel; import org.elasticsearch.search.suggest.phrase.StupidBackoff; +import org.elasticsearch.search.suggest.term.TermSuggestion; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import java.util.ArrayList; @@ -590,9 +594,14 @@ public class SearchModule { private void registerSuggesters(List plugins) { registerSmoothingModels(namedWriteables); - registerSuggester(new SuggesterSpec<>("term", TermSuggestionBuilder::new, TermSuggestionBuilder::fromXContent)); - registerSuggester(new SuggesterSpec<>("phrase", PhraseSuggestionBuilder::new, PhraseSuggestionBuilder::fromXContent)); - registerSuggester(new SuggesterSpec<>("completion", CompletionSuggestionBuilder::new, CompletionSuggestionBuilder::fromXContent)); + registerSuggester(new SuggesterSpec<>(TermSuggestionBuilder.SUGGESTION_NAME, + TermSuggestionBuilder::new, TermSuggestionBuilder::fromXContent, TermSuggestion::new)); + + registerSuggester(new SuggesterSpec<>(PhraseSuggestionBuilder.SUGGESTION_NAME, + PhraseSuggestionBuilder::new, PhraseSuggestionBuilder::fromXContent, PhraseSuggestion::new)); + + registerSuggester(new SuggesterSpec<>(CompletionSuggestionBuilder.SUGGESTION_NAME, + CompletionSuggestionBuilder::new, CompletionSuggestionBuilder::fromXContent, CompletionSuggestion::new)); registerFromPlugin(plugins, SearchPlugin::getSuggesters, this::registerSuggester); } @@ -602,6 +611,10 @@ public class SearchModule { SuggestionBuilder.class, suggester.getName().getPreferredName(), suggester.getReader())); namedXContents.add(new NamedXContentRegistry.Entry(SuggestionBuilder.class, suggester.getName(), suggester.getParser())); + + namedWriteables.add(new NamedWriteableRegistry.Entry( + Suggest.Suggestion.class, suggester.getName().getPreferredName(), suggester.getSuggestionReader() + )); } private Map setupHighlighters(Settings settings, List plugins) { diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 59af043e0cf..4bf5e03b8a7 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java 
+++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -806,7 +806,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv throw new SearchContextException(context, "failed to create SearchContextHighlighter", e); } } - if (source.scriptFields() != null) { + if (source.scriptFields() != null && source.size() != 0) { int maxAllowedScriptFields = context.mapperService().getIndexSettings().getMaxScriptFields(); if (source.scriptFields().size() > maxAllowedScriptFields) { throw new IllegalArgumentException( diff --git a/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java b/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java index faf415b54ae..19c0f8c64d5 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java +++ b/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java @@ -19,8 +19,6 @@ package org.elasticsearch.search; -import java.io.IOException; - import org.elasticsearch.Version; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Nullable; @@ -32,6 +30,8 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.transport.RemoteClusterAware; +import java.io.IOException; + /** * The target that the search request was executed on. */ @@ -96,6 +96,13 @@ public final class SearchShardTarget implements Writeable, Comparable (targetBuckets * roundings[roundingIdx].getMaximumInnerInterval()) + } while (requiredBuckets > (targetBuckets * roundings[currentRoundingIdx - 1].getMaximumInnerInterval()) && currentRoundingIdx < roundings.length); // The loop will increase past the correct rounding index here so we // need to subtract one to get the rounding index we need diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantTerms.java index 1b4739c184d..26c4ec420d0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantTerms.java @@ -175,7 +175,7 @@ public abstract class ParsedSignificantTerms extends ParsedMultiBucketAggregatio bucket.subsetDf = value; bucket.setDocCount(value); } else if (InternalSignificantTerms.SCORE.equals(currentFieldName)) { - bucket.score = parser.longValue(); + bucket.score = parser.doubleValue(); } else if (InternalSignificantTerms.BG_COUNT.equals(currentFieldName)) { bucket.supersetDf = parser.longValue(); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java index a2cbd49693a..05415cf7d19 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java @@ -28,12 +28,14 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; +import 
org.elasticsearch.script.SignificantTermsHeuristicScoreScript; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; +import java.util.HashMap; +import java.util.Map; import java.util.Objects; public class ScriptHeuristic extends SignificanceHeuristic { @@ -48,19 +50,21 @@ public class ScriptHeuristic extends SignificanceHeuristic { private final LongAccessor supersetSizeHolder; private final LongAccessor subsetDfHolder; private final LongAccessor supersetDfHolder; - private final ExecutableScript executableScript; + private final SignificantTermsHeuristicScoreScript executableScript; + private final Map params = new HashMap<>(); - ExecutableScriptHeuristic(Script script, ExecutableScript executableScript){ + ExecutableScriptHeuristic(Script script, SignificantTermsHeuristicScoreScript executableScript) { super(script); subsetSizeHolder = new LongAccessor(); supersetSizeHolder = new LongAccessor(); subsetDfHolder = new LongAccessor(); supersetDfHolder = new LongAccessor(); this.executableScript = executableScript; - executableScript.setNextVar("_subset_freq", subsetDfHolder); - executableScript.setNextVar("_subset_size", subsetSizeHolder); - executableScript.setNextVar("_superset_freq", supersetDfHolder); - executableScript.setNextVar("_superset_size", supersetSizeHolder); + params.putAll(script.getParams()); + params.put("_subset_freq", subsetDfHolder); + params.put("_subset_size", subsetSizeHolder); + params.put("_superset_freq", supersetDfHolder); + params.put("_superset_size", supersetSizeHolder); } @Override @@ -69,7 +73,7 @@ public class ScriptHeuristic extends SignificanceHeuristic { supersetSizeHolder.value = supersetSize; subsetDfHolder.value = subsetFreq; supersetDfHolder.value = supersetFreq; - return ((Number) executableScript.run()).doubleValue(); + return executableScript.execute(params); } } @@ -91,15 +95,15 @@ public class ScriptHeuristic extends SignificanceHeuristic { @Override public SignificanceHeuristic rewrite(InternalAggregation.ReduceContext context) { - ExecutableScript.Factory factory = context.scriptService().compile(script, ExecutableScript.AGGS_CONTEXT); - return new ExecutableScriptHeuristic(script, factory.newInstance(script.getParams())); + SignificantTermsHeuristicScoreScript.Factory factory = context.scriptService().compile(script, SignificantTermsHeuristicScoreScript.CONTEXT); + return new ExecutableScriptHeuristic(script, factory.newInstance()); } @Override public SignificanceHeuristic rewrite(SearchContext context) { QueryShardContext shardContext = context.getQueryShardContext(); - ExecutableScript.Factory compiledScript = shardContext.getScriptService().compile(script, ExecutableScript.AGGS_CONTEXT); - return new ExecutableScriptHeuristic(script, compiledScript.newInstance(script.getParams())); + SignificantTermsHeuristicScoreScript.Factory compiledScript = shardContext.getScriptService().compile(script, SignificantTermsHeuristicScoreScript.CONTEXT); + return new ExecutableScriptHeuristic(script, compiledScript.newInstance()); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java index f4281c063ff..4124a8eeb76 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java +++ 
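Aside on the ScriptHeuristic hunks above: the dedicated script context replaces the old setNextVar calls with a params map that is populated once and then mutated per bucket through holder objects. A hedged, self-contained sketch of that calling convention (all names here are illustrative, not the real script classes):

```java
import java.util.HashMap;
import java.util.Map;

final class HeuristicScoreSketch {
    // Mutable holder, analogous to the LongAccessor fields put into params above.
    static final class LongHolder extends Number {
        long value;
        public long longValue() { return value; }
        public int intValue() { return (int) value; }
        public float floatValue() { return value; }
        public double doubleValue() { return value; }
    }

    interface ScoreScript { double execute(Map<String, Object> params); }

    public static void main(String[] args) {
        LongHolder subsetFreq = new LongHolder();
        LongHolder subsetSize = new LongHolder();
        Map<String, Object> params = new HashMap<>();
        params.put("_subset_freq", subsetFreq);
        params.put("_subset_size", subsetSize);

        // A toy heuristic: plain frequency ratio.
        ScoreScript script = p ->
            ((Number) p.get("_subset_freq")).doubleValue()
                / ((Number) p.get("_subset_size")).doubleValue();

        subsetFreq.value = 3;   // updated per bucket; the map itself is reused
        subsetSize.value = 10;
        System.out.println(script.execute(params)); // 0.3
    }
}
```

Reusing one map plus mutable holders avoids rebuilding the parameters for every bucket while keeping the script signature a plain execute(params).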
b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java @@ -96,7 +96,9 @@ public class InternalScriptedMetric extends InternalAggregation implements Scrip } // Add _aggs to params map for backwards compatibility (redundant with a context variable on the ReduceScript created below). - params.put("_aggs", aggregationObjects); + if (ScriptedMetricAggContexts.deprecatedAggParamEnabled()) { + params.put("_aggs", aggregationObjects); + } ScriptedMetricAggContexts.ReduceScript.Factory factory = reduceContext.scriptService().compile( firstAggregation.reduceScript, ScriptedMetricAggContexts.ReduceScript.CONTEXT); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java index 9bd904a0701..076c29fecea 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java @@ -83,10 +83,17 @@ public class ScriptedMetricAggregatorFactory extends AggregatorFactory, since // it won't be possible to completely replace it with another type as is possible when it's an entry in params. - if (aggParams.containsKey("_agg") == false) { - aggParams.put("_agg", new HashMap()); + Object aggState = new HashMap(); + if (ScriptedMetricAggContexts.deprecatedAggParamEnabled()) { + if (aggParams.containsKey("_agg") == false) { + // Add _agg if it wasn't added manually + aggParams.put("_agg", aggState); + } else { + // If it was added manually, also use it for the agg context variable to reduce the likelihood of + // weird behavior due to multiple different variables. 
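For readability, here is the state-selection logic of this hunk as one standalone method (a hedged sketch; legacyParamEnabled stands in for ScriptedMetricAggContexts.deprecatedAggParamEnabled()):

```java
import java.util.HashMap;
import java.util.Map;

final class AggStateSketch {
    // Fresh state by default; when the legacy flag is on and the caller already
    // supplied "_agg", reuse that object so the param and the context variable
    // stay aliased to the same map.
    static Object chooseAggState(Map<String, Object> aggParams, boolean legacyParamEnabled) {
        Object aggState = new HashMap<String, Object>();
        if (legacyParamEnabled) {
            if (aggParams.containsKey("_agg") == false) {
                aggParams.put("_agg", aggState);
            } else {
                aggState = aggParams.get("_agg");
            }
        }
        return aggState;
    }
}
```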
+ aggState = aggParams.get("_agg"); + } } - Object aggState = aggParams.get("_agg"); final ScriptedMetricAggContexts.InitScript initScript = this.initScript.newInstance( mergeParams(aggParams, initScriptParams), aggState); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java index 42337fbce0f..042a30695c6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java @@ -21,10 +21,9 @@ package org.elasticsearch.search.aggregations.pipeline.bucketscript; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.BucketAggregationScript; import org.elasticsearch.script.Script; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -89,7 +88,8 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator { (InternalMultiBucketAggregation) aggregation; List buckets = originalAgg.getBuckets(); - ExecutableScript.Factory factory = reduceContext.scriptService().compile(script, ExecutableScript.AGGS_CONTEXT); + BucketAggregationScript.Factory factory = + reduceContext.scriptService().compile(script, BucketAggregationScript.CONTEXT); List newBuckets = new ArrayList<>(); for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) { Map vars = new HashMap<>(); @@ -110,22 +110,15 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator { if (skipBucket) { newBuckets.add(bucket); } else { - ExecutableScript executableScript = factory.newInstance(vars); - Object returned = executableScript.run(); - // no need to check for self references since only numbers are valid + Double returned = factory.newInstance(vars).execute(); if (returned == null) { newBuckets.add(bucket); } else { - if ((returned instanceof Number) == false) { - throw new AggregationExecutionException("series_arithmetic script for reducer [" + name() - + "] must return a Number"); - } final List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map( - (p) -> (InternalAggregation) p).collect(Collectors.toList()); - aggs.add(new InternalSimpleValue(name(), ((Number) returned).doubleValue(), formatter, - new ArrayList<>(), metaData())); + (p) -> (InternalAggregation) p).collect(Collectors.toList()); + aggs.add(new InternalSimpleValue(name(), returned, formatter, new ArrayList<>(), metaData())); InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(new InternalAggregations(aggs), - bucket); + bucket); newBuckets.add(newBucket); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketselector/BucketSelectorPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketselector/BucketSelectorPipelineAggregator.java index a54ad0ec21f..06beab04aa6 100644 --- 
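Aside on the bucket_script hunk above: giving the script a typed BucketAggregationScript context, whose execute() is declared to return Double, removes the per-bucket instanceof check the old ExecutableScript required; the bucket_selector hunk that follows applies the same idea with a boolean-returning context. A simplified contrast (illustrative names only, not the real script classes):

```java
import java.util.Map;
import java.util.function.Function;

final class TypedScriptSketch {
    interface BucketScript { Double execute(); } // typed: null means "skip this bucket"

    // The untyped style: returns Object, so the result must be validated per bucket.
    static Double runUntyped(Function<Map<String, Object>, Object> script, Map<String, Object> vars) {
        Object returned = script.apply(vars);
        if (returned == null) {
            return null;
        }
        if ((returned instanceof Number) == false) { // the check the typed version no longer needs
            throw new IllegalStateException("script must return a Number");
        }
        return ((Number) returned).doubleValue();
    }

    // The typed style: the contract is enforced by the signature.
    static Double runTyped(BucketScript script) {
        return script.execute();
    }
}
```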
a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketselector/BucketSelectorPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketselector/BucketSelectorPipelineAggregator.java @@ -22,7 +22,7 @@ package org.elasticsearch.search.aggregations.pipeline.bucketselector; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.BucketAggregationSelectorScript; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; @@ -82,7 +82,8 @@ public class BucketSelectorPipelineAggregator extends PipelineAggregator { (InternalMultiBucketAggregation) aggregation; List buckets = originalAgg.getBuckets(); - ExecutableScript.Factory factory = reduceContext.scriptService().compile(script, ExecutableScript.AGGS_CONTEXT); + BucketAggregationSelectorScript.Factory factory = + reduceContext.scriptService().compile(script, BucketAggregationSelectorScript.CONTEXT); List newBuckets = new ArrayList<>(); for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) { Map vars = new HashMap<>(); @@ -96,17 +97,8 @@ public class BucketSelectorPipelineAggregator extends PipelineAggregator { vars.put(varName, value); } // TODO: can we use one instance of the script for all buckets? it should be stateless? - ExecutableScript executableScript = factory.newInstance(vars); - Object scriptReturnValue = executableScript.run(); - final boolean keepBucket; - // TODO: WTF!!!!! - if ("expression".equals(script.getLang())) { - double scriptDoubleValue = (double) scriptReturnValue; - keepBucket = scriptDoubleValue == 1.0; - } else { - keepBucket = (boolean) scriptReturnValue; - } - if (keepBucket) { + BucketAggregationSelectorScript executableScript = factory.newInstance(vars); + if (executableScript.execute()) { newBuckets.add(bucket); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregator.java index de5e2638c6c..e10d5c35800 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregator.java @@ -180,7 +180,7 @@ public class BucketSortPipelineAggregator extends PipelineAggregator { private static class TopNPriorityQueue extends PriorityQueue { private TopNPriorityQueue(int n) { - super(n, false); + super(n); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationPath.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationPath.java index 1a5539cab18..ac97c48882a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationPath.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationPath.java @@ -288,11 +288,14 @@ public class AggregationPath { public void validate(Aggregator root) throws AggregationExecutionException { Aggregator aggregator = root; for (int i = 0; i < pathElements.size(); i++) { - aggregator = ProfilingAggregator.unwrap(aggregator.subAggregator(pathElements.get(i).name)); + String name = 
pathElements.get(i).name; + aggregator = ProfilingAggregator.unwrap(aggregator.subAggregator(name)); if (aggregator == null) { - throw new AggregationExecutionException("Invalid aggregator order path [" + this + "]. Unknown aggregation [" - + pathElements.get(i).name + "]"); + throw new AggregationExecutionException("Invalid aggregator order path [" + this + "]. The " + + "provided aggregation [" + name + "] either does not exist, or is a pipeline aggregation " + + "and cannot be used to sort the buckets."); } + if (i < pathElements.size() - 1) { // we're in the middle of the path, so the aggregator can only be a single-bucket aggregator diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptDoubleValues.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptDoubleValues.java index ac3c8f682ba..1227efb5ea0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptDoubleValues.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptDoubleValues.java @@ -27,6 +27,7 @@ import org.joda.time.ReadableInstant; import java.io.IOException; import java.lang.reflect.Array; +import java.time.ZonedDateTime; import java.util.Collection; /** @@ -54,6 +55,9 @@ public class ScriptDoubleValues extends SortingNumericDoubleValues implements Sc } else if (value instanceof ReadableInstant) { resize(1); values[0] = ((ReadableInstant) value).getMillis(); + } else if (value instanceof ZonedDateTime) { + resize(1); + values[0] = ((ZonedDateTime) value).toInstant().toEpochMilli(); } else if (value.getClass().isArray()) { int length = Array.getLength(value); if (length == 0) { @@ -89,6 +93,8 @@ public class ScriptDoubleValues extends SortingNumericDoubleValues implements Sc } else if (o instanceof ReadableInstant) { // Dates are exposed in scripts as ReadableDateTimes but aggregations want them to be numeric return ((ReadableInstant) o).getMillis(); + } else if (o instanceof ZonedDateTime) { + return ((ZonedDateTime) o).toInstant().toEpochMilli(); } else if (o instanceof Boolean) { // We do expose boolean fields as boolean in scripts, however aggregations still expect // that scripts return the same internal representation as regular fields, so boolean diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java index 818a9d9fd8d..cdc448bd041 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java @@ -28,6 +28,7 @@ import org.joda.time.ReadableInstant; import java.io.IOException; import java.lang.reflect.Array; +import java.time.ZonedDateTime; import java.util.Collection; import java.util.Iterator; @@ -91,6 +92,8 @@ public class ScriptLongValues extends AbstractSortingNumericDocValues implements } else if (o instanceof ReadableInstant) { // Dates are exposed in scripts as ReadableDateTimes but aggregations want them to be numeric return ((ReadableInstant) o).getMillis(); + } else if (o instanceof ZonedDateTime) { + return ((ZonedDateTime) o).toInstant().toEpochMilli(); } else if (o instanceof Boolean) { // We do expose boolean fields as boolean in scripts, however aggregations still expect // that scripts return the same internal representation as regular fields, so boolean diff --git 
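Aside on the two script-values hunks above: aggregations consume dates as epoch milliseconds, so java.time.ZonedDateTime values coming out of scripts now get the same treatment as Joda ReadableInstant. A small self-contained check of that conversion:

```java
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

public final class EpochMillisSketch {
    // Mirrors the new branch: date-like script return values become epoch millis.
    static long toMillis(Object o) {
        if (o instanceof ZonedDateTime) {
            return ((ZonedDateTime) o).toInstant().toEpochMilli();
        }
        if (o instanceof Number) {
            return ((Number) o).longValue();
        }
        throw new IllegalArgumentException("unsupported date value: " + o);
    }

    public static void main(String[] args) {
        ZonedDateTime zdt = Instant.ofEpochMilli(1_500_000_000_000L).atZone(ZoneOffset.UTC);
        System.out.println(toMillis(zdt)); // 1500000000000
    }
}
```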
a/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java b/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java index 90e35c34e28..ccab5e2cb93 100644 --- a/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java @@ -247,6 +247,6 @@ public class CollapseBuilder implements Writeable, ToXContentObject { + field + "`, " + "only indexed field can retrieve `inner_hits`"); } - return new CollapseContext(fieldType, innerHits); + return new CollapseContext(field, fieldType, innerHits); } } diff --git a/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java b/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java index 95fee901a30..82a7657f180 100644 --- a/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java +++ b/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java @@ -25,26 +25,31 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.InnerHitBuilder; -import java.util.Collections; import java.util.List; /** * Context used for field collapsing */ public class CollapseContext { + private final String fieldName; private final MappedFieldType fieldType; private final List innerHits; - public CollapseContext(MappedFieldType fieldType, InnerHitBuilder innerHit) { - this.fieldType = fieldType; - this.innerHits = Collections.singletonList(innerHit); - } - - public CollapseContext(MappedFieldType fieldType, List innerHits) { + public CollapseContext(String fieldName, + MappedFieldType fieldType, + List innerHits) { + this.fieldName = fieldName; this.fieldType = fieldType; this.innerHits = innerHits; } + /** + * The requested field name to collapse on. 
+ */ + public String getFieldName() { + return fieldName; + } + /** The field type used for collapsing **/ public MappedFieldType getFieldType() { return fieldType; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java index a1562e118fb..3ef3064697a 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java @@ -61,7 +61,7 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase { if (context.collapse() != null) { // retrieve the `doc_value` associated with the collapse field - String name = context.collapse().getFieldType().name(); + String name = context.collapse().getFieldName(); if (context.docValueFieldsContext() == null) { context.docValueFieldsContext(new DocValueFieldsContext( Collections.singletonList(new FieldAndFormat(name, DocValueFieldsContext.USE_DEFAULT_FORMAT)))); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java index 049de439ac7..9483e76d072 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java @@ -220,7 +220,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder (SiblingPipelineAggregator) a) .collect(Collectors.toList()); if (in.readBoolean()) { - suggest = Suggest.readSuggest(in); + suggest = new Suggest(in); } searchTimedOut = in.readBoolean(); terminatedEarly = in.readOptionalBoolean(); diff --git a/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java b/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java index e4099193359..c957b165027 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java @@ -20,18 +20,18 @@ package org.elasticsearch.search.suggest; import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.SetOnce; +import org.elasticsearch.Version; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; -import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -53,16 +53,15 @@ import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Objects; import java.util.stream.Collectors; -import static 
org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; /** * Top level suggest result, containing the result for each suggestion. */ -public class Suggest implements Iterable>>, Streamable, ToXContentFragment { +public class Suggest implements Iterable>>, Writeable, ToXContentFragment { public static final String NAME = "suggest"; @@ -92,6 +91,40 @@ public class Suggest implements Iterable(size); + for (int i = 0; i < size; i++) { + Suggestion> suggestion; + final int type = in.readVInt(); + switch (type) { + case TermSuggestion.TYPE: + suggestion = new TermSuggestion(in); + break; + case CompletionSuggestion.TYPE: + suggestion = new CompletionSuggestion(in); + break; + case PhraseSuggestion.TYPE: + suggestion = new PhraseSuggestion(in); + break; + default: + throw new IllegalArgumentException("Unknown suggestion type with ordinal " + type); + } + suggestions.add(suggestion); + } + } else { + int suggestionCount = in.readVInt(); + suggestions = new ArrayList<>(suggestionCount); + for (int i = 0; i < suggestionCount; i++) { + suggestions.add(in.readNamedWriteable(Suggestion.class)); + } + } + + hasScoreDocs = filter(CompletionSuggestion.class).stream().anyMatch(CompletionSuggestion::hasScoreDocs); + } + @Override public Iterator>> iterator() { return suggestions.iterator(); @@ -125,42 +158,20 @@ public class Suggest implements Iterable(size); - for (int i = 0; i < size; i++) { - // TODO: remove these complicated generics - Suggestion> suggestion; - final int type = in.readVInt(); - switch (type) { - case TermSuggestion.TYPE: - suggestion = new TermSuggestion(); - break; - case CompletionSuggestion.TYPE: - suggestion = new CompletionSuggestion(); - break; - case 2: // CompletionSuggestion.TYPE - throw new IllegalArgumentException("Completion suggester 2.x is not supported anymore"); - case PhraseSuggestion.TYPE: - suggestion = new PhraseSuggestion(); - break; - default: - suggestion = new Suggestion(); - break; - } - suggestion.readFrom(in); - suggestions.add(suggestion); - } - hasScoreDocs = filter(CompletionSuggestion.class).stream().anyMatch(CompletionSuggestion::hasScoreDocs); - } - @Override public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(suggestions.size()); - for (Suggestion command : suggestions) { - out.writeVInt(command.getWriteableType()); - command.writeTo(out); + // in older versions, Suggestion types were serialized as Streamable + if (out.getVersion().before(Version.V_7_0_0_alpha1)) { + out.writeVInt(suggestions.size()); + for (Suggestion command : suggestions) { + out.writeVInt(command.getWriteableType()); + command.writeTo(out); + } + } else { + out.writeVInt(suggestions.size()); + for (Suggestion> suggestion : suggestions) { + out.writeNamedWriteable(suggestion); + } } } @@ -195,12 +206,6 @@ public class Suggest implements Iterable>> reduce(Map> groupedSuggestions) { List>> reduced = new ArrayList<>(groupedSuggestions.size()); for (java.util.Map.Entry> unmergedResults : groupedSuggestions.entrySet()) { @@ -232,10 +237,27 @@ public class Suggest implements Iterable implements Iterable, Streamable, ToXContentFragment { + public abstract static class Suggestion implements Iterable, NamedWriteable, ToXContentFragment { private static final String NAME = "suggestion"; @@ -252,6 +274,24 @@ public class Suggest implements Iterable iterator() { return 
entries.iterator(); @@ -346,57 +380,67 @@ public class Suggest implements Iterable entry : entries) { entry.writeTo(out); } } - public void innerWriteTo(StreamOutput out) throws IOException { - out.writeString(name); - out.writeVInt(size); - } + @Override + public abstract String getWriteableName(); @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { if (params.paramAsBoolean(RestSearchAction.TYPED_KEYS_PARAM, false)) { // Concatenates the type and the name of the suggestion (ex: completion#foo) - builder.startArray(String.join(Aggregation.TYPED_KEYS_DELIMITER, getType(), getName())); + builder.startArray(String.join(Aggregation.TYPED_KEYS_DELIMITER, getWriteableName(), getName())); } else { builder.startArray(getName()); } for (Entry entry : entries) { + builder.startObject(); entry.toXContent(builder, params); + builder.endObject(); } builder.endArray(); return builder; } + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + Suggestion otherSuggestion = (Suggestion) other; + return Objects.equals(name, otherSuggestion.name) + && Objects.equals(size, otherSuggestion.size) + && Objects.equals(entries, otherSuggestion.entries); + } + + @Override + public int hashCode() { + return Objects.hash(name, size, entries); + } + @SuppressWarnings("unchecked") public static Suggestion> fromXContent(XContentParser parser) throws IOException { ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation); @@ -417,7 +461,7 @@ public class Suggest implements Iterable implements Iterable, Streamable, ToXContentObject { + public abstract static class Entry implements Iterable, Writeable, ToXContentFragment { private static final String TEXT = "text"; private static final String OFFSET = "offset"; @@ -436,7 +480,18 @@ public class Suggest implements Iterable(suggestedWords); + for (int j = 0; j < suggestedWords; j++) { + O newOption = newOption(in); + options.add(newOption); + } } public void addOption(O option) { @@ -534,44 +589,27 @@ public class Suggest implements Iterable entry = (Entry) o; - - if (length != entry.length) return false; - if (offset != entry.offset) return false; - if (!this.text.equals(entry.text)) return false; - - return true; + return Objects.equals(length, entry.length) + && Objects.equals(offset, entry.offset) + && Objects.equals(text, entry.text) + && Objects.equals(options, entry.options); } @Override public int hashCode() { - int result = text.hashCode(); - result = 31 * result + offset; - result = 31 * result + length; - return result; + return Objects.hash(text, offset, length, options); } - @Override - public void readFrom(StreamInput in) throws IOException { - text = in.readText(); - offset = in.readVInt(); - length = in.readVInt(); - int suggestedWords = in.readVInt(); - options = new ArrayList<>(suggestedWords); - for (int j = 0; j < suggestedWords; j++) { - O newOption = newOption(); - newOption.readFrom(in); - options.add(newOption); - } - } - - @SuppressWarnings("unchecked") - protected O newOption(){ - return (O) new Option(); - } + protected abstract O newOption(); + protected abstract O newOption(StreamInput in) throws IOException; @Override public void writeTo(StreamOutput out) throws IOException { @@ -586,40 +624,29 @@ public class Suggest implements Iterable, Void> PARSER = new ObjectParser<>("SuggestionEntryParser", true, Entry::new); - - static { - 
declareCommonFields(PARSER); - PARSER.declareObjectArray(Entry::addOptions, (p,c) -> Option.fromXContent(p), new ParseField(OPTIONS)); - } - protected static void declareCommonFields(ObjectParser, Void> parser) { parser.declareString((entry, text) -> entry.text = new Text(text), new ParseField(TEXT)); parser.declareInt((entry, offset) -> entry.offset = offset, new ParseField(OFFSET)); parser.declareInt((entry, length) -> entry.length = length, new ParseField(LENGTH)); } - public static Entry fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - /** * Contains the suggested text with its document frequency and score. */ - public static class Option implements Streamable, ToXContentObject { + public abstract static class Option implements Writeable, ToXContentFragment { public static final ParseField TEXT = new ParseField("text"); public static final ParseField HIGHLIGHTED = new ParseField("highlighted"); @@ -646,7 +673,13 @@ public class Suggest implements Iterable PARSER = new ConstructingObjectParser<>("SuggestOptionParser", - true, args -> { - Text text = new Text((String) args[0]); - float score = (Float) args[1]; - String highlighted = (String) args[2]; - Text highlightedText = highlighted == null ? null : new Text(highlighted); - Boolean collateMatch = (Boolean) args[3]; - return new Option(text, highlightedText, score, collateMatch); - }); - - static { - PARSER.declareString(constructorArg(), TEXT); - PARSER.declareFloat(constructorArg(), SCORE); - PARSER.declareString(optionalConstructorArg(), HIGHLIGHTED); - PARSER.declareBoolean(optionalConstructorArg(), COLLATE_MATCH); - } - - public static Option fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - protected void mergeInto(Option otherOption) { score = Math.max(score, otherOption.score); if (otherOption.collateMatch != null) { @@ -751,18 +750,25 @@ public class Suggest implements Iterable { - public static final String NAME = "completion"; - + @Deprecated public static final int TYPE = 4; private boolean skipDuplicates; @@ -86,14 +85,18 @@ public final class CompletionSuggestion extends Suggest.Suggestion 0; } + @Override + public boolean equals(Object other) { + return super.equals(other) + && Objects.equals(skipDuplicates, ((CompletionSuggestion) other).skipDuplicates); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), skipDuplicates); + } + public static CompletionSuggestion fromXContent(XContentParser parser, String name) throws IOException { CompletionSuggestion suggestion = new CompletionSuggestion(name, -1, false); parseEntries(parser, suggestion, CompletionSuggestion.Entry::fromXContent); @@ -222,13 +236,13 @@ public final class CompletionSuggestion extends Suggest.Suggestion { @@ -237,7 +251,10 @@ public final class CompletionSuggestion extends Suggest.Suggestion PARSER = new ObjectParser<>("CompletionSuggestionEntryParser", true, Entry::new); @@ -274,6 +296,25 @@ public final class CompletionSuggestion extends Suggest.Suggestion(contextSize); + for (int i = 0; i < contextSize; i++) { + String contextName = in.readString(); + int nContexts = in.readVInt(); + Set contexts = new HashSet<>(nContexts); + for (int j = 0; j < nContexts; j++) { + contexts.add(in.readString()); + } + this.contexts.put(contextName, contexts); + } + } + @Override protected void mergeInto(Suggest.Suggestion.Entry.Option otherOption) { // Completion suggestions are reduced by @@ -302,7 +343,7 @@ public final class CompletionSuggestion extends 
Suggest.Suggestion(contextSize); - for (int i = 0; i < contextSize; i++) { - String contextName = in.readString(); - int nContexts = in.readVInt(); - Set contexts = new HashSet<>(nContexts); - for (int j = 0; j < nContexts; j++) { - contexts.add(in.readString()); - } - this.contexts.put(contextName, contexts); - } - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 25f2f7fa382..6c84379c3df 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -59,10 +59,12 @@ import java.util.Objects; public class CompletionSuggestionBuilder extends SuggestionBuilder { private static final XContentType CONTEXT_BYTES_XCONTENT_TYPE = XContentType.JSON; - static final String SUGGESTION_NAME = "completion"; + static final ParseField CONTEXTS_FIELD = new ParseField("contexts", "context"); static final ParseField SKIP_DUPLICATES_FIELD = new ParseField("skip_duplicates"); + public static final String SUGGESTION_NAME = "completion"; + /** * { * "field" : STRING diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java index 1aa82eeb219..0d0c7e94589 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.suggest.completion.context; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentFragment; @@ -28,6 +29,8 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.CompletionFieldMapper; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.ParseContext; import java.io.IOException; @@ -35,6 +38,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.Set; +import java.util.function.Function; /** * A {@link ContextMapping} defines criteria that can be used to @@ -131,6 +135,31 @@ public abstract class ContextMapping implements ToXContent */ protected abstract XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException; + /** + * Checks if the current context is consistent with the rest of the fields. For example, the GeoContext + * should check that the field that it points to has the correct type. 
+ */ + protected void validateReferences(Version indexVersionCreated, Function fieldResolver) { + // No validation is required by default + } + + /** + * Verifies that all field paths specified in contexts point to the fields with correct mappings + */ + public static void validateContextPaths(Version indexVersionCreated, List fieldMappers, + Function fieldResolver) { + for (FieldMapper fieldMapper : fieldMappers) { + if (CompletionFieldMapper.CONTENT_TYPE.equals(fieldMapper.typeName())) { + CompletionFieldMapper.CompletionFieldType fieldType = ((CompletionFieldMapper) fieldMapper).fieldType(); + if (fieldType.hasContextMappings()) { + for (ContextMapping context : fieldType.getContextMappings()) { + context.validateReferences(indexVersionCreated, fieldResolver); + } + } + } + } + } + @Override public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(FIELD_NAME, name); diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java index 3c0f0e80ceb..b4c3276b946 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java @@ -37,6 +37,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -50,7 +51,7 @@ import static org.elasticsearch.search.suggest.completion.context.ContextMapping * and creates context queries for defined {@link ContextMapping}s * for a {@link CompletionFieldMapper} */ -public class ContextMappings implements ToXContent { +public class ContextMappings implements ToXContent, Iterable> { private final List> contextMappings; private final Map> contextNameMap; @@ -97,6 +98,11 @@ public class ContextMappings implements ToXContent { document.add(new TypedContextField(name, input, weight, contexts, document)); } + @Override + public Iterator> iterator() { + return contextMappings.iterator(); + } + /** * Field prepends context values with a suggestion * Context values are associated with a type, denoted by diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java index 48aaf705099..938c4963620 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java @@ -19,12 +19,17 @@ package org.elasticsearch.search.suggest.completion.context; +import org.apache.logging.log4j.LogManager; +import org.apache.lucene.document.LatLonDocValuesField; +import org.apache.lucene.document.LatLonPoint; import org.apache.lucene.document.StringField; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentParser; @@ -42,6 +47,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.Function; import java.util.stream.Collectors; import static org.elasticsearch.common.geo.GeoHashUtils.addNeighbors; @@ -69,6 +75,8 @@ public class GeoContextMapping extends ContextMapping { static final String CONTEXT_PRECISION = "precision"; static final String CONTEXT_NEIGHBOURS = "neighbours"; + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(GeoContextMapping.class)); + private final int precision; private final String fieldName; @@ -205,11 +213,11 @@ public class GeoContextMapping extends ContextMapping { for (IndexableField field : fields) { if (field instanceof StringField) { spare.resetFromString(field.stringValue()); - } else { - // todo return this to .stringValue() once LatLonPoint implements it + geohashes.add(spare.geohash()); + } else if (field instanceof LatLonPoint || field instanceof LatLonDocValuesField) { spare.resetFromIndexableField(field); + geohashes.add(spare.geohash()); } - geohashes.add(spare.geohash()); } } } @@ -279,6 +287,32 @@ public class GeoContextMapping extends ContextMapping { return internalQueryContextList; } + @Override + protected void validateReferences(Version indexVersionCreated, Function fieldResolver) { + if (fieldName != null) { + MappedFieldType mappedFieldType = fieldResolver.apply(fieldName); + if (mappedFieldType == null) { + if (indexVersionCreated.before(Version.V_7_0_0_alpha1)) { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("geo_context_mapping", + "field [{}] referenced in context [{}] is not defined in the mapping", fieldName, name); + } else { + throw new ElasticsearchParseException( + "field [{}] referenced in context [{}] is not defined in the mapping", fieldName, name); + } + } else if (GeoPointFieldMapper.CONTENT_TYPE.equals(mappedFieldType.typeName()) == false) { + if (indexVersionCreated.before(Version.V_7_0_0_alpha1)) { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("geo_context_mapping", + "field [{}] referenced in context [{}] must be mapped to geo_point, found [{}]", + fieldName, name, mappedFieldType.typeName()); + } else { + throw new ElasticsearchParseException( + "field [{}] referenced in context [{}] must be mapped to geo_point, found [{}]", + fieldName, name, mappedFieldType.typeName()); + } + } + } + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java index 670dac75ab7..3ff6774ff5c 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java @@ -133,9 +133,9 @@ public final class PhraseSuggester extends Suggester { highlighted = new Text(spare.toString()); } if (collatePrune) { - resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score), collateMatch)); + resultEntry.addOption(new PhraseSuggestion.Entry.Option(phrase, highlighted, (float) (correction.score), collateMatch)); } else { - resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score))); + resultEntry.addOption(new PhraseSuggestion.Entry.Option(phrase, highlighted, (float) (correction.score))); } } } else { diff --git 
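Aside on the GeoContextMapping hunk above: the new validateReferences applies the usual breaking-change pattern, where a bad context mapping only logs a deprecation warning for indices created before 7.0.0-alpha1 but fails hard for newer ones. A hedged, simplified rendering of that gate (the types below are stand-ins, not the real mapper or logging API):

```java
final class VersionGatedValidationSketch {
    interface DeprecationLog { void deprecated(String msg); }

    static void validate(boolean createdBeforeBreakingVersion, boolean fieldIsGeoPoint,
                         DeprecationLog deprecationLogger) {
        if (fieldIsGeoPoint) {
            return; // mapping is consistent, nothing to report
        }
        String msg = "context field must be mapped to geo_point";
        if (createdBeforeBreakingVersion) {
            deprecationLogger.deprecated(msg); // old indices keep working, with a warning
        } else {
            throw new IllegalArgumentException(msg); // new indices fail fast
        }
    }
}
```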
a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestion.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestion.java index bd6c828bd42..39932049dfd 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestion.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestion.java @@ -23,41 +23,55 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ContextParser; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.Suggest.Suggestion; import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * Suggestion entry returned from the {@link PhraseSuggester}. */ public class PhraseSuggestion extends Suggest.Suggestion { - public static final String NAME = "phrase"; + @Deprecated public static final int TYPE = 3; - public PhraseSuggestion() { - } + public PhraseSuggestion() {} public PhraseSuggestion(String name, int size) { super(name, size); } + public PhraseSuggestion(StreamInput in) throws IOException { + super(in); + } + + @Override + public String getWriteableName() { + return PhraseSuggestionBuilder.SUGGESTION_NAME; + } + @Override public int getWriteableType() { return TYPE; } @Override - protected String getType() { - return NAME; + protected Entry newEntry() { + return new Entry(); } @Override - protected Entry newEntry() { - return new Entry(); + protected Entry newEntry(StreamInput in) throws IOException { + return new Entry(in); } public static PhraseSuggestion fromXContent(XContentParser parser, String name) throws IOException { @@ -66,7 +80,7 @@ public class PhraseSuggestion extends Suggest.Suggestion return suggestion; } - public static class Entry extends Suggestion.Entry { + public static class Entry extends Suggestion.Entry { protected double cutoffScore = Double.MIN_VALUE; @@ -75,7 +89,15 @@ public class PhraseSuggestion extends Suggest.Suggestion this.cutoffScore = cutoffScore; } - Entry() { + public Entry(Text text, int offset, int length) { + super(text, offset, length); + } + + Entry() {} + + public Entry(StreamInput in) throws IOException { + super(in); + cutoffScore = in.readDouble(); } /** @@ -86,7 +108,7 @@ public class PhraseSuggestion extends Suggest.Suggestion } @Override - protected void merge(Suggestion.Entry other) { + protected void merge(Suggestion.Entry