From 03ce3dd4a40d0bdb747695e0d6916310a9c7da1e Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 13 Apr 2018 09:31:06 -0400 Subject: [PATCH 01/13] Enable skipping fetching latest for BWC builds (#29497) The BWC builds always fetch the latest from the elastic/elasticsearch repository for the BWC branches. Yet, there are use-cases for using the local checkout without fetching the latest. This commit enables these use-cases by adding a tests.bwc.git_fetch_latest property to skip the fetches. --- TESTING.asciidoc | 7 +++++++ distribution/bwc/build.gradle | 12 +++++++++++- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 97902d56ec8..2e4a6ede754 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -498,6 +498,13 @@ will contain your change. . Push both branches to your remote repository. . Run the tests with `./gradlew check -Dtests.bwc.remote=${remote} -Dtests.bwc.refspec.5.x=index_req_bwc_5.x`. +== Skip fetching latest + +For some BWC testing scenarios, you want to use the local clone of the +repository without fetching latest. For these use cases, you can set the system +property `tests.bwc.git_fetch_latest` to `false` and the BWC builds will skip +fetching the latest from the remote. + == Test coverage analysis Generating test coverage reports for Elasticsearch is currently not possible through Gradle. 
diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle index 8d5aa204c48..3e6a2028498 100644 --- a/distribution/bwc/build.gradle +++ b/distribution/bwc/build.gradle @@ -54,6 +54,16 @@ subprojects { final String remote = System.getProperty("tests.bwc.remote", "elastic") + final boolean gitFetchLatest + final String gitFetchLatestProperty = System.getProperty("tests.bwc.git_fetch_latest", "true") + if ("true".equals(gitFetchLatestProperty)) { + gitFetchLatest = true + } else if ("false".equals(gitFetchLatestProperty)) { + gitFetchLatest = false + } else { + throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + gitFetchLatestProperty + "]") + } + task createClone(type: LoggedExec) { onlyIf { checkoutDir.exists() == false } commandLine = ['git', 'clone', rootDir, checkoutDir] @@ -83,7 +93,7 @@ subprojects { } task fetchLatest(type: LoggedExec) { - onlyIf { project.gradle.startParameter.isOffline() == false } + onlyIf { project.gradle.startParameter.isOffline() == false && gitFetchLatest } dependsOn addRemote workingDir = checkoutDir commandLine = ['git', 'fetch', '--all'] From 485d5d19bcee46dd180881428152cd19759bf905 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 13 Apr 2018 17:03:09 +0200 Subject: [PATCH 02/13] Mute TranslogTests#testFatalIOExceptionsWhileWritingConcurrently This test has been failing quite a few times with a suite timeout, opened #29509 for it. 
--- .../java/org/elasticsearch/index/translog/TranslogTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index b3b9fca886e..8899dca24b1 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -1812,6 +1812,7 @@ public class TranslogTests extends ESTestCase { assertTrue(translog.getTragicException() instanceof UnknownException); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29509") public void testFatalIOExceptionsWhileWritingConcurrently() throws IOException, InterruptedException { Path tempDir = createTempDir(); final FailSwitch fail = new FailSwitch(); From 27fafa24f569b098eeb524406a89e04627df0935 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 13 Apr 2018 12:41:18 -0400 Subject: [PATCH 03/13] Use proper Java version for BWC builds (#29493) Today we have JAVA_HOME for the compiler Java home and RUNTIME_JAVA_HOME for the test Java home. However, when we compile BWC nodes and run them, neither of these Java homes might be the version that was suitable for that BWC node (e.g., 5.6 requires JDK 8 to compile and to run). This commit adds support for the environment variables JAVA\d+_HOME and uses the appropriate Java home based on the version of the node being started. We even do this for reindex-from-old which requires JDK 7 for these very old nodes. Note that these environment variables are not required if not running BWC tests, and they are strictly required if running BWC tests. 
--- .../elasticsearch/gradle/BuildPlugin.groovy | 33 +++++++++++++++++++ .../elasticsearch/gradle/test/NodeInfo.groovy | 22 ++++++++++++- distribution/bwc/build.gradle | 19 +++++++---- qa/reindex-from-old/build.gradle | 10 +++--- 4 files changed, 73 insertions(+), 11 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 50e1cd68523..444f2283be4 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -97,6 +97,12 @@ class BuildPlugin implements Plugin { String compilerJavaHome = findCompilerJavaHome() String runtimeJavaHome = findRuntimeJavaHome(compilerJavaHome) File gradleJavaHome = Jvm.current().javaHome + + final Map javaVersions = [:] + for (int version = 7; version <= Integer.parseInt(minimumCompilerVersion.majorVersion); version++) { + javaVersions.put(version, findJavaHome(version)); + } + String javaVendor = System.getProperty('java.vendor') String javaVersion = System.getProperty('java.version') String gradleJavaVersionDetails = "${javaVendor} ${javaVersion}" + @@ -158,10 +164,32 @@ class BuildPlugin implements Plugin { throw new GradleException(message) } + for (final Map.Entry javaVersionEntry : javaVersions.entrySet()) { + final String javaHome = javaVersionEntry.getValue() + if (javaHome == null) { + continue + } + JavaVersion javaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(project, javaHome)) + final JavaVersion expectedJavaVersionEnum + final int version = javaVersionEntry.getKey() + if (version < 9) { + expectedJavaVersionEnum = JavaVersion.toVersion("1." 
+ version) + } else { + expectedJavaVersionEnum = JavaVersion.toVersion(Integer.toString(version)) + } + if (javaVersionEnum != expectedJavaVersionEnum) { + final String message = + "the environment variable JAVA" + version + "_HOME must be set to a JDK installation directory for Java" + + " ${expectedJavaVersionEnum} but is [${javaHome}] corresponding to [${javaVersionEnum}]" + throw new GradleException(message) + } + } + project.rootProject.ext.compilerJavaHome = compilerJavaHome project.rootProject.ext.runtimeJavaHome = runtimeJavaHome project.rootProject.ext.compilerJavaVersion = compilerJavaVersionEnum project.rootProject.ext.runtimeJavaVersion = runtimeJavaVersionEnum + project.rootProject.ext.javaVersions = javaVersions project.rootProject.ext.buildChecksDone = true } @@ -173,6 +201,7 @@ class BuildPlugin implements Plugin { project.ext.runtimeJavaHome = project.rootProject.ext.runtimeJavaHome project.ext.compilerJavaVersion = project.rootProject.ext.compilerJavaVersion project.ext.runtimeJavaVersion = project.rootProject.ext.runtimeJavaVersion + project.ext.javaVersions = project.rootProject.ext.javaVersions } private static String findCompilerJavaHome() { @@ -188,6 +217,10 @@ class BuildPlugin implements Plugin { return javaHome } + private static String findJavaHome(int version) { + return System.getenv('JAVA' + version + '_HOME') + } + private static String findRuntimeJavaHome(final String compilerJavaHome) { assert compilerJavaHome != null return System.getenv('RUNTIME_JAVA_HOME') ?: compilerJavaHome diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index 686233bdfe7..2898df0445f 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -22,7 +22,9 @@ import com.sun.jna.Native import com.sun.jna.WString import 
org.apache.tools.ant.taskdefs.condition.Os import org.elasticsearch.gradle.Version +import org.gradle.api.GradleException import org.gradle.api.InvalidUserDataException +import org.gradle.api.JavaVersion import org.gradle.api.Project import java.nio.file.Files @@ -162,7 +164,25 @@ class NodeInfo { args.add("${esScript}") } - env = ['JAVA_HOME': project.runtimeJavaHome] + final String javaHome + final Map javaVersions = project.javaVersions + if (Version.fromString(nodeVersion).before("6.2.0")) { + final String java8Home = javaVersions.get(8) + if (java8Home == null) { + throw new GradleException("JAVA8_HOME must be set to run BWC tests against [" + nodeVersion + "]") + } + javaHome = java8Home + } else if (Version.fromString(nodeVersion).onOrAfter("6.2.0") && Version.fromString(nodeVersion).before("6.3.0")) { + final String java9Home = javaVersions.get(9) + if (java9Home == null) { + throw new GradleException("JAVA9_HOME must be set to run BWC tests against [" + nodeVersion + "]") + } + javaHome = java9Home + } else { + javaHome = project.compilerJavaHome + } + + env = ['JAVA_HOME':javaHome] args.addAll("-E", "node.portsfile=true") String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ") String esJavaOpts = config.jvmArgs.isEmpty() ? collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle index 3e6a2028498..7b4c9e959a9 100644 --- a/distribution/bwc/build.gradle +++ b/distribution/bwc/build.gradle @@ -144,12 +144,19 @@ subprojects { task buildBwcVersion(type: Exec) { dependsOn checkoutBwcBranch, writeBuildMetadata workingDir = checkoutDir - if (project.rootProject.ext.runtimeJavaVersion == JavaVersion.VERSION_1_8 && ["5.6", "6.0", "6.1"].contains(bwcBranch)) { - /* - * If runtime Java home is set to JDK 8 and we are building branches that are officially built with JDK 8, push this to JAVA_HOME for - * these builds. 
- */ - environment('JAVA_HOME', System.getenv('RUNTIME_JAVA_HOME')) + if (["5.6", "6.0", "6.1"].contains(bwcBranch)) { + // we are building branches that are officially built with JDK 8, push JAVA8_HOME to JAVA_HOME for these builds + if (project.javaVersions.get(8) == null) { + throw new GradleException("JAVA8_HOME is required to build BWC versions for BWC branch [" + bwcBranch + "]") + } + environment('JAVA_HOME', project.javaVersions.get(8)) + } else if ("6.2".equals(bwcBranch)) { + if (project.javaVersions.get(9) == null) { + throw new GradleException("JAVA9_HOME is required to build BWC versions for BWC branch [" + bwcBranch + "]") + } + environment('JAVA_HOME', project.javaVersions.get(9)) + } else { + environment('JAVA_HOME', project.compilerJavaHome) } if (Os.isFamily(Os.FAMILY_WINDOWS)) { executable 'cmd' diff --git a/qa/reindex-from-old/build.gradle b/qa/reindex-from-old/build.gradle index c9388c42bf5..4fe481543c3 100644 --- a/qa/reindex-from-old/build.gradle +++ b/qa/reindex-from-old/build.gradle @@ -51,11 +51,13 @@ dependencies { es090 'org.elasticsearch:elasticsearch:0.90.13@zip' } -if (project.runtimeJavaVersion >= JavaVersion.VERSION_1_9 || Os.isFamily(Os.FAMILY_WINDOWS)) { - /* We can't run the dependencies with Java 9 so for now we'll skip the whole - * thing. We can't get the pid files in windows so we skip that as well.... */ +if (Os.isFamily(Os.FAMILY_WINDOWS)) { + // we can't get the pid files in windows so we skip that integTest.enabled = false } else { + if (project.javaVersions.get(7) == null) { + throw new GradleException("JAVA7_HOME must be set to run reindex-from-old") + } /* Set up tasks to unzip and run the old versions of ES before running the * integration tests. 
*/ for (String version : ['2', '1', '090']) { @@ -75,7 +77,7 @@ if (project.runtimeJavaVersion >= JavaVersion.VERSION_1_9 || Os.isFamily(Os.FAMI dependsOn unzip executable = new File(project.runtimeJavaHome, 'bin/java') env 'CLASSPATH', "${ -> project.configurations.oldesFixture.asPath }" - env 'JAVA_HOME', project.runtimeJavaHome + env 'JAVA_HOME', project.javaVersions.get(7) args 'oldes.OldElasticsearch', baseDir, unzip.temporaryDir, From 82a753dcc7265e674b9e0cc35b0f0da8ce04851b Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 13 Apr 2018 15:59:33 -0400 Subject: [PATCH 04/13] Enable license header exclusions (#29379) There are some scenarios where the license on a source file is one that is compatible with our projects yet we do not want to add the license to the list of approved license headers (to keep the number of files with that compatible license contained). This commit adds the ability to exclude a file from the license check. --- .../gradle/precommit/LicenseHeadersTask.groovy | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy index fccbb43e872..45f60c35bf6 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy @@ -49,6 +49,14 @@ public class LicenseHeadersTask extends AntTask { @Input List approvedLicenses = ['Apache', 'Generated'] + /** + * Files that should be excluded from the license header check. Use with extreme care, only in situations where the license on the + * source file is compatible with the codebase but we do not want to add the license to the list of approved headers (to avoid the + * possibility of inadvertently using the license on our own source files). 
+ */ + @Input + List excludes = [] + /** * Additional license families that may be found. The key is the license category name (5 characters), * followed by the family name and the value list of patterns to search for. @@ -95,7 +103,7 @@ public class LicenseHeadersTask extends AntTask { for (File dir: dirSet.srcDirs) { // sometimes these dirs don't exist, e.g. site-plugin has no actual java src/main... if (dir.exists()) { - ant.fileset(dir: dir) + ant.fileset(dir: dir, excludes: excludes.join(' ')) } } } From 85ac541ab3dc4af61e94cdf9addb137dd76181f6 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 13 Apr 2018 16:57:59 -0400 Subject: [PATCH 05/13] Make NodeInfo#nodeVersion strongly-typed as Version (#29515) Today we have a nodeVersion property on the NodeInfo class that we use to carry around information about a standalone node that we will start during tests. This property is a String which we usually end up parsing to a Version anyway to do various checks on it. This can end up happening a lot during configuration so it would be more efficient and safer to have this already be strongly-typed as a Version and parsed from a String only once for each instance of NodeInfo. Therefore, this commit makes NodeInfo#nodeVersion strongly-typed as a Version. 
--- build.gradle | 6 ++--- .../elasticsearch/gradle/BuildPlugin.groovy | 15 ++++++------ .../org/elasticsearch/gradle/Version.groovy | 24 +++++++++++++++---- .../gradle/VersionProperties.groovy | 4 ++-- .../gradle/doc/DocsTestPlugin.groovy | 2 +- .../gradle/plugin/PluginPropertiesTask.groovy | 2 +- .../gradle/test/ClusterConfiguration.groovy | 3 ++- .../gradle/test/ClusterFormationTasks.groovy | 15 +++++++----- .../elasticsearch/gradle/test/NodeInfo.groovy | 12 +++++----- 9 files changed, 52 insertions(+), 31 deletions(-) diff --git a/build.gradle b/build.gradle index dce2adf5ee0..ae472cc1340 100644 --- a/build.gradle +++ b/build.gradle @@ -36,7 +36,7 @@ import java.security.MessageDigest // common maven publishing configuration subprojects { group = 'org.elasticsearch' - version = VersionProperties.elasticsearch + version = VersionProperties.elasticsearch.toString() description = "Elasticsearch subproject ${project.path}" } @@ -80,7 +80,7 @@ configure(subprojects.findAll { it.projectDir.toPath().startsWith(rootPath) }) { * in a branch if there are only betas and rcs in the branch so we have * *something* to test against. */ VersionCollection versions = new VersionCollection(file('server/src/main/java/org/elasticsearch/Version.java').readLines('UTF-8')) -if (versions.currentVersion.toString() != VersionProperties.elasticsearch) { +if (versions.currentVersion != VersionProperties.elasticsearch) { throw new GradleException("The last version in Versions.java [${versions.currentVersion}] does not match " + "VersionProperties.elasticsearch [${VersionProperties.elasticsearch}]") } @@ -245,7 +245,7 @@ subprojects { // other packages (e.g org.elasticsearch.client) will point to server rather than // their own artifacts. if (project.plugins.hasPlugin(BuildPlugin)) { - String artifactsHost = VersionProperties.elasticsearch.endsWith("-SNAPSHOT") ? 
"https://snapshots.elastic.co" : "https://artifacts.elastic.co" + String artifactsHost = VersionProperties.elasticsearch.toString().endsWith("-SNAPSHOT") ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co" Closure sortClosure = { a, b -> b.group <=> a.group } Closure depJavadocClosure = { dep -> if (dep.group != null && dep.group.startsWith('org.elasticsearch')) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 444f2283be4..46dd3b902dc 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -550,17 +550,18 @@ class BuildPlugin implements Plugin { jarTask.destinationDir = new File(project.buildDir, 'distributions') // fixup the jar manifest jarTask.doFirst { - boolean isSnapshot = VersionProperties.elasticsearch.endsWith("-SNAPSHOT"); - String version = VersionProperties.elasticsearch; - if (isSnapshot) { - version = version.substring(0, version.length() - 9) - } + final Version versionWithoutSnapshot = new Version( + VersionProperties.elasticsearch.major, + VersionProperties.elasticsearch.minor, + VersionProperties.elasticsearch.revision, + VersionProperties.elasticsearch.suffix, + false) // this doFirst is added before the info plugin, therefore it will run // after the doFirst added by the info plugin, and we can override attributes jarTask.manifest.attributes( - 'X-Compile-Elasticsearch-Version': version, + 'X-Compile-Elasticsearch-Version': versionWithoutSnapshot, 'X-Compile-Lucene-Version': VersionProperties.lucene, - 'X-Compile-Elasticsearch-Snapshot': isSnapshot, + 'X-Compile-Elasticsearch-Snapshot': VersionProperties.elasticsearch.isSnapshot(), 'Build-Date': ZonedDateTime.now(ZoneOffset.UTC), 'Build-Java-Version': project.compilerJavaVersion) if (jarTask.manifest.attributes.containsKey('Change') == false) { diff --git 
a/buildSrc/src/main/groovy/org/elasticsearch/gradle/Version.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/Version.groovy index 419d3792bb6..c28738d7695 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/Version.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/Version.groovy @@ -74,20 +74,36 @@ public class Version { return "${major}.${minor}.${revision}${suffix}${snapshotStr}" } + public boolean before(Version compareTo) { + return id < compareTo.id + } + public boolean before(String compareTo) { - return id < fromString(compareTo).id + return before(fromString(compareTo)) + } + + public boolean onOrBefore(Version compareTo) { + return id <= compareTo.id } public boolean onOrBefore(String compareTo) { - return id <= fromString(compareTo).id + return onOrBefore(fromString(compareTo)) + } + + public boolean onOrAfter(Version compareTo) { + return id >= compareTo.id } public boolean onOrAfter(String compareTo) { - return id >= fromString(compareTo).id + return onOrAfter(fromString(compareTo)) + } + + public boolean after(Version compareTo) { + return id > compareTo.id } public boolean after(String compareTo) { - return id > fromString(compareTo).id + return after(fromString(compareTo)) } public boolean onOrBeforeIncludingSuffix(Version otherVersion) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionProperties.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionProperties.groovy index c24431b4cbc..6983d12872f 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionProperties.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionProperties.groovy @@ -22,7 +22,7 @@ package org.elasticsearch.gradle * Accessor for shared dependency versions used by elasticsearch, namely the elasticsearch and lucene versions. 
*/ class VersionProperties { - static final String elasticsearch + static final Version elasticsearch static final String lucene static final Map versions = new HashMap<>() static { @@ -32,7 +32,7 @@ class VersionProperties { throw new RuntimeException('/version.properties resource missing') } props.load(propsStream) - elasticsearch = props.getProperty('elasticsearch') + elasticsearch = Version.fromString(props.getProperty('elasticsearch')) lucene = props.getProperty('lucene') for (String property : props.stringPropertyNames()) { versions.put(property, props.getProperty(property)) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy index d2802638ce5..f674dbd33cd 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy @@ -41,7 +41,7 @@ public class DocsTestPlugin extends RestTestPlugin { * to the version being built for testing but needs to resolve to * the last released version for docs. 
*/ '\\{version\\}': - VersionProperties.elasticsearch.replace('-SNAPSHOT', ''), + VersionProperties.elasticsearch.toString().replace('-SNAPSHOT', ''), '\\{lucene_version\\}' : VersionProperties.lucene.replaceAll('-snapshot-\\w+$', ''), ] Task listSnippets = project.tasks.create('listSnippets', SnippetsTask) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy index f5dbcfd8b0d..8e913153f05 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy @@ -77,7 +77,7 @@ class PluginPropertiesTask extends Copy { 'name': extension.name, 'description': extension.description, 'version': stringSnap(extension.version), - 'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch), + 'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch.toString()), 'javaVersion': project.targetCompatibility as String, 'classname': extension.classname, 'extendedPlugins': extension.extendedPlugins.join(','), diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index 1e609c5e05f..5aaf54454e1 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -18,6 +18,7 @@ */ package org.elasticsearch.gradle.test +import org.elasticsearch.gradle.Version import org.gradle.api.GradleException import org.gradle.api.Project import org.gradle.api.tasks.Input @@ -37,7 +38,7 @@ class ClusterConfiguration { int numBwcNodes = 0 @Input - String bwcVersion = null + Version bwcVersion = null @Input int httpPort = 0 diff --git 
a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index f0b232c0d47..5f9e4c49b34 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -107,11 +107,14 @@ class ClusterFormationTasks { for (int i = 0; i < config.numNodes; i++) { // we start N nodes and out of these N nodes there might be M bwc nodes. // for each of those nodes we might have a different configuration - String elasticsearchVersion = VersionProperties.elasticsearch - Configuration distro = currentDistro + final Configuration distro + final Version elasticsearchVersion if (i < config.numBwcNodes) { elasticsearchVersion = config.bwcVersion distro = bwcDistro + } else { + elasticsearchVersion = VersionProperties.elasticsearch + distro = currentDistro } NodeInfo node = new NodeInfo(config, i, project, prefix, elasticsearchVersion, sharedDir) nodes.add(node) @@ -126,7 +129,7 @@ class ClusterFormationTasks { } /** Adds a dependency on the given distribution */ - static void configureDistributionDependency(Project project, String distro, Configuration configuration, String elasticsearchVersion) { + static void configureDistributionDependency(Project project, String distro, Configuration configuration, Version elasticsearchVersion) { String packaging = distro if (distro == 'tar') { packaging = 'tar.gz' @@ -137,7 +140,7 @@ class ClusterFormationTasks { } /** Adds a dependency on a different version of the given plugin, which will be retrieved using gradle's dependency resolution */ - static void configureBwcPluginDependency(String name, Project project, Project pluginProject, Configuration configuration, String elasticsearchVersion) { + static void configureBwcPluginDependency(String name, Project project, Project pluginProject, Configuration configuration, 
Version elasticsearchVersion) { verifyProjectHasBuildPlugin(name, elasticsearchVersion, project, pluginProject) final String pluginName = findPluginName(pluginProject) project.dependencies.add(configuration.name, "org.elasticsearch.plugin:${pluginName}:${elasticsearchVersion}@zip") @@ -303,7 +306,7 @@ class ClusterFormationTasks { // Default the watermarks to absurdly low to prevent the tests from failing on nodes without enough disk space esConfig['cluster.routing.allocation.disk.watermark.low'] = '1b' esConfig['cluster.routing.allocation.disk.watermark.high'] = '1b' - if (Version.fromString(node.nodeVersion).major >= 6) { + if (node.nodeVersion.major >= 6) { esConfig['cluster.routing.allocation.disk.watermark.flood_stage'] = '1b' } // increase script compilation limit since tests can rapid-fire script compilations @@ -803,7 +806,7 @@ class ClusterFormationTasks { return retVal } - static void verifyProjectHasBuildPlugin(String name, String version, Project project, Project pluginProject) { + static void verifyProjectHasBuildPlugin(String name, Version version, Project project, Project pluginProject) { if (pluginProject.plugins.hasPlugin(PluginBuildPlugin) == false && pluginProject.plugins.hasPlugin(MetaPluginBuildPlugin) == false) { throw new GradleException("Task [${name}] cannot add plugin [${pluginProject.path}] with version [${version}] to project's " + "[${project.path}] dependencies: the plugin is not an esplugin or es_meta_plugin") diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index 2898df0445f..0f23db5d5f8 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -102,10 +102,10 @@ class NodeInfo { ByteArrayOutputStream buffer = new ByteArrayOutputStream() /** the version of elasticsearch that this node runs */ - String nodeVersion + Version 
nodeVersion /** Holds node configuration for part of a test cluster. */ - NodeInfo(ClusterConfiguration config, int nodeNum, Project project, String prefix, String nodeVersion, File sharedDir) { + NodeInfo(ClusterConfiguration config, int nodeNum, Project project, String prefix, Version nodeVersion, File sharedDir) { this.config = config this.nodeNum = nodeNum this.sharedDir = sharedDir @@ -166,13 +166,13 @@ class NodeInfo { final String javaHome final Map javaVersions = project.javaVersions - if (Version.fromString(nodeVersion).before("6.2.0")) { + if (nodeVersion.before("6.2.0")) { final String java8Home = javaVersions.get(8) if (java8Home == null) { throw new GradleException("JAVA8_HOME must be set to run BWC tests against [" + nodeVersion + "]") } javaHome = java8Home - } else if (Version.fromString(nodeVersion).onOrAfter("6.2.0") && Version.fromString(nodeVersion).before("6.3.0")) { + } else if (nodeVersion.onOrAfter("6.2.0") && nodeVersion.before("6.3.0")) { final String java9Home = javaVersions.get(9) if (java9Home == null) { throw new GradleException("JAVA9_HOME must be set to run BWC tests against [" + nodeVersion + "]") @@ -304,7 +304,7 @@ class NodeInfo { } /** Returns the directory elasticsearch home is contained in for the given distribution */ - static File homeDir(File baseDir, String distro, String nodeVersion) { + static File homeDir(File baseDir, String distro, Version nodeVersion) { String path switch (distro) { case 'integ-test-zip': @@ -322,7 +322,7 @@ class NodeInfo { return new File(baseDir, path) } - static File pathConf(File baseDir, String distro, String nodeVersion) { + static File pathConf(File baseDir, String distro, Version nodeVersion) { switch (distro) { case 'integ-test-zip': case 'zip': From efa823bd79f642528bdcce5130fc1c9902ea517f Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 13 Apr 2018 17:11:37 -0400 Subject: [PATCH 06/13] Simplify snapshot check in root build file Rather than checking a substring match, now that 
VersionProperties#elasticsearch is a strongly-typed instance of Version, we can use the Version#isSnapshot convenience method. This commit switches the root build file to do this. --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index ae472cc1340..3027dade1a7 100644 --- a/build.gradle +++ b/build.gradle @@ -245,7 +245,7 @@ subprojects { // other packages (e.g org.elasticsearch.client) will point to server rather than // their own artifacts. if (project.plugins.hasPlugin(BuildPlugin)) { - String artifactsHost = VersionProperties.elasticsearch.toString().endsWith("-SNAPSHOT") ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co" + String artifactsHost = VersionProperties.elasticsearch.isSnapshot() ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co" Closure sortClosure = { a, b -> b.group <=> a.group } Closure depJavadocClosure = { dep -> if (dep.group != null && dep.group.startsWith('org.elasticsearch')) { From b883e1217f531d831e09c06230326528dbd271eb Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sat, 14 Apr 2018 15:44:43 -0400 Subject: [PATCH 07/13] Lazy configure build tasks that require older JDKs (#29519) Some build tasks require older JDKs. For example, the BWC build tasks for older versions of Elasticsearch require older JDKs. It is onerous to require these be configured when merely compiling Elasticsearch, the requirement that they be strictly set to appropriate values should only be enforced if these tasks are going to be executed. To address this, we lazy configure these tasks. 
--- .../elasticsearch/gradle/BuildPlugin.groovy | 17 +++++++++++++++ .../elasticsearch/gradle/test/NodeInfo.groovy | 21 ++++++------------- distribution/bwc/build.gradle | 13 ++++-------- qa/reindex-from-old/build.gradle | 8 +++---- 4 files changed, 31 insertions(+), 28 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 46dd3b902dc..3103f23472e 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -221,6 +221,23 @@ class BuildPlugin implements Plugin { return System.getenv('JAVA' + version + '_HOME') } + /** + * Get Java home for the project for the specified version. If the specified version is not configured, an exception with the specified + * message is thrown. + * + * @param project the project + * @param version the version of Java home to obtain + * @param message the exception message if Java home for the specified version is not configured + * @return Java home for the specified version + * @throws GradleException if Java home for the specified version is not configured + */ + static String getJavaHome(final Project project, final int version, final String message) { + if (project.javaVersions.get(version) == null) { + throw new GradleException(message) + } + return project.javaVersions.get(version) + } + private static String findRuntimeJavaHome(final String compilerJavaHome) { assert compilerJavaHome != null return System.getenv('RUNTIME_JAVA_HOME') ?: compilerJavaHome diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index 0f23db5d5f8..fcc357258b9 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -16,21 +16,22 @@ 
* specific language governing permissions and limitations * under the License. */ + package org.elasticsearch.gradle.test import com.sun.jna.Native import com.sun.jna.WString import org.apache.tools.ant.taskdefs.condition.Os import org.elasticsearch.gradle.Version -import org.gradle.api.GradleException import org.gradle.api.InvalidUserDataException -import org.gradle.api.JavaVersion import org.gradle.api.Project import java.nio.file.Files import java.nio.file.Path import java.nio.file.Paths +import static org.elasticsearch.gradle.BuildPlugin.getJavaHome + /** * A container for the files and configuration associated with a single node in a test cluster. */ @@ -165,24 +166,14 @@ class NodeInfo { } final String javaHome - final Map javaVersions = project.javaVersions if (nodeVersion.before("6.2.0")) { - final String java8Home = javaVersions.get(8) - if (java8Home == null) { - throw new GradleException("JAVA8_HOME must be set to run BWC tests against [" + nodeVersion + "]") - } - javaHome = java8Home + env = ['JAVA_HOME':"${-> getJavaHome(project, 8, "JAVA8_HOME must be set to run BWC tests against [" + nodeVersion + "]")}"] } else if (nodeVersion.onOrAfter("6.2.0") && nodeVersion.before("6.3.0")) { - final String java9Home = javaVersions.get(9) - if (java9Home == null) { - throw new GradleException("JAVA9_HOME must be set to run BWC tests against [" + nodeVersion + "]") - } - javaHome = java9Home + env = ['JAVA_HOME':"${-> getJavaHome(project, 9, "JAVA9_HOME must be set to run BWC tests against [" + nodeVersion + "]")}"] } else { - javaHome = project.compilerJavaHome + env = ['JAVA_HOME':project.runtimeJavaHome] } - env = ['JAVA_HOME':javaHome] args.addAll("-E", "node.portsfile=true") String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ") String esJavaOpts = config.jvmArgs.isEmpty() ? 
collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle index 7b4c9e959a9..48b84b40362 100644 --- a/distribution/bwc/build.gradle +++ b/distribution/bwc/build.gradle @@ -17,11 +17,12 @@ * under the License. */ + import org.apache.tools.ant.taskdefs.condition.Os import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.Version -import java.util.regex.Matcher +import static org.elasticsearch.gradle.BuildPlugin.getJavaHome /** * This is a dummy project which does a local checkout of the previous @@ -146,15 +147,9 @@ subprojects { workingDir = checkoutDir if (["5.6", "6.0", "6.1"].contains(bwcBranch)) { // we are building branches that are officially built with JDK 8, push JAVA8_HOME to JAVA_HOME for these builds - if (project.javaVersions.get(8) == null) { - throw new GradleException("JAVA8_HOME is required to build BWC versions for BWC branch [" + bwcBranch + "]") - } - environment('JAVA_HOME', project.javaVersions.get(8)) + environment('JAVA_HOME', "${-> getJavaHome(project, 8, "JAVA8_HOME is required to build BWC versions for BWC branch [" + bwcBranch + "]")}") } else if ("6.2".equals(bwcBranch)) { - if (project.javaVersions.get(9) == null) { - throw new GradleException("JAVA9_HOME is required to build BWC versions for BWC branch [" + bwcBranch + "]") - } - environment('JAVA_HOME', project.javaVersions.get(9)) + environment('JAVA_HOME', "${-> getJavaHome(project, 9, "JAVA9_HOME is required to build BWC versions for BWC branch [" + bwcBranch + "]")}") } else { environment('JAVA_HOME', project.compilerJavaHome) } diff --git a/qa/reindex-from-old/build.gradle b/qa/reindex-from-old/build.gradle index 4fe481543c3..c4b4927a4a2 100644 --- a/qa/reindex-from-old/build.gradle +++ b/qa/reindex-from-old/build.gradle @@ -24,8 +24,11 @@ should be able to use the standard launching mechanism which is more flexible and reliable. 
""" + import org.apache.tools.ant.taskdefs.condition.Os +import static org.elasticsearch.gradle.BuildPlugin.getJavaHome + apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' @@ -55,9 +58,6 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) { // we can't get the pid files in windows so we skip that integTest.enabled = false } else { - if (project.javaVersions.get(7) == null) { - throw new GradleException("JAVA7_HOME must be set to run reindex-from-old") - } /* Set up tasks to unzip and run the old versions of ES before running the * integration tests. */ for (String version : ['2', '1', '090']) { @@ -77,7 +77,7 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) { dependsOn unzip executable = new File(project.runtimeJavaHome, 'bin/java') env 'CLASSPATH', "${ -> project.configurations.oldesFixture.asPath }" - env 'JAVA_HOME', project.javaVersions.get(7) + env 'JAVA_HOME', "${-> getJavaHome(project, 7, "JAVA7_HOME must be set to run reindex-from-old")}" args 'oldes.OldElasticsearch', baseDir, unzip.temporaryDir, From 9125684d8633572cc303222c4f9fa25ce2f0db5d Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sat, 14 Apr 2018 20:48:48 -0400 Subject: [PATCH 08/13] Minor cleanup in NodeInfo.groovy This commit is a minor cleanup of a code block in NodeInfo.groovy. We remove an unused variable, make the formatting of the code consistent, and cast a property that is typed as an Object to a String to avoid an annoying IDE warning. 
--- .../groovy/org/elasticsearch/gradle/test/NodeInfo.groovy | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index fcc357258b9..1fc944eeec6 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -165,13 +165,12 @@ class NodeInfo { args.add("${esScript}") } - final String javaHome if (nodeVersion.before("6.2.0")) { - env = ['JAVA_HOME':"${-> getJavaHome(project, 8, "JAVA8_HOME must be set to run BWC tests against [" + nodeVersion + "]")}"] + env = ['JAVA_HOME': "${-> getJavaHome(project, 8, "JAVA8_HOME must be set to run BWC tests against [" + nodeVersion + "]")}"] } else if (nodeVersion.onOrAfter("6.2.0") && nodeVersion.before("6.3.0")) { - env = ['JAVA_HOME':"${-> getJavaHome(project, 9, "JAVA9_HOME must be set to run BWC tests against [" + nodeVersion + "]")}"] + env = ['JAVA_HOME': "${-> getJavaHome(project, 9, "JAVA9_HOME must be set to run BWC tests against [" + nodeVersion + "]")}"] } else { - env = ['JAVA_HOME':project.runtimeJavaHome] + env = ['JAVA_HOME': (String) project.runtimeJavaHome] } args.addAll("-E", "node.portsfile=true") From 00fd73acc4a2991f96438f8c1948016c5b9eefb2 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 15 Apr 2018 16:26:09 -0400 Subject: [PATCH 09/13] Avoid self-deadlock in the translog (#29520) Today when reading an operation from the current generation fails tragically we attempt to close the translog. However, by invoking close before releasing the read lock we end up in self-deadlock because closing tries to acquire the write lock and the read lock can not be upgraded to a write lock. To avoid this, we move the close invocation outside of the try-with-resources that acquired the read lock. 
As an extra guard against this, we document the problem and add an assertion that we are not trying to invoke close while holding the read lock. --- .../index/translog/Translog.java | 29 +++++++++++++------ .../index/translog/TranslogTests.java | 1 - 2 files changed, 20 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java index ab4961892ca..cc5041bf244 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -583,12 +583,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC if (current.generation == location.generation) { // no need to fsync here the read operation will ensure that buffers are written to disk // if they are still in RAM and we are reading onto that position - try { - return current.read(location); - } catch (final Exception ex) { - closeOnTragicEvent(ex); - throw ex; - } + return current.read(location); } else { // read backwards - it's likely we need to read on that is recent for (int i = readers.size() - 1; i >= 0; i--) { @@ -598,6 +593,9 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } } } + } catch (final Exception ex) { + closeOnTragicEvent(ex); + throw ex; } return null; } @@ -735,15 +733,28 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } } + /** + * Closes the translog if the current translog writer experienced a tragic exception. 
+ * + * Note that in case this thread closes the translog it must not already be holding a read lock on the translog as it will acquire a + * write lock in the course of closing the translog + * + * @param ex if an exception occurs closing the translog, it will be suppressed into the provided exception + */ private void closeOnTragicEvent(final Exception ex) { + // we can not hold a read lock here because closing will attempt to obtain a write lock and that would result in self-deadlock + assert readLock.isHeldByCurrentThread() == false : Thread.currentThread().getName(); if (current.getTragicException() != null) { try { close(); } catch (final AlreadyClosedException inner) { - // don't do anything in this case. The AlreadyClosedException comes from TranslogWriter and we should not add it as suppressed because - // will contain the Exception ex as cause. See also https://github.com/elastic/elasticsearch/issues/15941 + /* + * Don't do anything in this case. The AlreadyClosedException comes from TranslogWriter and we should not add it as + * suppressed because it will contain the provided exception as its cause. See also + * https://github.com/elastic/elasticsearch/issues/15941. 
+ */ } catch (final Exception inner) { - assert (ex != inner.getCause()); + assert ex != inner.getCause(); ex.addSuppressed(inner); } } diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 8899dca24b1..b3b9fca886e 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -1812,7 +1812,6 @@ public class TranslogTests extends ESTestCase { assertTrue(translog.getTragicException() instanceof UnknownException); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29509") public void testFatalIOExceptionsWhileWritingConcurrently() throws IOException, InterruptedException { Path tempDir = createTempDir(); final FailSwitch fail = new FailSwitch(); From 7931cf87f01fec1e9165971c2472a1aed1c0f68c Mon Sep 17 00:00:00 2001 From: Bolarinwa Saheed Olayemi Date: Mon, 16 Apr 2018 10:46:05 +0200 Subject: [PATCH 10/13] [Docs] Add definitions to glossary (#29127) Definitions for "filter" and "query" are added to the glossary of terms. Closes #29127 --- docs/reference/glossary.asciidoc | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/docs/reference/glossary.asciidoc b/docs/reference/glossary.asciidoc index 0012beebdca..53164d366cd 100644 --- a/docs/reference/glossary.asciidoc +++ b/docs/reference/glossary.asciidoc @@ -61,6 +61,15 @@ `object`. The mapping also allows you to define (amongst other things) how the value for a field should be analyzed. +[[glossary-filter]] filter :: + + A filter is a non-scoring <>, meaning that it does not score documents. + It is only concerned about answering the question - "Does this document match?". + The answer is always a simple, binary yes or no. This kind of query is said to be made + in a <>, + hence it is called a filter. 
Filters are simple checks for set inclusion or exclusion. + In most cases, the goal of filtering is to reduce the number of documents that have to be examined. + [[glossary-index]] index :: An index is like a _table_ in a relational database. It has a @@ -105,6 +114,16 @@ + See also <> +[[glossary-query]] query :: + + A query is the basic component of a search. A search can be defined by one or more queries + which can be mixed and matched in endless combinations. While <> are + queries that only determine if a document matches, those queries that also calculate how well + the document matches are known as "scoring queries". Those queries assign it a score, which is + later used to sort matched documents. Scoring queries take more resources than <> + and their query results are not cacheable. As a general rule, use query clauses for full-text + search or for any condition that requires scoring, and use filters for everything else. + [[glossary-replica-shard]] replica shard :: Each <> can have zero or more @@ -161,8 +180,9 @@ A term is an exact value that is indexed in Elasticsearch. The terms `foo`, `Foo`, `FOO` are NOT equivalent. Terms (i.e. exact values) can - be searched for using _term_ queries. + - See also <> and <>. + be searched for using _term_ queries. + + + See also <> and <>. [[glossary-text]] text :: From a004a338038d22a16f2688fa71302f7608331499 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 16 Apr 2018 13:41:42 +0200 Subject: [PATCH 11/13] Prevent accidental changes of default values (#29528) The default percentiles values and the default highlighter per- and post-tags are currently publicly accessible and can be altered any time. This change prevents this by restricting field access. 
--- .../metrics/percentiles/PercentilesAggregationBuilder.java | 3 +-- .../search/fetch/subphase/highlight/HighlightBuilder.java | 4 ++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregationBuilder.java index 8b5858dcd95..5c90832bb15 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregationBuilder.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentilesAggregatorFactory; @@ -50,7 +49,7 @@ import java.util.function.Consumer; public class PercentilesAggregationBuilder extends LeafOnly { public static final String NAME = Percentiles.TYPE_NAME; - public static final double[] DEFAULT_PERCENTS = new double[] { 1, 5, 25, 50, 75, 95, 99 }; + private static final double[] DEFAULT_PERCENTS = new double[] { 1, 5, 25, 50, 75, 95, 99 }; public static final ParseField PERCENTS_FIELD = new ParseField("percents"); public static final ParseField KEYED_FIELD = new ParseField("keyed"); public static final ParseField METHOD_FIELD = new ParseField("method"); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java 
b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java index cc00c2faac7..ff332c7d734 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java @@ -75,9 +75,9 @@ public class HighlightBuilder extends AbstractHighlighterBuilder"}; + static final String[] DEFAULT_PRE_TAGS = new String[]{""}; /** the default closing tag */ - public static final String[] DEFAULT_POST_TAGS = new String[]{""}; + static final String[] DEFAULT_POST_TAGS = new String[]{""}; /** the default opening tags when tag_schema = "styled" */ public static final String[] DEFAULT_STYLED_PRE_TAG = { From 0bfb59dcf2cda291a47ffe476af005207a20b565 Mon Sep 17 00:00:00 2001 From: Ke Li Date: Mon, 16 Apr 2018 20:39:35 +0800 Subject: [PATCH 12/13] Using ObjectParser in UpdateRequest (#29293) CRUD: Parsing changes for UpdateRequest (#29293) Use `ObjectParser` to parse `UpdateRequest` so we reject unknown fields and drop support for the `_fields` parameter because it was deprecated in 5.x. 
--- .../noop/action/bulk/RestNoopBulkAction.java | 5 +- .../migration/migrate_7_0/api.asciidoc | 6 +- .../resources/rest-api-spec/api/bulk.json | 4 - .../resources/rest-api-spec/api/update.json | 4 - .../action/bulk/BulkProcessor.java | 2 +- .../action/bulk/BulkRequest.java | 32 ++--- .../action/bulk/TransportShardBulkAction.java | 3 +- .../action/update/TransportUpdateAction.java | 3 +- .../action/update/UpdateHelper.java | 59 ++------ .../action/update/UpdateRequest.java | 128 +++++++----------- .../action/update/UpdateRequestBuilder.java | 16 --- .../rest/action/document/RestBulkAction.java | 12 +- .../action/document/RestUpdateAction.java | 15 +- .../action/bulk/BulkRequestTests.java | 36 +++-- .../action/bulk/BulkWithUpdatesIT.java | 16 +-- .../action/update/UpdateRequestTests.java | 39 +++--- .../org/elasticsearch/update/UpdateIT.java | 36 ++--- .../elasticsearch/update/UpdateNoopIT.java | 2 +- 18 files changed, 144 insertions(+), 274 deletions(-) diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java index ca5f3220567..9c632afe191 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Requests; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.shard.ShardId; @@ -68,9 +67,7 @@ public class RestNoopBulkAction extends 
BaseRestHandler { String defaultIndex = request.param("index"); String defaultType = request.param("type"); String defaultRouting = request.param("routing"); - String fieldsParam = request.param("fields"); String defaultPipeline = request.param("pipeline"); - String[] defaultFields = fieldsParam != null ? Strings.commaDelimitedListToStringArray(fieldsParam) : null; String waitForActiveShards = request.param("wait_for_active_shards"); if (waitForActiveShards != null) { @@ -78,7 +75,7 @@ public class RestNoopBulkAction extends BaseRestHandler { } bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT)); bulkRequest.setRefreshPolicy(request.param("refresh")); - bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting, defaultFields, + bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting, null, defaultPipeline, null, true, request.getXContentType()); // short circuit the call to the transport layer diff --git a/docs/reference/migration/migrate_7_0/api.asciidoc b/docs/reference/migration/migrate_7_0/api.asciidoc index 5fd425857c3..ec4778f9aa6 100644 --- a/docs/reference/migration/migrate_7_0/api.asciidoc +++ b/docs/reference/migration/migrate_7_0/api.asciidoc @@ -2,7 +2,7 @@ === Breaking API changes in 7.0 ==== Camel case and underscore parameters deprecated in 6.x have been removed -A number of duplicate parameters deprecated in 6.x have been removed from +A number of duplicate parameters deprecated in 6.x have been removed from Bulk request, Multi Get request, Term Vectors request, and More Like This Query requests. @@ -22,3 +22,7 @@ The following parameters starting with underscore have been removed: Instead of these removed parameters, use their non camel case equivalents without starting underscore, e.g. use `version_type` instead of `_version_type` or `versionType`. + +==== The parameter `fields` deprecated in 6.x has been removed from Bulk request +and Update request. 
The Update API returns `400 - Bad request` if request contains +unknown parameters (instead of ignored in the previous version). diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json b/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json index 2ff171bf528..c30ee70e2eb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json @@ -37,10 +37,6 @@ "type" : "string", "description" : "Default document type for items which don't provide one" }, - "fields": { - "type": "list", - "description" : "Default comma-separated list of fields to return in the response for updates, can be overridden on each sub-request" - }, "_source": { "type" : "list", "description" : "True or false to return the _source field or not, or default list of fields to return, can be overridden on each sub-request" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json index 5e1dcf72e95..ffa99cc9dc3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json @@ -27,10 +27,6 @@ "type": "string", "description": "Sets the number of shard copies that must be active before proceeding with the update operation. Defaults to 1, meaning the primary shard only. 
Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1)" }, - "fields": { - "type": "list", - "description": "A comma-separated list of fields to return in the response" - }, "_source": { "type" : "list", "description" : "True or false to return the _source field or not, or a list of fields to return" diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java index 39a185741db..fdafb3b2b80 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java @@ -299,7 +299,7 @@ public class BulkProcessor implements Closeable { */ public synchronized BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultPipeline, @Nullable Object payload, XContentType xContentType) throws Exception { - bulkRequest.add(data, defaultIndex, defaultType, null, null, null, defaultPipeline, payload, true, xContentType); + bulkRequest.add(data, defaultIndex, defaultType, null, null, defaultPipeline, payload, true, xContentType); executeIfNeeded(); return this; } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index c81ef6ee992..ebc095b1670 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -36,8 +36,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; 
import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; @@ -66,8 +64,6 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; * @see org.elasticsearch.client.Client#bulk(BulkRequest) */ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest, WriteRequest { - private static final DeprecationLogger DEPRECATION_LOGGER = - new DeprecationLogger(Loggers.getLogger(BulkRequest.class)); private static final int REQUEST_OVERHEAD = 50; @@ -80,7 +76,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques private static final ParseField VERSION_TYPE = new ParseField("version_type"); private static final ParseField RETRY_ON_CONFLICT = new ParseField("retry_on_conflict"); private static final ParseField PIPELINE = new ParseField("pipeline"); - private static final ParseField FIELDS = new ParseField("fields"); private static final ParseField SOURCE = new ParseField("_source"); /** @@ -277,7 +272,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques */ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, XContentType xContentType) throws IOException { - return add(data, defaultIndex, defaultType, null, null, null, null, null, true, xContentType); + return add(data, defaultIndex, defaultType, null, null, null, null, true, xContentType); } /** @@ -285,12 +280,13 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques */ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, boolean allowExplicitIndex, XContentType xContentType) throws IOException { - return add(data, defaultIndex, defaultType, null, null, null, null, null, allowExplicitIndex, xContentType); + return add(data, defaultIndex, defaultType, null, null, null, null, 
allowExplicitIndex, xContentType); } - public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String - defaultRouting, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSourceContext, @Nullable String - defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex, XContentType xContentType) throws IOException { + public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, + @Nullable String defaultRouting, @Nullable FetchSourceContext defaultFetchSourceContext, + @Nullable String defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex, + XContentType xContentType) throws IOException { XContent xContent = xContentType.xContent(); int line = 0; int from = 0; @@ -333,7 +329,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques String id = null; String routing = defaultRouting; FetchSourceContext fetchSourceContext = defaultFetchSourceContext; - String[] fields = defaultFields; String opType = null; long version = Versions.MATCH_ANY; VersionType versionType = VersionType.INTERNAL; @@ -371,21 +366,14 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques retryOnConflict = parser.intValue(); } else if (PIPELINE.match(currentFieldName, parser.getDeprecationHandler())) { pipeline = parser.text(); - } else if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) { - throw new IllegalArgumentException("Action/metadata line [" + line + "] contains a simple value for parameter [fields] while a list is expected"); } else if (SOURCE.match(currentFieldName, parser.getDeprecationHandler())) { fetchSourceContext = FetchSourceContext.fromXContent(parser); } else { throw new IllegalArgumentException("Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]"); } } else if (token == XContentParser.Token.START_ARRAY) { - if 
(FIELDS.match(currentFieldName, parser.getDeprecationHandler())) { - DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead"); - List values = parser.list(); - fields = values.toArray(new String[values.size()]); - } else { - throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]"); - } + throw new IllegalArgumentException("Malformed action/metadata line [" + line + + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]"); } else if (token == XContentParser.Token.START_OBJECT && SOURCE.match(currentFieldName, parser.getDeprecationHandler())) { fetchSourceContext = FetchSourceContext.fromXContent(parser); } else if (token != XContentParser.Token.VALUE_NULL) { @@ -435,10 +423,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques if (fetchSourceContext != null) { updateRequest.fetchSource(fetchSourceContext); } - if (fields != null) { - updateRequest.fields(fields); - } - IndexRequest upsertRequest = updateRequest.upsertRequest(); if (upsertRequest != null) { upsertRequest.version(version); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index fcc8f5377db..f9b27a1e620 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -291,8 +291,7 @@ public class TransportShardBulkAction extends TransportWriteAction 0)) { + if (updateRequest.fetchSource() != null && updateRequest.fetchSource().fetchSource()) { final BytesReference indexSourceAsBytes = updateIndexRequest.source(); final Tuple> sourceAndContent = XContentHelper.convertToMap(indexSourceAsBytes, true, updateIndexRequest.getContentType()); diff --git 
a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index a9f9a1d32bb..242dfe635ec 100644 --- a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -180,8 +180,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio bulkAction.execute(toSingleItemBulkRequest(upsertRequest), wrapBulkResponse( ActionListener.wrap(response -> { UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), response.getResult()); - if ((request.fetchSource() != null && request.fetchSource().fetchSource()) || - (request.fields() != null && request.fields().length > 0)) { + if (request.fetchSource() != null && request.fetchSource().fetchSource()) { Tuple> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true, upsertRequest.getContentType()); update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes)); diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 63378fd75cc..4c5accbb4cc 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.document.DocumentField; import 
org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.settings.Settings; @@ -49,7 +48,7 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; -import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.function.LongSupplier; @@ -292,61 +291,33 @@ public class UpdateHelper extends AbstractComponent { /** * Applies {@link UpdateRequest#fetchSource()} to the _source of the updated document to be returned in a update response. - * For BWC this function also extracts the {@link UpdateRequest#fields()} from the updated document to be returned in a update response */ public static GetResult extractGetResult(final UpdateRequest request, String concreteIndex, long version, final Map source, XContentType sourceContentType, @Nullable final BytesReference sourceAsBytes) { - if ((request.fields() == null || request.fields().length == 0) && - (request.fetchSource() == null || request.fetchSource().fetchSource() == false)) { + if (request.fetchSource() == null || request.fetchSource().fetchSource() == false) { return null; } - SourceLookup sourceLookup = new SourceLookup(); - sourceLookup.setSource(source); - boolean sourceRequested = false; - Map fields = null; - if (request.fields() != null && request.fields().length > 0) { - for (String field : request.fields()) { - if (field.equals("_source")) { - sourceRequested = true; - continue; - } - Object value = sourceLookup.extractValue(field); - if (value != null) { - if (fields == null) { - fields = new HashMap<>(2); - } - DocumentField documentField = fields.get(field); - if (documentField == null) { - documentField = new DocumentField(field, new ArrayList<>(2)); - fields.put(field, documentField); - } - documentField.getValues().add(value); - } - } - } BytesReference sourceFilteredAsBytes = sourceAsBytes; 
- if (request.fetchSource() != null && request.fetchSource().fetchSource()) { - sourceRequested = true; - if (request.fetchSource().includes().length > 0 || request.fetchSource().excludes().length > 0) { - Object value = sourceLookup.filter(request.fetchSource()); - try { - final int initialCapacity = Math.min(1024, sourceAsBytes.length()); - BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity); - try (XContentBuilder builder = new XContentBuilder(sourceContentType.xContent(), streamOutput)) { - builder.value(value); - sourceFilteredAsBytes = BytesReference.bytes(builder); - } - } catch (IOException e) { - throw new ElasticsearchException("Error filtering source", e); + if (request.fetchSource().includes().length > 0 || request.fetchSource().excludes().length > 0) { + SourceLookup sourceLookup = new SourceLookup(); + sourceLookup.setSource(source); + Object value = sourceLookup.filter(request.fetchSource()); + try { + final int initialCapacity = Math.min(1024, sourceAsBytes.length()); + BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity); + try (XContentBuilder builder = new XContentBuilder(sourceContentType.xContent(), streamOutput)) { + builder.value(value); + sourceFilteredAsBytes = BytesReference.bytes(builder); } + } catch (IOException e) { + throw new ElasticsearchException("Error filtering source", e); } } // TODO when using delete/none, we can still return the source as bytes by generating it (using the sourceContentType) - return new GetResult(concreteIndex, request.type(), request.id(), version, true, - sourceRequested ? 
sourceFilteredAsBytes : null, fields); + return new GetResult(concreteIndex, request.type(), request.id(), version, true, sourceFilteredAsBytes, Collections.emptyMap()); } public static class Result { diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java index 73c56ca22b2..3f74f7311c2 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java @@ -19,8 +19,6 @@ package org.elasticsearch.action.update; -import java.util.Arrays; - import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocWriteRequest; @@ -30,11 +28,14 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequest; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -48,15 +49,46 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; import static 
org.elasticsearch.action.ValidateActions.addValidationError; public class UpdateRequest extends InstanceShardOperationRequest implements DocWriteRequest, WriteRequest, ToXContentObject { + private static ObjectParser PARSER; + + private static final ParseField SCRIPT_FIELD = new ParseField("script"); + private static final ParseField SCRIPTED_UPSERT_FIELD = new ParseField("scripted_upsert"); + private static final ParseField UPSERT_FIELD = new ParseField("upsert"); + private static final ParseField DOC_FIELD = new ParseField("doc"); + private static final ParseField DOC_AS_UPSERT_FIELD = new ParseField("doc_as_upsert"); + private static final ParseField DETECT_NOOP_FIELD = new ParseField("detect_noop"); + private static final ParseField SOURCE_FIELD = new ParseField("_source"); + + static { + PARSER = new ObjectParser<>(UpdateRequest.class.getSimpleName()); + PARSER.declareField((request, script) -> request.script = script, + (parser, context) -> Script.parse(parser), SCRIPT_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING); + PARSER.declareBoolean(UpdateRequest::scriptedUpsert, SCRIPTED_UPSERT_FIELD); + PARSER.declareObject((request, builder) -> request.safeUpsertRequest().source(builder), + (parser, context) -> { + XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType()); + builder.copyCurrentStructure(parser); + return builder; + }, UPSERT_FIELD); + PARSER.declareObject((request, builder) -> request.safeDoc().source(builder), + (parser, context) -> { + XContentBuilder docBuilder = XContentFactory.contentBuilder(parser.contentType()); + docBuilder.copyCurrentStructure(parser); + return docBuilder; + }, DOC_FIELD); + PARSER.declareBoolean(UpdateRequest::docAsUpsert, DOC_AS_UPSERT_FIELD); + PARSER.declareBoolean(UpdateRequest::detectNoop, DETECT_NOOP_FIELD); + PARSER.declareField(UpdateRequest::fetchSource, + (parser, context) -> FetchSourceContext.fromXContent(parser), SOURCE_FIELD, + ObjectParser.ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING); + } 
private String type; private String id; @@ -66,7 +98,6 @@ public class UpdateRequest extends InstanceShardOperationRequest @Nullable Script script; - private String[] fields; private FetchSourceContext fetchSourceContext; private long version = Versions.MATCH_ANY; @@ -365,16 +396,6 @@ public class UpdateRequest extends InstanceShardOperationRequest return this; } - /** - * Explicitly specify the fields that will be returned. By default, nothing is returned. - * @deprecated Use {@link UpdateRequest#fetchSource(String[], String[])} instead - */ - @Deprecated - public UpdateRequest fields(String... fields) { - this.fields = fields; - return this; - } - /** * Indicate that _source should be returned with every hit, with an * "include" and/or "exclude" set which can include simple wildcard @@ -389,7 +410,9 @@ public class UpdateRequest extends InstanceShardOperationRequest */ public UpdateRequest fetchSource(@Nullable String include, @Nullable String exclude) { FetchSourceContext context = this.fetchSourceContext == null ? FetchSourceContext.FETCH_SOURCE : this.fetchSourceContext; - this.fetchSourceContext = new FetchSourceContext(context.fetchSource(), new String[] {include}, new String[]{exclude}); + String[] includes = include == null ? Strings.EMPTY_ARRAY : new String[]{include}; + String[] excludes = exclude == null ? Strings.EMPTY_ARRAY : new String[]{exclude}; + this.fetchSourceContext = new FetchSourceContext(context.fetchSource(), includes, excludes); return this; } @@ -428,16 +451,6 @@ public class UpdateRequest extends InstanceShardOperationRequest return this; } - - /** - * Get the fields to be returned. - * @deprecated Use {@link UpdateRequest#fetchSource()} instead - */ - @Deprecated - public String[] fields() { - return fields; - } - /** * Gets the {@link FetchSourceContext} which defines how the _source should * be fetched. 
@@ -707,49 +720,7 @@ public class UpdateRequest extends InstanceShardOperationRequest } public UpdateRequest fromXContent(XContentParser parser) throws IOException { - Script script = null; - XContentParser.Token token = parser.nextToken(); - if (token == null) { - return this; - } - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if ("script".equals(currentFieldName)) { - script = Script.parse(parser); - } else if ("scripted_upsert".equals(currentFieldName)) { - scriptedUpsert = parser.booleanValue(); - } else if ("upsert".equals(currentFieldName)) { - XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType()); - builder.copyCurrentStructure(parser); - safeUpsertRequest().source(builder); - } else if ("doc".equals(currentFieldName)) { - XContentBuilder docBuilder = XContentFactory.contentBuilder(parser.contentType()); - docBuilder.copyCurrentStructure(parser); - safeDoc().source(docBuilder); - } else if ("doc_as_upsert".equals(currentFieldName)) { - docAsUpsert(parser.booleanValue()); - } else if ("detect_noop".equals(currentFieldName)) { - detectNoop(parser.booleanValue()); - } else if ("fields".equals(currentFieldName)) { - List fields = null; - if (token == XContentParser.Token.START_ARRAY) { - fields = (List) parser.list(); - } else if (token.isValue()) { - fields = Collections.singletonList(parser.text()); - } - if (fields != null) { - fields(fields.toArray(new String[fields.size()])); - } - } else if ("_source".equals(currentFieldName)) { - fetchSourceContext = FetchSourceContext.fromXContent(parser); - } - } - if (script != null) { - this.script = script; - } - return this; + return PARSER.parse(parser, this, null); } public boolean docAsUpsert() { @@ -789,7 +760,12 @@ public class UpdateRequest extends InstanceShardOperationRequest doc = new IndexRequest(); doc.readFrom(in); } 
- fields = in.readOptionalStringArray(); + if (in.getVersion().before(Version.V_7_0_0_alpha1)) { + String[] fields = in.readOptionalStringArray(); + if (fields != null) { + throw new IllegalArgumentException("[fields] is no longer supported"); + } + } fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new); if (in.readBoolean()) { upsertRequest = new IndexRequest(); @@ -812,7 +788,7 @@ public class UpdateRequest extends InstanceShardOperationRequest if (out.getVersion().before(Version.V_7_0_0_alpha1)) { out.writeOptionalString(null); // _parent } - + boolean hasScript = script != null; out.writeBoolean(hasScript); if (hasScript) { @@ -830,7 +806,9 @@ public class UpdateRequest extends InstanceShardOperationRequest doc.id(id); doc.writeTo(out); } - out.writeOptionalStringArray(fields); + if (out.getVersion().before(Version.V_7_0_0_alpha1)) { + out.writeOptionalStringArray(null); + } out.writeOptionalWriteable(fetchSourceContext); if (upsertRequest == null) { out.writeBoolean(false); @@ -880,9 +858,6 @@ public class UpdateRequest extends InstanceShardOperationRequest if (detectNoop == false) { builder.field("detect_noop", detectNoop); } - if (fields != null) { - builder.array("fields", fields); - } if (fetchSourceContext != null) { builder.field("_source", fetchSourceContext); } @@ -908,9 +883,6 @@ public class UpdateRequest extends InstanceShardOperationRequest } res.append(", scripted_upsert[").append(scriptedUpsert).append("]"); res.append(", detect_noop[").append(detectNoop).append("]"); - if (fields != null) { - res.append(", fields[").append(Arrays.toString(fields)).append("]"); - } return res.append("}").toString(); } } diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java index 8e753629d73..74935adbbb2 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java @@ -26,20 +26,15 @@ import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.action.document.RestUpdateAction; import org.elasticsearch.script.Script; import java.util.Map; public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder implements WriteRequestBuilder { - private static final DeprecationLogger DEPRECATION_LOGGER = - new DeprecationLogger(Loggers.getLogger(RestUpdateAction.class)); public UpdateRequestBuilder(ElasticsearchClient client, UpdateAction action) { super(client, action, new UpdateRequest()); @@ -87,17 +82,6 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder */ public class RestBulkAction extends BaseRestHandler { - private static final DeprecationLogger DEPRECATION_LOGGER = - new DeprecationLogger(Loggers.getLogger(RestBulkAction.class)); private final boolean allowExplicitIndex; @@ -80,11 +75,6 @@ public class RestBulkAction extends BaseRestHandler { String defaultType = request.param("type", MapperService.SINGLE_MAPPING_NAME); String defaultRouting = request.param("routing"); FetchSourceContext defaultFetchSourceContext = FetchSourceContext.parseFromRestRequest(request); - String fieldsParam = request.param("fields"); - if (fieldsParam != null) { - DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead"); - } - String[] defaultFields = fieldsParam != null ? 
Strings.commaDelimitedListToStringArray(fieldsParam) : null; String defaultPipeline = request.param("pipeline"); String waitForActiveShards = request.param("wait_for_active_shards"); if (waitForActiveShards != null) { @@ -92,7 +82,7 @@ public class RestBulkAction extends BaseRestHandler { } bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT)); bulkRequest.setRefreshPolicy(request.param("refresh")); - bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting, defaultFields, + bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting, defaultFetchSourceContext, defaultPipeline, null, allowExplicitIndex, request.getXContentType()); return channel -> client.bulk(bulkRequest, new RestStatusToXContentListener<>(channel)); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java index cec8101ad9b..de7c1fad5b2 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java @@ -23,9 +23,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; @@ -40,8 +37,6 @@ import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestUpdateAction extends BaseRestHandler { - private static final DeprecationLogger DEPRECATION_LOGGER = - new 
DeprecationLogger(Loggers.getLogger(RestUpdateAction.class)); public RestUpdateAction(Settings settings, RestController controller) { super(settings); @@ -65,15 +60,7 @@ public class RestUpdateAction extends BaseRestHandler { } updateRequest.docAsUpsert(request.paramAsBoolean("doc_as_upsert", updateRequest.docAsUpsert())); FetchSourceContext fetchSourceContext = FetchSourceContext.parseFromRestRequest(request); - String sField = request.param("fields"); - if (sField != null && fetchSourceContext != null) { - throw new IllegalArgumentException("[fields] and [_source] cannot be used in the same request"); - } - if (sField != null) { - DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead"); - String[] sFields = Strings.splitStringByCommaToArray(sField); - updateRequest.fields(sFields); - } else if (fetchSourceContext != null) { + if (fetchSourceContext != null) { updateRequest.fetchSource(fetchSourceContext); } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 97a1ef2806a..1d03d065e7a 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -94,12 +94,12 @@ public class BulkRequestTests extends ESTestCase { BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON); assertThat(bulkRequest.numberOfActions(), equalTo(4)); - assertThat(((UpdateRequest) bulkRequest.requests().get(0)).id(), equalTo("1")); + assertThat(bulkRequest.requests().get(0).id(), equalTo("1")); assertThat(((UpdateRequest) bulkRequest.requests().get(0)).retryOnConflict(), equalTo(2)); assertThat(((UpdateRequest) bulkRequest.requests().get(0)).doc().source().utf8ToString(), equalTo("{\"field\":\"value\"}")); - assertThat(((UpdateRequest) 
bulkRequest.requests().get(1)).id(), equalTo("0")); - assertThat(((UpdateRequest) bulkRequest.requests().get(1)).type(), equalTo("type1")); - assertThat(((UpdateRequest) bulkRequest.requests().get(1)).index(), equalTo("index1")); + assertThat(bulkRequest.requests().get(1).id(), equalTo("0")); + assertThat(bulkRequest.requests().get(1).type(), equalTo("type1")); + assertThat(bulkRequest.requests().get(1).index(), equalTo("index1")); Script script = ((UpdateRequest) bulkRequest.requests().get(1)).script(); assertThat(script, notNullValue()); assertThat(script.getIdOrCode(), equalTo("counter += param1")); @@ -107,20 +107,18 @@ public class BulkRequestTests extends ESTestCase { Map scriptParams = script.getParams(); assertThat(scriptParams, notNullValue()); assertThat(scriptParams.size(), equalTo(1)); - assertThat(((Integer) scriptParams.get("param1")), equalTo(1)); + assertThat(scriptParams.get("param1"), equalTo(1)); assertThat(((UpdateRequest) bulkRequest.requests().get(1)).upsertRequest().source().utf8ToString(), equalTo("{\"counter\":1}")); } public void testBulkAllowExplicitIndex() throws Exception { - String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json"); - try { - new BulkRequest().add(new BytesArray(bulkAction.getBytes(StandardCharsets.UTF_8)), null, null, false, XContentType.JSON); - fail(); - } catch (Exception e) { + String bulkAction1 = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json"); + Exception ex = expectThrows(Exception.class, + () -> new BulkRequest().add( + new BytesArray(bulkAction1.getBytes(StandardCharsets.UTF_8)), null, null, false, XContentType.JSON)); + assertEquals("explicit index in bulk is not allowed", ex.getMessage()); - } - - bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk5.json"); + String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk5.json"); new BulkRequest().add(new 
BytesArray(bulkAction.getBytes(StandardCharsets.UTF_8)), "test", null, false, XContentType.JSON); } @@ -177,6 +175,16 @@ public class BulkRequestTests extends ESTestCase { assertThat(bulkRequest.numberOfActions(), equalTo(9)); } + public void testBulkActionShouldNotContainArray() throws Exception { + String bulkAction = "{ \"index\":{\"_index\":[\"index1\", \"index2\"],\"_type\":\"type1\",\"_id\":\"1\"} }\r\n" + + "{ \"field1\" : \"value1\" }\r\n"; + BulkRequest bulkRequest = new BulkRequest(); + IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, + () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON)); + assertEquals(exc.getMessage(), "Malformed action/metadata line [1]" + + ", expected a simple value for field [_index] but found [START_ARRAY]"); + } + public void testBulkEmptyObject() throws Exception { String bulkIndexAction = "{ \"index\":{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"1\"} }\r\n"; String bulkIndexSource = "{ \"field1\" : \"value1\" }\r\n"; @@ -299,7 +307,7 @@ public class BulkRequestTests extends ESTestCase { out.write(xContentType.xContent().streamSeparator()); try(XContentBuilder builder = XContentFactory.contentBuilder(xContentType, out)) { builder.startObject(); - builder.field("doc", "{}"); + builder.startObject("doc").endObject(); Map values = new HashMap<>(); values.put("version", 2L); values.put("_index", "index"); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java index c4857b7b094..80048cf3433 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java @@ -260,13 +260,13 @@ public class BulkWithUpdatesIT extends ESIntegTestCase { assertThat(bulkResponse.getItems().length, equalTo(3)); bulkResponse = client().prepareBulk() - 
.add(client().prepareUpdate().setIndex("test").setType("type1").setId("1").setFields("field") + .add(client().prepareUpdate().setIndex("test").setType("type1").setId("1").setFetchSource("field", null) .setScript(new Script( ScriptType.INLINE, CustomScriptPlugin.NAME, "throw script exception on unknown var", Collections.emptyMap()))) - .add(client().prepareUpdate().setIndex("test").setType("type1").setId("2").setFields("field") + .add(client().prepareUpdate().setIndex("test").setType("type1").setId("2").setFetchSource("field", null) .setScript(new Script( ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx._source.field += 1", Collections.emptyMap()))) - .add(client().prepareUpdate().setIndex("test").setType("type1").setId("3").setFields("field") + .add(client().prepareUpdate().setIndex("test").setType("type1").setId("3").setFetchSource("field", null) .setScript(new Script( ScriptType.INLINE, CustomScriptPlugin.NAME, "throw script exception on unknown var", Collections.emptyMap()))) .execute().actionGet(); @@ -279,7 +279,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase { assertThat(bulkResponse.getItems()[1].getResponse().getId(), equalTo("2")); assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(2L)); - assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getGetResult().field("field").getValue(), equalTo(2)); + assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getGetResult().sourceAsMap().get("field"), equalTo(2)); assertThat(bulkResponse.getItems()[1].getFailure(), nullValue()); assertThat(bulkResponse.getItems()[2].getFailure().getId(), equalTo("3")); @@ -303,7 +303,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase { builder.add( client().prepareUpdate() .setIndex("test").setType("type1").setId(Integer.toString(i)) - .setFields("counter") + .setFetchSource("counter", null) .setScript(script) .setUpsert(jsonBuilder().startObject().field("counter", 1).endObject())); } @@ -319,7 +319,7 @@ 
public class BulkWithUpdatesIT extends ESIntegTestCase { assertThat(response.getItems()[i].getOpType(), equalTo(OpType.UPDATE)); assertThat(response.getItems()[i].getResponse().getId(), equalTo(Integer.toString(i))); assertThat(response.getItems()[i].getResponse().getVersion(), equalTo(1L)); - assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().field("counter").getValue(), equalTo(1)); + assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().sourceAsMap().get("counter"), equalTo(1)); for (int j = 0; j < 5; j++) { GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).execute() @@ -333,7 +333,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase { builder = client().prepareBulk(); for (int i = 0; i < numDocs; i++) { UpdateRequestBuilder updateBuilder = client().prepareUpdate().setIndex("test").setType("type1").setId(Integer.toString(i)) - .setFields("counter"); + .setFetchSource("counter", null); if (i % 2 == 0) { updateBuilder.setScript(script); } else { @@ -357,7 +357,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase { assertThat(response.getItems()[i].getOpType(), equalTo(OpType.UPDATE)); assertThat(response.getItems()[i].getResponse().getId(), equalTo(Integer.toString(i))); assertThat(response.getItems()[i].getResponse().getVersion(), equalTo(2L)); - assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().field("counter").getValue(), equalTo(2)); + assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().sourceAsMap().get("counter"), equalTo(2)); } builder = client().prepareBulk(); diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index de97cf7b7e9..f562cbd0ec1 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -61,7 +61,6 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.script.MockScriptEngine.mockInlineScript; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; -import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -277,17 +276,26 @@ public class UpdateRequestTests extends ESTestCase { assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2")); } - // Related to issue 15338 - public void testFieldsParsing() throws Exception { - UpdateRequest request = new UpdateRequest("test", "type1", "1").fromXContent( - createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"field1\": \"value1\"}, \"fields\": \"_source\"}"))); - assertThat(request.doc().sourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(request.fields(), arrayContaining("_source")); + public void testUnknownFieldParsing() throws Exception { + UpdateRequest request = new UpdateRequest("test", "type", "1"); + XContentParser contentParser = createParser(XContentFactory.jsonBuilder() + .startObject() + .field("unknown_field", "test") + .endObject()); - request = new UpdateRequest("test", "type2", "2").fromXContent(createParser(JsonXContent.jsonXContent, - new BytesArray("{\"doc\": {\"field2\": \"value2\"}, \"fields\": [\"field1\", \"field2\"]}"))); - assertThat(request.doc().sourceAsMap().get("field2").toString(), equalTo("value2")); - assertThat(request.fields(), arrayContaining("field1", "field2")); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> request.fromXContent(contentParser)); + assertEquals("[UpdateRequest] unknown 
field [unknown_field], parser not found", ex.getMessage()); + + UpdateRequest request2 = new UpdateRequest("test", "type", "1"); + XContentParser unknownObject = createParser(XContentFactory.jsonBuilder() + .startObject() + .field("script", "ctx.op = ctx._source.views == params.count ? 'delete' : 'none'") + .startObject("params") + .field("count", 1) + .endObject() + .endObject()); + ex = expectThrows(IllegalArgumentException.class, () -> request2.fromXContent(unknownObject)); + assertEquals("[UpdateRequest] unknown field [params], parser not found", ex.getMessage()); } public void testFetchSourceParsing() throws Exception { @@ -444,13 +452,6 @@ public class UpdateRequestTests extends ESTestCase { BytesReference source = RandomObjects.randomSource(random(), xContentType); updateRequest.upsert(new IndexRequest().source(source, xContentType)); } - if (randomBoolean()) { - String[] fields = new String[randomIntBetween(0, 5)]; - for (int i = 0; i < fields.length; i++) { - fields[i] = randomAlphaOfLength(5); - } - updateRequest.fields(fields); - } if (randomBoolean()) { if (randomBoolean()) { updateRequest.fetchSource(randomBoolean()); @@ -487,10 +488,8 @@ public class UpdateRequestTests extends ESTestCase { assertEquals(updateRequest.detectNoop(), parsedUpdateRequest.detectNoop()); assertEquals(updateRequest.docAsUpsert(), parsedUpdateRequest.docAsUpsert()); - assertEquals(updateRequest.docAsUpsert(), parsedUpdateRequest.docAsUpsert()); assertEquals(updateRequest.script(), parsedUpdateRequest.script()); assertEquals(updateRequest.scriptedUpsert(), parsedUpdateRequest.scriptedUpsert()); - assertArrayEquals(updateRequest.fields(), parsedUpdateRequest.fields()); assertEquals(updateRequest.fetchSource(), parsedUpdateRequest.fetchSource()); BytesReference finalBytes = toXContent(parsedUpdateRequest, xContentType, humanReadable); diff --git a/server/src/test/java/org/elasticsearch/update/UpdateIT.java b/server/src/test/java/org/elasticsearch/update/UpdateIT.java index 
4d6ce3a4d1f..df5bdd8322f 100644 --- a/server/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/server/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -225,7 +225,7 @@ public class UpdateIT extends ESIntegTestCase { UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setDocAsUpsert(true) - .setFields("_source") + .setFetchSource(true) .execute().actionGet(); assertThat(updateResponse.getIndex(), equalTo("test")); assertThat(updateResponse.getGetResult(), notNullValue()); @@ -241,7 +241,7 @@ public class UpdateIT extends ESIntegTestCase { assertThrows(client().prepareUpdate(indexOrAlias(), "type1", "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setDocAsUpsert(false) - .setFields("_source") + .setFetchSource(true) .execute(), DocumentMissingException.class); } @@ -264,7 +264,7 @@ public class UpdateIT extends ESIntegTestCase { updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, PUT_VALUES_SCRIPT, Collections.singletonMap("extra", "foo"))) - .setFields("_source") + .setFetchSource(true) .execute().actionGet(); assertThat(updateResponse.getIndex(), equalTo("test")); @@ -293,12 +293,9 @@ public class UpdateIT extends ESIntegTestCase { ensureGreen(); Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field")); - try { - client().prepareUpdate(indexOrAlias(), "type1", "1").setScript(fieldIncScript).execute().actionGet(); - fail(); - } catch (DocumentMissingException e) { - // all is well - } + DocumentMissingException ex = expectThrows(DocumentMissingException.class, + () -> client().prepareUpdate(indexOrAlias(), "type1", 
"1").setScript(fieldIncScript).execute().actionGet()); + assertEquals("[type1][1]: document missing", ex.getMessage()); client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet(); @@ -353,19 +350,6 @@ public class UpdateIT extends ESIntegTestCase { assertThat(getResponse.isExists(), equalTo(false)); } - // check fields parameter - client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet(); - updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") - .setScript(fieldIncScript) - .setFields("field") - .setFetchSource(true) - .execute().actionGet(); - assertThat(updateResponse.getIndex(), equalTo("test")); - assertThat(updateResponse.getGetResult(), notNullValue()); - assertThat(updateResponse.getGetResult().getIndex(), equalTo("test")); - assertThat(updateResponse.getGetResult().sourceRef(), notNullValue()); - assertThat(updateResponse.getGetResult().field("field").getValue(), notNullValue()); - // check _source parameter client().prepareIndex("test", "type1", "1").setSource("field1", 1, "field2", 2).execute().actionGet(); updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") @@ -383,7 +367,7 @@ public class UpdateIT extends ESIntegTestCase { // check updates without script // add new field client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet(); - updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field2", 2).endObject()).execute().actionGet(); + client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field2", 2).endObject()).execute().actionGet(); for (int i = 0; i < 5; i++) { GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("1")); @@ -391,7 +375,7 @@ public class UpdateIT extends ESIntegTestCase { } // 
change existing field - updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field", 3).endObject()).execute().actionGet(); + client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field", 3).endObject()).execute().actionGet(); for (int i = 0; i < 5; i++) { GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("3")); @@ -409,7 +393,7 @@ public class UpdateIT extends ESIntegTestCase { testMap.put("map1", 8); client().prepareIndex("test", "type1", "1").setSource("map", testMap).execute().actionGet(); - updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("map", testMap3).endObject()).execute().actionGet(); + client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("map", testMap3).endObject()).execute().actionGet(); for (int i = 0; i < 5; i++) { GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); Map map1 = (Map) getResponse.getSourceAsMap().get("map"); @@ -581,7 +565,7 @@ public class UpdateIT extends ESIntegTestCase { assertThat(response.getId(), equalTo(Integer.toString(i))); assertThat(response.isExists(), equalTo(true)); assertThat(response.getVersion(), equalTo((long) numberOfThreads)); - assertThat((Integer) response.getSource().get("field"), equalTo(numberOfThreads)); + assertThat(response.getSource().get("field"), equalTo(numberOfThreads)); } } diff --git a/server/src/test/java/org/elasticsearch/update/UpdateNoopIT.java b/server/src/test/java/org/elasticsearch/update/UpdateNoopIT.java index 17fb21441e2..2cb71d9bcbe 100644 --- a/server/src/test/java/org/elasticsearch/update/UpdateNoopIT.java +++ b/server/src/test/java/org/elasticsearch/update/UpdateNoopIT.java 
@@ -248,7 +248,7 @@ public class UpdateNoopIT extends ESIntegTestCase { UpdateRequestBuilder updateRequest = client().prepareUpdate("test", "type1", "1") .setDoc(xContentBuilder) .setDocAsUpsert(true) - .setFields("_source"); + .setFetchSource(true); if (detectNoop != null) { updateRequest.setDetectNoop(detectNoop); } From 62e33eeef3f64fc36c301c6c8610d7016162a721 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 16 Apr 2018 15:06:57 +0200 Subject: [PATCH 13/13] [TEST] REST client request without leading '/' (#29471) The following is the current behaviour, tested now through a specific test. The low-level REST client doesn't add a leading slash when not provided, unless a `pathPrefix` is configured in which case a trailing slash will be automatically added when concatenating the prefix and the provided uri. Also when configuring a pathPrefix, if it doesn't start with a '/' it will be modified by adding the missing leading '/'. --- .../RestClientSingleHostIntegTests.java | 31 +++++++++++++++++-- 1 file changed, 29 insertions(+), 2 deletions(-) diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java index 3d282a642e0..59aa2baab96 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java @@ -58,6 +58,7 @@ import static org.hamcrest.Matchers.startsWith; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; /** * Integration test to check interaction between {@link RestClient} and {@link org.apache.http.client.HttpClient}. 
@@ -135,8 +136,7 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase { final RestClientBuilder restClientBuilder = RestClient.builder( new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort())).setDefaultHeaders(defaultHeaders); if (pathPrefix.length() > 0) { - // sometimes cut off the leading slash - restClientBuilder.setPathPrefix(randomBoolean() ? pathPrefix.substring(1) : pathPrefix); + restClientBuilder.setPathPrefix(pathPrefix); } if (useAuth) { @@ -281,6 +281,33 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase { } } + public void testUrlWithoutLeadingSlash() throws Exception { + if (pathPrefix.length() == 0) { + try { + restClient.performRequest("GET", "200"); + fail("request should have failed"); + } catch(ResponseException e) { + assertEquals(404, e.getResponse().getStatusLine().getStatusCode()); + } + } else { + { + Response response = restClient.performRequest("GET", "200"); + //a trailing slash gets automatically added if a pathPrefix is configured + assertEquals(200, response.getStatusLine().getStatusCode()); + } + { + //pathPrefix is not required to start with '/', will be added automatically + try (RestClient restClient = RestClient.builder( + new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort())) + .setPathPrefix(pathPrefix.substring(1)).build()) { + Response response = restClient.performRequest("GET", "200"); + //a trailing slash gets automatically added if a pathPrefix is configured + assertEquals(200, response.getStatusLine().getStatusCode()); + } + } + } + } + private Response bodyTest(final String method) throws IOException { return bodyTest(restClient, method); }