diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index c8a37a8ec57..79a199e98e4 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -18,6 +18,8 @@ */ package org.elasticsearch.gradle.test +import org.gradle.api.GradleException +import org.gradle.api.Project import org.gradle.api.file.FileCollection import org.gradle.api.tasks.Input @@ -31,10 +33,10 @@ class ClusterConfiguration { int numNodes = 1 @Input - int httpPort = 9400 + int baseHttpPort = 9400 @Input - int transportPort = 9500 + int baseTransportPort = 9500 @Input boolean daemonize = true @@ -45,24 +47,66 @@ class ClusterConfiguration { @Input String jvmArgs = System.getProperty('tests.jvm.argline', '') + /** + * A closure to call before the cluster is considered ready. The closure is passed the node info, + * as well as a groovy AntBuilder, to enable running ant condition checks. The default wait + * condition performs an http GET on the node's http port. + */ + @Input + Closure waitCondition = { NodeInfo node, AntBuilder ant -> + File tmpFile = new File(node.cwd, 'wait.success') + ant.get(src: "http://localhost:${node.httpPort()}", + dest: tmpFile.toString(), + ignoreerrors: true, // do not fail on error, so logging buffers can be flushed by the wait task + retries: 10) + return tmpFile.exists() + } + Map<String, String> systemProperties = new HashMap<>() - LinkedHashMap<String, FileCollection> plugins = new LinkedHashMap<>() + Map<String, String> settings = new HashMap<>() + + // map from destination path to source file + Map<String, Object> extraConfigFiles = new HashMap<>() + + LinkedHashMap<String, Object> plugins = new LinkedHashMap<>() LinkedHashMap<String, Object[]> setupCommands = new LinkedHashMap<>() - @Input - void plugin(String name, FileCollection file) { - plugins.put(name, file) - } - @Input void systemProperty(String property, String value) { systemProperties.put(property, value) } + @Input + void setting(String name, String value) { + settings.put(name, value) + } + + @Input + void plugin(String name, FileCollection file) { + plugins.put(name, file) + } + + @Input + void plugin(String name, Project pluginProject) { + plugins.put(name, pluginProject) + } + @Input void setupCommand(String name, Object... args) { setupCommands.put(name, args) } + + /** + * Add an extra configuration file.
The path is relative to the config dir, and the sourceFile + * is anything accepted by project.file() + */ + @Input + void extraConfigFile(String path, Object sourceFile) { + if (path == 'elasticsearch.yml') { + throw new GradleException('Overwriting elasticsearch.yml is not allowed, add additional settings using cluster { setting "foo", "bar" }') + } + extraConfigFiles.put(path, sourceFile) + } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 0cc468c65fc..c5c57057f02 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -21,8 +21,11 @@ package org.elasticsearch.gradle.test import org.apache.tools.ant.DefaultLogger import org.apache.tools.ant.taskdefs.condition.Os import org.elasticsearch.gradle.VersionProperties +import org.elasticsearch.gradle.plugin.PluginBuildPlugin import org.gradle.api.* +import org.gradle.api.artifacts.Configuration import org.gradle.api.file.FileCollection +import org.gradle.api.logging.Logger import org.gradle.api.tasks.Copy import org.gradle.api.tasks.Delete import org.gradle.api.tasks.Exec @@ -34,87 +37,6 @@ import java.nio.file.Paths */ class ClusterFormationTasks { - static class NodeInfo { - /** common configuration for all nodes, including this one */ - ClusterConfiguration config - /** node number within the cluster, for creating unique names and paths */ - int nodeNum - /** name of the cluster this node is part of */ - String clusterName - /** root directory all node files and operations happen under */ - File baseDir - /** the pid file the node will use */ - File pidFile - /** elasticsearch home dir */ - File homeDir - /** working directory for the node process */ - File cwd - /** file that if it exists, indicates the node failed to start */ - File failedMarker - /** stdout/stderr log of the elasticsearch process for this node */ - File startLog - /** directory to install plugins from */ - File pluginsTmpDir - /** environment variables to start the node with */ - Map env - /** arguments to start the node with */ - List args - /** Path to the elasticsearch start script */ - String esScript - /** buffer for ant output when starting this node */ - ByteArrayOutputStream buffer = new ByteArrayOutputStream() - - /** Creates a node to run as part of a cluster for the given task */ - NodeInfo(ClusterConfiguration config, int nodeNum, Project project, Task task) { - this.config = config - this.nodeNum = nodeNum - clusterName = "${task.path.replace(':', '_').substring(1)}" - baseDir = new File(project.buildDir, "cluster/${task.name} node${nodeNum}") - pidFile = new File(baseDir, 'es.pid') - homeDir = homeDir(baseDir, config.distribution) - cwd = new File(baseDir, "cwd") - failedMarker = new File(cwd, 'run.failed') - startLog = new File(cwd, 'run.log') - pluginsTmpDir = new File(baseDir, "plugins tmp") - - env = [ - 'JAVA_HOME' : project.javaHome, - 'ES_GC_OPTS': config.jvmArgs // we pass these with the undocumented gc opts so the argline can set gc, etc - ] - args = config.systemProperties.collect { key, value -> "-D${key}=${value}" } - for (Map.Entry property : System.properties.entrySet()) { - if (property.getKey().startsWith('es.')) { - args.add("-D${property.getKey()}=${property.getValue()}") - } - } - // running with cmd on windows will look for this with the .bat extension - esScript = 
new File(homeDir, 'bin/elasticsearch').toString() - } - - /** Returns debug string for the command that started this node. */ - String getCommandString() { - String esCommandString = "Elasticsearch node ${nodeNum} command: ${esScript} " - esCommandString += args.join(' ') - esCommandString += '\nenvironment:' - env.each { k, v -> esCommandString += "\n ${k}: ${v}" } - return esCommandString - } - - /** Returns the directory elasticsearch home is contained in for the given distribution */ - static File homeDir(File baseDir, String distro) { - String path - switch (distro) { - case 'zip': - case 'tar': - path = "elasticsearch-${VersionProperties.elasticsearch}" - break; - default: - throw new InvalidUserDataException("Unknown distribution: ${distro}") - } - return new File(baseDir, path) - } - } - /** * Adds dependent tasks to the given task to start and stop a cluster with the given configuration. */ @@ -179,22 +101,21 @@ class ClusterFormationTasks { setup = configureStopTask(taskName(task, node, 'stopPrevious'), project, setup, node) setup = configureExtractTask(taskName(task, node, 'extract'), project, setup, node) setup = configureWriteConfigTask(taskName(task, node, 'configure'), project, setup, node) + setup = configureExtraConfigFilesTask(taskName(task, node, 'extraConfig'), project, setup, node) setup = configureCopyPluginsTask(taskName(task, node, 'copyPlugins'), project, setup, node) // install plugins - for (Map.Entry<String, FileCollection> plugin : node.config.plugins.entrySet()) { - // replace every dash followed by a character with just the uppercase character - String camelName = plugin.getKey().replaceAll(/-(\w)/) { _, c -> c.toUpperCase(Locale.ROOT) } - String actionName = "install${camelName[0].toUpperCase(Locale.ROOT) + camelName.substring(1)}Plugin" - // delay reading the file location until execution time by wrapping in a closure within a GString - String file = "${-> new File(node.pluginsTmpDir, plugin.getValue().singleFile.getName()).toURI().toURL().toString()}" - Object[] args = [new File(node.homeDir, 'bin/plugin'), 'install', file] - setup = configureExecTask(taskName(task, node, actionName), project, setup, node, args) + for (Map.Entry<String, Object> plugin : node.config.plugins.entrySet()) { + String actionName = pluginTaskName('install', plugin.getKey(), 'Plugin') + setup = configureInstallPluginTask(taskName(task, node, actionName), project, setup, node, plugin.getValue()) } // extra setup commands for (Map.Entry<String, Object[]> command : node.config.setupCommands.entrySet()) { - setup = configureExecTask(taskName(task, node, command.getKey()), project, setup, node, command.getValue()) + // the first argument is the actual script name, relative to home + Object[] args = command.getValue().clone() + args[0] = new File(node.homeDir, args[0].toString()) + setup = configureExecTask(taskName(task, node, command.getKey()), project, setup, node, args) } Task start = configureStartTask(taskName(task, node, 'start'), project, setup, node) @@ -236,34 +157,111 @@ class ClusterFormationTasks { static Task configureWriteConfigTask(String name, Project project, Task setup, NodeInfo node) { Map esConfig = [ 'cluster.name' : node.clusterName, - 'http.port' : node.config.httpPort + node.nodeNum, - 'transport.tcp.port' : node.config.transportPort + node.nodeNum, + 'http.port' : node.httpPort(), + 'transport.tcp.port' : node.transportPort(), 'pidfile' : node.pidFile, - 'discovery.zen.ping.unicast.hosts': (0..
"${key}: ${value}" }.join('\n'), 'UTF-8') } } - /** Adds a task to copy plugins to a temp dir, which they will later be installed from. */ + static Task configureExtraConfigFilesTask(String name, Project project, Task setup, NodeInfo node) { + if (node.config.extraConfigFiles.isEmpty()) { + return setup + } + Copy copyConfig = project.tasks.create(name: name, type: Copy, dependsOn: setup) + copyConfig.into(new File(node.homeDir, 'config')) // copy must always have a general dest dir, even though we don't use it + for (Map.Entry extraConfigFile : node.config.extraConfigFiles.entrySet()) { + File srcConfigFile = project.file(extraConfigFile.getValue()) + if (srcConfigFile.isDirectory()) { + throw new GradleException("Source for extraConfigFile must be a file: ${srcConfigFile}") + } + if (srcConfigFile.exists() == false) { + throw new GradleException("Source file for extraConfigFile does not exist: ${srcConfigFile}") + } + File destConfigFile = new File(node.homeDir, 'config/' + extraConfigFile.getKey()) + copyConfig.from(srcConfigFile) + .into(destConfigFile.canonicalFile.parentFile) + .rename { destConfigFile.name } + } + return copyConfig + } + + /** + * Adds a task to copy plugins to a temp dir, which they will later be installed from. + * + * For each plugin, if the plugin has rest spec apis in its tests, those api files are also copied + * to the test resources for this project. + */ static Task configureCopyPluginsTask(String name, Project project, Task setup, NodeInfo node) { if (node.config.plugins.isEmpty()) { return setup } + Copy copyPlugins = project.tasks.create(name: name, type: Copy, dependsOn: setup) - return project.tasks.create(name: name, type: Copy, dependsOn: setup) { - into node.pluginsTmpDir - from(node.config.plugins.values()) + List pluginFiles = [] + for (Map.Entry plugin : node.config.plugins.entrySet()) { + FileCollection pluginZip + if (plugin.getValue() instanceof Project) { + Project pluginProject = plugin.getValue() + if (pluginProject.plugins.hasPlugin(PluginBuildPlugin) == false) { + throw new GradleException("Task ${name} cannot project ${pluginProject.path} which is not an esplugin") + } + String configurationName = "_plugin_${pluginProject.path}" + Configuration configuration = project.configurations.findByName(configurationName) + if (configuration == null) { + configuration = project.configurations.create(configurationName) + } + project.dependencies.add(configurationName, pluginProject) + setup.dependsOn(pluginProject.tasks.bundlePlugin) + pluginZip = configuration + + // also allow rest tests to use the rest spec from the plugin + Copy copyRestSpec = null + for (File resourceDir : pluginProject.sourceSets.test.resources.srcDirs) { + File restApiDir = new File(resourceDir, 'rest-api-spec/api') + if (restApiDir.exists() == false) continue + if (copyRestSpec == null) { + copyRestSpec = project.tasks.create(name: pluginTaskName('copy', plugin.getKey(), 'PluginRestSpec'), type: Copy) + copyPlugins.dependsOn(copyRestSpec) + copyRestSpec.into(project.sourceSets.test.output.resourcesDir) + } + copyRestSpec.from(resourceDir).include('rest-api-spec/api/**') + } + } else { + pluginZip = plugin.getValue() + } + pluginFiles.add(pluginZip) } + + copyPlugins.into(node.pluginsTmpDir) + copyPlugins.from(pluginFiles) + return copyPlugins + } + + static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, Object plugin) { + FileCollection pluginZip + if (plugin instanceof Project) { + pluginZip = 
project.configurations.getByName("_plugin_${plugin.path}") + } else { + pluginZip = plugin + } + // delay reading the file location until execution time by wrapping in a closure within a GString + String file = "${-> new File(node.pluginsTmpDir, pluginZip.singleFile.getName()).toURI().toURL().toString()}" + Object[] args = [new File(node.homeDir, 'bin/plugin'), 'install', file] + return configureExecTask(name, project, setup, node, args) } /** Adds a task to execute a command to help setup the cluster */ @@ -305,7 +303,7 @@ class ClusterFormationTasks { } // this closure is converted into ant nodes by groovy's AntBuilder - Closure antRunner = { + Closure antRunner = { AntBuilder ant -> // we must add debug options inside the closure so the config is read at execution time, as // gradle task options are not processed until the end of the configuration phase if (node.config.debug) { @@ -334,7 +332,7 @@ class ClusterFormationTasks { script = wrapperScript.toString() } - exec(executable: executable, spawn: node.config.daemonize, dir: node.cwd, taskname: 'elasticsearch') { + ant.exec(executable: executable, spawn: node.config.daemonize, dir: node.cwd, taskname: 'elasticsearch') { node.env.each { key, value -> env(key: key, value: value) } arg(value: script) node.args.each { arg(value: it) } @@ -347,7 +345,6 @@ class ClusterFormationTasks { node.getCommandString().eachLine { line -> logger.info(line) } if (logger.isInfoEnabled() || node.config.daemonize == false) { - // run with piping streams directly out (even stderr to stdout since gradle would capture it) runAntCommand(project, antRunner, System.out, System.err) } else { // buffer the output, we may not need to print it @@ -376,7 +373,7 @@ class ClusterFormationTasks { resourceexists { file(file: node.pidFile.toString()) } - http(url: "http://localhost:${node.config.httpPort + node.nodeNum}") + socket(server: '127.0.0.1', port: node.httpPort()) } } } @@ -386,26 +383,48 @@ class ClusterFormationTasks { anyNodeFailed |= node.failedMarker.exists() } if (ant.properties.containsKey("failed${name}".toString()) || anyNodeFailed) { - for (NodeInfo node : nodes) { - if (logger.isInfoEnabled() == false) { - // We already log the command at info level. No need to do it twice. 
- node.getCommandString().eachLine { line -> logger.error(line) } - } - // the waitfor failed, so dump any output we got (may be empty if info logging, but that is ok) - logger.error("Node ${node.nodeNum} ant output:") - node.buffer.toString('UTF-8').eachLine { line -> logger.error(line) } - // also dump the log file for the startup script (which will include ES logging output to stdout) - if (node.startLog.exists()) { - logger.error("Node ${node.nodeNum} log:") - node.startLog.eachLine { line -> logger.error(line) } - } + waitFailed(nodes, logger, 'Failed to start elasticsearch') + } + + // go through each node checking the wait condition + for (NodeInfo node : nodes) { + // first bind node info to the closure, then pass to the ant runner so we can get good logging + Closure antRunner = node.config.waitCondition.curry(node) + + boolean success + if (logger.isInfoEnabled()) { + success = runAntCommand(project, antRunner, System.out, System.err) + } else { + PrintStream captureStream = new PrintStream(node.buffer, true, "UTF-8") + success = runAntCommand(project, antRunner, captureStream, captureStream) + } + + if (success == false) { + waitFailed(nodes, logger, 'Elasticsearch cluster failed to pass wait condition') } - throw new GradleException('Failed to start elasticsearch') } } return wait } + static void waitFailed(List nodes, Logger logger, String msg) { + for (NodeInfo node : nodes) { + if (logger.isInfoEnabled() == false) { + // We already log the command at info level. No need to do it twice. + node.getCommandString().eachLine { line -> logger.error(line) } + } + // the waitfor failed, so dump any output we got (may be empty if info logging, but that is ok) + logger.error("Node ${node.nodeNum} ant output:") + node.buffer.toString('UTF-8').eachLine { line -> logger.error(line) } + // also dump the log file for the startup script (which will include ES logging output to stdout) + if (node.startLog.exists()) { + logger.error("Node ${node.nodeNum} log:") + node.startLog.eachLine { line -> logger.error(line) } + } + } + throw new GradleException(msg) + } + /** Adds a task to check if the process with the given pidfile is actually elasticsearch */ static Task configureCheckPreviousTask(String name, Project project, Object depends, NodeInfo node) { return project.tasks.create(name: name, type: Exec, dependsOn: depends) { @@ -474,15 +493,22 @@ class ClusterFormationTasks { } } + static String pluginTaskName(String action, String name, String suffix) { + // replace every dash followed by a character with just the uppercase character + String camelName = name.replaceAll(/-(\w)/) { _, c -> c.toUpperCase(Locale.ROOT) } + return action + camelName[0].toUpperCase(Locale.ROOT) + camelName.substring(1) + suffix + } + /** Runs an ant command, sending output to the given out and error streams */ - static void runAntCommand(Project project, Closure command, PrintStream outputStream, PrintStream errorStream) { + static Object runAntCommand(Project project, Closure command, PrintStream outputStream, PrintStream errorStream) { DefaultLogger listener = new DefaultLogger( errorPrintStream: errorStream, outputPrintStream: outputStream, messageOutputLevel: org.apache.tools.ant.Project.MSG_INFO) project.ant.project.addBuildListener(listener) - project.configure(project.ant, command) + Object retVal = command(project.ant) project.ant.project.removeBuildListener(listener) + return retVal } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy 
b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy new file mode 100644 index 00000000000..3955b9e0269 --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -0,0 +1,131 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.test + +import org.elasticsearch.gradle.VersionProperties +import org.gradle.api.InvalidUserDataException +import org.gradle.api.Project +import org.gradle.api.Task + +/** + * A container for the files and configuration associated with a single node in a test cluster. + */ +class NodeInfo { + /** common configuration for all nodes, including this one */ + ClusterConfiguration config + + /** node number within the cluster, for creating unique names and paths */ + int nodeNum + + /** name of the cluster this node is part of */ + String clusterName + + /** root directory all node files and operations happen under */ + File baseDir + + /** the pid file the node will use */ + File pidFile + + /** elasticsearch home dir */ + File homeDir + + /** working directory for the node process */ + File cwd + + /** file that if it exists, indicates the node failed to start */ + File failedMarker + + /** stdout/stderr log of the elasticsearch process for this node */ + File startLog + + /** directory to install plugins from */ + File pluginsTmpDir + + /** environment variables to start the node with */ + Map env + + /** arguments to start the node with */ + List args + + /** Path to the elasticsearch start script */ + String esScript + + /** buffer for ant output when starting this node */ + ByteArrayOutputStream buffer = new ByteArrayOutputStream() + + /** Creates a node to run as part of a cluster for the given task */ + NodeInfo(ClusterConfiguration config, int nodeNum, Project project, Task task) { + this.config = config + this.nodeNum = nodeNum + clusterName = "${task.path.replace(':', '_').substring(1)}" + baseDir = new File(project.buildDir, "cluster/${task.name} node${nodeNum}") + pidFile = new File(baseDir, 'es.pid') + homeDir = homeDir(baseDir, config.distribution) + cwd = new File(baseDir, "cwd") + failedMarker = new File(cwd, 'run.failed') + startLog = new File(cwd, 'run.log') + pluginsTmpDir = new File(baseDir, "plugins tmp") + + env = [ + 'JAVA_HOME' : project.javaHome, + 'ES_GC_OPTS': config.jvmArgs // we pass these with the undocumented gc opts so the argline can set gc, etc + ] + args = config.systemProperties.collect { key, value -> "-D${key}=${value}" } + for (Map.Entry property : System.properties.entrySet()) { + if (property.getKey().startsWith('es.')) { + args.add("-D${property.getKey()}=${property.getValue()}") + } + } + // running with cmd on windows will look for this with the .bat extension + esScript = new File(homeDir, 
'bin/elasticsearch').toString() + } + + /** Returns debug string for the command that started this node. */ + String getCommandString() { + String esCommandString = "Elasticsearch node ${nodeNum} command: ${esScript} " + esCommandString += args.join(' ') + esCommandString += '\nenvironment:' + env.each { k, v -> esCommandString += "\n ${k}: ${v}" } + return esCommandString + } + + /** Returns the http port for this node */ + int httpPort() { + return config.baseHttpPort + nodeNum + } + + /** Returns the transport port for this node */ + int transportPort() { + return config.baseTransportPort + nodeNum + } + + /** Returns the directory elasticsearch home is contained in for the given distribution */ + static File homeDir(File baseDir, String distro) { + String path + switch (distro) { + case 'zip': + case 'tar': + path = "elasticsearch-${VersionProperties.elasticsearch}" + break; + default: + throw new InvalidUserDataException("Unknown distribution: ${distro}") + } + return new File(baseDir, path) + } +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index c68a6744237..47cbdd5cb48 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -67,7 +67,9 @@ class RestIntegTestTask extends RandomizedTestingTask { } RestIntegTestTask() { - project.afterEvaluate { + // this must run after all projects have been configured, so we know any project + // references can be accessed as fully configured projects + project.gradle.projectsEvaluated { Task test = project.tasks.findByName('test') if (test != null) { mustRunAfter(test) @@ -75,7 +77,7 @@ class RestIntegTestTask extends RandomizedTestingTask { ClusterFormationTasks.setup(project, this, clusterConfig) configure { parallelism '1' - systemProperty 'tests.cluster', "localhost:${clusterConfig.transportPort}" + systemProperty 'tests.cluster', "localhost:${clusterConfig.baseTransportPort}" } } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy index 6116ecf4ce2..37f65c88703 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy @@ -6,7 +6,7 @@ import org.gradle.api.internal.tasks.options.Option class RunTask extends DefaultTask { - ClusterConfiguration clusterConfig = new ClusterConfiguration(httpPort: 9200, transportPort: 9300, daemonize: false) + ClusterConfiguration clusterConfig = new ClusterConfiguration(baseHttpPort: 9200, baseTransportPort: 9300, daemonize: false) RunTask() { project.afterEvaluate { diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index 65da4ed93e1..7ae10e0a5a6 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -51,9 +51,13 @@ public class ElasticsearchException extends RuntimeException implements ToXConte private static final Map<Class<? extends ElasticsearchException>, ElasticsearchExceptionHandle> CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE; private final Map<String, List<String>> headers = new HashMap<>(); + /** + * Construct an ElasticsearchException with the specified cause exception.
+ */ public ElasticsearchException(Throwable cause) { super(cause); } + /** * Construct a ElasticsearchException with the specified detail message. * diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index 1b4bd35faa3..4e4ec3b614f 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -266,11 +266,15 @@ public class Version { public static final int V_2_0_0_ID = 2000099; public static final Version V_2_0_0 = new Version(V_2_0_0_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1); public static final int V_2_0_1_ID = 2000199; - public static final Version V_2_0_1 = new Version(V_2_0_1_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_1); + public static final Version V_2_0_1 = new Version(V_2_0_1_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1); + public static final int V_2_0_2_ID = 2000299; + public static final Version V_2_0_2 = new Version(V_2_0_2_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_1); public static final int V_2_1_0_ID = 2010099; - public static final Version V_2_1_0 = new Version(V_2_1_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_0); + public static final Version V_2_1_0 = new Version(V_2_1_0_ID, false, org.apache.lucene.util.Version.LUCENE_5_3_1); + public static final int V_2_1_1_ID = 2010199; + public static final Version V_2_1_1 = new Version(V_2_1_1_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_1); public static final int V_2_2_0_ID = 2020099; - public static final Version V_2_2_0 = new Version(V_2_2_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_0); + public static final Version V_2_2_0 = new Version(V_2_2_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_0); public static final int V_3_0_0_ID = 3000099; public static final Version V_3_0_0 = new Version(V_3_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_0); public static final Version CURRENT = V_3_0_0; @@ -289,8 +293,12 @@ public class Version { return V_3_0_0; case V_2_2_0_ID: return V_2_2_0; + case V_2_1_1_ID: + return V_2_1_1; case V_2_1_0_ID: return V_2_1_0; + case V_2_0_2_ID: + return V_2_0_2; case V_2_0_1_ID: return V_2_0_1; case V_2_0_0_ID: diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java b/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java index 5db88ec254d..5356d33bb8e 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java @@ -191,7 +191,7 @@ class JNANatives { if (logger.isDebugEnabled()) { logger.debug("unable to install syscall filter", t); } - logger.warn("unable to install syscall filter: " + t.getMessage()); + logger.warn("unable to install syscall filter: ", t); } } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java index c19558a8808..2d89857f60d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java @@ -231,7 +231,7 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { } } catch (Exception ex) { // Wrap the inner exception so we have the index name in the exception message - throw new IllegalStateException("unable to upgrade the mappings for the index [" + indexMetaData.getIndex() + "], reason: [" + 
ex.getMessage() + "]", ex); + throw new IllegalStateException("unable to upgrade the mappings for the index [" + indexMetaData.getIndex() + "]", ex); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/AllocationId.java b/core/src/main/java/org/elasticsearch/cluster/routing/AllocationId.java index 20da5ce8cd3..528ed8b1c3f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/AllocationId.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/AllocationId.java @@ -20,15 +20,17 @@ package org.elasticsearch.cluster.routing; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.gateway.CorruptStateException; import java.io.IOException; +import java.util.Objects; /** * Uniquely identifies an allocation. An allocation is a shard moving from unassigned to initializing, @@ -43,6 +45,30 @@ public class AllocationId implements ToXContent { private static final String ID_KEY = "id"; private static final String RELOCATION_ID_KEY = "relocation_id"; + private static final ObjectParser ALLOCATION_ID_PARSER = new ObjectParser<>("allocationId"); + + static { + ALLOCATION_ID_PARSER.declareString(AllocationId.Builder::setId, new ParseField(ID_KEY)); + ALLOCATION_ID_PARSER.declareString(AllocationId.Builder::setRelocationId, new ParseField(RELOCATION_ID_KEY)); + } + + private static class Builder { + private String id; + private String relocationId; + + public void setId(String id) { + this.id = id; + } + + public void setRelocationId(String relocationId) { + this.relocationId = relocationId; + } + + public AllocationId build() { + return new AllocationId(id, relocationId); + } + } + private final String id; @Nullable private final String relocationId; @@ -58,6 +84,7 @@ public class AllocationId implements ToXContent { } private AllocationId(String id, String relocationId) { + Objects.requireNonNull(id, "Argument [id] must be non-null"); this.id = id; this.relocationId = relocationId; } @@ -164,35 +191,6 @@ public class AllocationId implements ToXContent { } public static AllocationId fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token = parser.currentToken(); - if (token == null) { // fresh parser? 
move to the first real token under object - token = parser.nextToken() - } - assert token == XContentParser.Token.START_OBJECT; - - String id = null; - String relocationId = null; - - String currentFieldName = null; - - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (ID_KEY.equals(currentFieldName)) { - id = parser.text(); - } else if (RELOCATION_ID_KEY.equals(currentFieldName)) { - relocationId = parser.text(); - } else { - throw new CorruptStateException("unexpected field in allocation id [" + currentFieldName + "]"); - } - } else { - throw new CorruptStateException("unexpected token in allocation id [" + token.name() + "]"); - } - } - if (id == null) { - throw new CorruptStateException("missing value for [id] in allocation id"); - } - return new AllocationId(id, relocationId); + return ALLOCATION_ID_PARSER.parse(parser, new AllocationId.Builder()).build(); } } diff --git a/core/src/main/java/org/elasticsearch/common/Base64.java b/core/src/main/java/org/elasticsearch/common/Base64.java index 390b3708ffc..1bc6fd0ae3f 100644 --- a/core/src/main/java/org/elasticsearch/common/Base64.java +++ b/core/src/main/java/org/elasticsearch/common/Base64.java @@ -643,7 +643,8 @@ public class Base64 { try { encoded = encodeBytes(source, 0, source.length, NO_OPTIONS); } catch (java.io.IOException ex) { - assert false : ex.getMessage(); + // not sure why this was an assertion before; running with assertions disabled would mean swallowing this exception + throw new IllegalStateException(ex); } // end catch assert encoded != null; return encoded; @@ -705,7 +706,7 @@ public class Base64 { try { encoded = encodeBytes(source, off, len, NO_OPTIONS); } catch (java.io.IOException ex) { - assert false : ex.getMessage(); + throw new IllegalStateException(ex); } // end catch assert encoded != null; return encoded; @@ -766,7 +767,7 @@ public class Base64 { try { encoded = encodeBytesToBytes(source, 0, source.length, Base64.NO_OPTIONS); } catch (java.io.IOException ex) { - assert false : "IOExceptions only come from GZipping, which is turned off: " + ex.getMessage(); + throw new IllegalStateException("IOExceptions only come from GZipping, which is turned off", ex); } return encoded; } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java index f1054e18663..5f11d12a4bf 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java @@ -21,24 +21,30 @@ package org.elasticsearch.common.geo.builders; import com.spatial4j.core.shape.Circle; import com.vividsolutions.jts.geom.Coordinate; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.unit.DistanceUnit.Distance; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Objects; public class CircleBuilder extends ShapeBuilder { public static final String FIELD_RADIUS = "radius"; public static final GeoShapeType TYPE = GeoShapeType.CIRCLE; + public static final CircleBuilder PROTOTYPE = new CircleBuilder(); + private DistanceUnit unit; private double radius; private Coordinate center; - + /** * Set the center
of the circle - * + * * @param center coordinate of the circles center * @return this */ @@ -57,6 +63,13 @@ public class CircleBuilder extends ShapeBuilder { return center(new Coordinate(lon, lat)); } + /** + * Get the center of the circle + */ + public Coordinate center() { + return center; + } + /** * Set the radius of the circle. The String value will be parsed by {@link DistanceUnit} * @param radius Value and unit of the circle combined in a string @@ -97,10 +110,24 @@ public class CircleBuilder extends ShapeBuilder { return this; } + /** + * Get the radius of the circle without unit + */ + public double radius() { + return this.radius; + } + + /** + * Get the radius unit of the circle + */ + public DistanceUnit unit() { + return this.unit; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); builder.field(FIELD_RADIUS, unit.toString(radius)); builder.field(FIELD_COORDINATES); toXContent(builder, center); @@ -116,4 +143,37 @@ public class CircleBuilder extends ShapeBuilder { public GeoShapeType type() { return TYPE; } + + @Override + public int hashCode() { + return Objects.hash(center, radius, unit.ordinal()); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + CircleBuilder other = (CircleBuilder) obj; + return Objects.equals(center, other.center) && + Objects.equals(radius, other.radius) && + Objects.equals(unit.ordinal(), other.unit.ordinal()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + writeCoordinateTo(center, out); + out.writeDouble(radius); + DistanceUnit.writeDistanceUnit(out, unit); + } + + @Override + public CircleBuilder readFrom(StreamInput in) throws IOException { + return new CircleBuilder() + .center(readCoordinateFrom(in)) + .radius(in.readDouble(), DistanceUnit.readDistanceUnit(in)); + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java index a296b3406ef..62f29d2bad7 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java @@ -21,13 +21,19 @@ package org.elasticsearch.common.geo.builders; import com.spatial4j.core.shape.Rectangle; import com.vividsolutions.jts.geom.Coordinate; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Locale; +import java.util.Objects; public class EnvelopeBuilder extends ShapeBuilder { - public static final GeoShapeType TYPE = GeoShapeType.ENVELOPE; + public static final GeoShapeType TYPE = GeoShapeType.ENVELOPE; + public static final EnvelopeBuilder PROTOTYPE = new EnvelopeBuilder(); protected Coordinate topLeft; protected Coordinate bottomRight; @@ -61,7 +67,8 @@ public class EnvelopeBuilder extends ShapeBuilder { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); + builder.field(FIELD_ORIENTATION, orientation.name().toLowerCase(Locale.ROOT)); 
builder.startArray(FIELD_COORDINATES); toXContent(builder, topLeft); toXContent(builder, bottomRight); @@ -78,4 +85,38 @@ public class EnvelopeBuilder extends ShapeBuilder { public GeoShapeType type() { return TYPE; } + + @Override + public int hashCode() { + return Objects.hash(orientation, topLeft, bottomRight); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + EnvelopeBuilder other = (EnvelopeBuilder) obj; + return Objects.equals(orientation, other.orientation) && + Objects.equals(topLeft, other.topLeft) && + Objects.equals(bottomRight, other.bottomRight); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(orientation == Orientation.RIGHT); + writeCoordinateTo(topLeft, out); + writeCoordinateTo(bottomRight, out); + } + + @Override + public EnvelopeBuilder readFrom(StreamInput in) throws IOException { + Orientation orientation = in.readBoolean() ? Orientation.RIGHT : Orientation.LEFT; + return new EnvelopeBuilder(orientation) + .topLeft(readCoordinateFrom(in)) + .bottomRight(readCoordinateFrom(in)); + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java index 57f3fc67b64..45397ed962f 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java @@ -102,7 +102,7 @@ public class GeometryCollectionBuilder extends ShapeBuilder { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); builder.startArray(FIELD_GEOMETRIES); for (ShapeBuilder shape : shapes) { shape.toXContent(builder, params); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java index 265efe11621..4bf84ea8f50 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java @@ -39,7 +39,7 @@ public class LineStringBuilder extends PointCollection { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); builder.field(FIELD_COORDINATES); coordinatesToXcontent(builder, false); builder.endObject(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java index 10ad25c89e1..a004b90a2dc 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java @@ -57,7 +57,7 @@ public class MultiLineStringBuilder extends ShapeBuilder { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); builder.field(FIELD_COORDINATES); builder.startArray(); 
for(LineStringBuilder line : lines) { diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java index d12baad70d9..8d5cfabdabb 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java @@ -37,7 +37,7 @@ public class MultiPointBuilder extends PointCollection { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); builder.field(FIELD_COORDINATES); super.coordinatesToXcontent(builder, false); builder.endObject(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java index 0998cd2944b..7911ddff835 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java @@ -51,7 +51,7 @@ public class MultiPolygonBuilder extends ShapeBuilder { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); builder.startArray(FIELD_COORDINATES); for(PolygonBuilder polygon : polygons) { builder.startArray(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java index 53c67387e91..d6d62c28b8c 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java @@ -20,7 +20,10 @@ package org.elasticsearch.common.geo.builders; import java.io.IOException; +import java.util.Objects; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import com.spatial4j.core.shape.Point; @@ -30,6 +33,8 @@ public class PointBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.POINT; + public static final PointBuilder PROTOTYPE = new PointBuilder(); + private Coordinate coordinate; public PointBuilder coordinate(Coordinate coordinate) { @@ -48,10 +53,10 @@ public class PointBuilder extends ShapeBuilder { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); builder.field(FIELD_COORDINATES); toXContent(builder, coordinate); - return builder.endObject(); + return builder.endObject(); } @Override @@ -63,4 +68,31 @@ public class PointBuilder extends ShapeBuilder { public GeoShapeType type() { return TYPE; } + + @Override + public int hashCode() { + return Objects.hash(coordinate); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + PointBuilder other = (PointBuilder) obj; + return Objects.equals(coordinate, other.coordinate); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + 
writeCoordinateTo(coordinate, out); + } + + @Override + public PointBuilder readFrom(StreamInput in) throws IOException { + return new PointBuilder().coordinate(readCoordinateFrom(in)); + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index 4a406eb22b8..94d8fc049d8 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -172,7 +172,7 @@ public class PolygonBuilder extends ShapeBuilder { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); builder.startArray(FIELD_COORDINATES); coordinatesArray(builder, params); builder.endArray(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java index 13237727173..7f153a9197f 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java @@ -26,8 +26,12 @@ import com.spatial4j.core.shape.jts.JtsGeometry; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.GeometryFactory; + import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.support.ToXContentToBytes; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.unit.DistanceUnit.Distance; @@ -43,7 +47,7 @@ import java.util.*; /** * Basic class for building GeoJSON shapes like Polygons, Linestrings, etc */ -public abstract class ShapeBuilder extends ToXContentToBytes { +public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWriteable { protected static final ESLogger LOGGER = ESLoggerFactory.getLogger(ShapeBuilder.class.getName()); @@ -173,6 +177,15 @@ public abstract class ShapeBuilder extends ToXContentToBytes { return builder.startArray().value(coordinate.x).value(coordinate.y).endArray(); } + protected static void writeCoordinateTo(Coordinate coordinate, StreamOutput out) throws IOException { + out.writeDouble(coordinate.x); + out.writeDouble(coordinate.y); + } + + protected Coordinate readCoordinateFrom(StreamInput in) throws IOException { + return new Coordinate(in.readDouble(), in.readDouble()); + } + public static Orientation orientationFromString(String orientation) { orientation = orientation.toLowerCase(Locale.ROOT); switch (orientation) { @@ -565,12 +578,16 @@ public abstract class ShapeBuilder extends ToXContentToBytes { ENVELOPE("envelope"), CIRCLE("circle"); - protected final String shapename; + private final String shapename; private GeoShapeType(String shapename) { this.shapename = shapename; } + protected String shapeName() { + return shapename; + } + public static GeoShapeType forName(String geoshapename) { String typename = geoshapename.toLowerCase(Locale.ROOT); for (GeoShapeType type : values()) { @@ -823,4 +840,20 @@ public abstract class ShapeBuilder extends ToXContentToBytes { return 
geometryCollection; } } + + @Override + public String getWriteableName() { + return type().shapeName(); + } + + // NORELEASE this should be deleted as soon as all shape builders implement writable + @Override + public void writeTo(StreamOutput out) throws IOException { + } + + // NORELEASE this should be deleted as soon as all shape builders implement writable + @Override + public ShapeBuilder readFrom(StreamInput in) throws IOException { + return null; + } } diff --git a/core/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java b/core/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java index 56f0ec0f055..5bc1595be5f 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java +++ b/core/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java @@ -331,6 +331,6 @@ public abstract class Multibinder { NullPointerException npe = new NullPointerException(name); throw new ConfigurationException(singleton( - new Message(emptyList(), npe.toString(), npe))); + new Message(emptyList(), npe))); } } diff --git a/core/src/main/java/org/elasticsearch/common/inject/spi/Message.java b/core/src/main/java/org/elasticsearch/common/inject/spi/Message.java index e5488d07417..5a39b9edf13 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/spi/Message.java +++ b/core/src/main/java/org/elasticsearch/common/inject/spi/Message.java @@ -58,6 +58,10 @@ public final class Message implements Serializable, Element { this(Collections.singletonList(source), message, null); } + public Message(Object source, Throwable cause) { + this(Collections.singletonList(source), null, cause); + } + public Message(String message) { this(Collections.emptyList(), message, null); } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index c2b16046b0c..e5ec230fd66 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -525,7 +525,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen } }); } else if (node.equals(nodes().masterNode())) { - handleMasterGone(node, "shut_down"); + handleMasterGone(node, null, "shut_down"); } } @@ -615,7 +615,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen }); } - private void handleMasterGone(final DiscoveryNode masterNode, final String reason) { + private void handleMasterGone(final DiscoveryNode masterNode, final Throwable cause, final String reason) { if (lifecycleState() != Lifecycle.State.STARTED) { // not started, ignore a master failure return; @@ -625,7 +625,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen return; } - logger.info("master_left [{}], reason [{}]", masterNode, reason); + logger.info("master_left [{}], reason [{}]", cause, masterNode, reason); clusterService.submitStateUpdateTask("zen-disco-master_failed (" + masterNode + ")", Priority.IMMEDIATE, new ClusterStateUpdateTask() { @@ -1078,8 +1078,8 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen private class MasterNodeFailureListener implements MasterFaultDetection.Listener { @Override - public void onMasterFailure(DiscoveryNode masterNode, String reason) { - handleMasterGone(masterNode, reason); + public void onMasterFailure(DiscoveryNode masterNode, Throwable cause, String reason) { + handleMasterGone(masterNode, cause, reason); 
} } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java index 8333b967c2f..8842bafb116 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java @@ -49,7 +49,7 @@ public class MasterFaultDetection extends FaultDetection { public static interface Listener { /** called when pinging the master failed, like a timeout, transport disconnects etc */ - void onMasterFailure(DiscoveryNode masterNode, String reason); + void onMasterFailure(DiscoveryNode masterNode, Throwable cause, String reason); } @@ -117,7 +117,7 @@ public class MasterFaultDetection extends FaultDetection { transportService.connectToNode(masterNode); } catch (final Exception e) { // notify master failure (which stops also) and bail.. - notifyMasterFailure(masterNode, "failed to perform initial connect [" + e.getMessage() + "]"); + notifyMasterFailure(masterNode, e, "failed to perform initial connect"); return; } if (masterPinger != null) { @@ -176,22 +176,22 @@ public class MasterFaultDetection extends FaultDetection { threadPool.schedule(TimeValue.timeValueMillis(0), ThreadPool.Names.SAME, masterPinger); } catch (Exception e) { logger.trace("[master] [{}] transport disconnected (with verified connect)", masterNode); - notifyMasterFailure(masterNode, "transport disconnected (with verified connect)"); + notifyMasterFailure(masterNode, null, "transport disconnected (with verified connect)"); } } else { logger.trace("[master] [{}] transport disconnected", node); - notifyMasterFailure(node, "transport disconnected"); + notifyMasterFailure(node, null, "transport disconnected"); } } } - private void notifyMasterFailure(final DiscoveryNode masterNode, final String reason) { + private void notifyMasterFailure(final DiscoveryNode masterNode, final Throwable cause, final String reason) { if (notifiedMasterFailure.compareAndSet(false, true)) { threadPool.generic().execute(new Runnable() { @Override public void run() { for (Listener listener : listeners) { - listener.onMasterFailure(masterNode, reason); + listener.onMasterFailure(masterNode, cause, reason); } } }); @@ -255,15 +255,15 @@ public class MasterFaultDetection extends FaultDetection { return; } else if (exp.getCause() instanceof NotMasterException) { logger.debug("[master] pinging a master {} that is no longer a master", masterNode); - notifyMasterFailure(masterToPing, "no longer master"); + notifyMasterFailure(masterToPing, exp, "no longer master"); return; } else if (exp.getCause() instanceof ThisIsNotTheMasterYouAreLookingForException) { logger.debug("[master] pinging a master {} that is not the master", masterNode); - notifyMasterFailure(masterToPing, "not master"); + notifyMasterFailure(masterToPing, exp, "not master"); return; } else if (exp.getCause() instanceof NodeDoesNotExistOnMasterException) { logger.debug("[master] pinging a master {} but we do not exists on it, act as if its master failure", masterNode); - notifyMasterFailure(masterToPing, "do not exists on master, act as master failure"); + notifyMasterFailure(masterToPing, exp, "do not exist on master, act as master failure"); return; } @@ -272,7 +272,7 @@ public class MasterFaultDetection extends FaultDetection { if (retryCount >= pingRetryCount) { logger.debug("[master] failed to ping [{}], tried [{}] times, each with maximum [{}] timeout", masterNode, pingRetryCount,
pingRetryTimeout); // not good, failure - notifyMasterFailure(masterToPing, "failed to ping, tried [" + pingRetryCount + "] times, each with maximum [" + pingRetryTimeout + "] timeout"); + notifyMasterFailure(masterToPing, null, "failed to ping, tried [" + pingRetryCount + "] times, each with maximum [" + pingRetryTimeout + "] timeout"); } else { // resend the request, not reschedule, rely on send timeout transportService.sendRequest(masterToPing, MASTER_PING_ACTION_NAME, request, options, this); diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java index 93d457d7382..91fd622023f 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java @@ -140,9 +140,9 @@ public class PublishClusterStateAction extends AbstractComponent { throw t; } catch (Throwable t) { // try to fail committing, in cause it's still on going - if (sendingController.markAsFailed("unexpected error [" + t.getMessage() + "]")) { + if (sendingController.markAsFailed("unexpected error", t)) { // signal the change should be rejected - throw new Discovery.FailedToCommitClusterStateException("unexpected error [{}]", t, t.getMessage()); + throw new Discovery.FailedToCommitClusterStateException("unexpected error", t); } else { throw t; } @@ -583,6 +583,21 @@ public class PublishClusterStateAction extends AbstractComponent { return true; } + /** + * tries marking the publishing as failed, if a decision wasn't made yet + * + * @return true if the publishing was failed and the cluster state is *not* committed + **/ + synchronized private boolean markAsFailed(String details, Throwable reason) { + if (committedOrFailed()) { + return committed == false; + } + logger.trace("failed to commit version [{}]. 
{}", reason, clusterState.version(), details); + committed = false; + committedOrFailedLatch.countDown(); + return true; + } + /** * tries marking the publishing as failed, if a decision wasn't made yet * diff --git a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java index 27cd9fd8c4c..a2c65c6441d 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java @@ -235,7 +235,7 @@ public class Analysis { try (BufferedReader reader = FileSystemUtils.newBufferedReader(wordListFile.toUri().toURL(), StandardCharsets.UTF_8)) { return loadWordList(reader, "#"); } catch (IOException ioe) { - String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, ioe.getMessage()); + String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix); throw new IllegalArgumentException(message, ioe); } } @@ -282,7 +282,7 @@ public class Analysis { try { return FileSystemUtils.newBufferedReader(path.toUri().toURL(), StandardCharsets.UTF_8); } catch (IOException ioe) { - String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, ioe.getMessage()); + String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix); throw new IllegalArgumentException(message, ioe); } } diff --git a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java index 68c552d4419..ac46f6725de 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java +++ b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java @@ -67,7 +67,7 @@ public class TranslogRecoveryPerformer { numOps++; } } catch (Throwable t) { - throw new BatchOperationException(shardId, "failed to apply batch translog operation [" + t.getMessage() + "]", numOps, t); + throw new BatchOperationException(shardId, "failed to apply batch translog operation", numOps, t); } return numOps; } diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotException.java b/core/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotException.java index 741350966a5..7ace0303f67 100644 --- a/core/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotException.java +++ b/core/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotException.java @@ -33,6 +33,11 @@ public class IndexShardSnapshotException extends ElasticsearchException { this(shardId, msg, null); } + public IndexShardSnapshotException(ShardId shardId, Throwable cause) { + super(cause); + setShard(shardId); + } + public IndexShardSnapshotException(ShardId shardId, String msg, Throwable cause) { super(msg, cause); setShard(shardId); diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotFailedException.java b/core/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotFailedException.java index bfb755c9e14..7b7fc68d4d4 100644 --- a/core/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotFailedException.java +++ b/core/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotFailedException.java @@ -32,6 +32,10 @@ public class IndexShardSnapshotFailedException extends IndexShardSnapshotExcepti super(shardId, msg); } + public 
diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotFailedException.java b/core/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotFailedException.java index bfb755c9e14..7b7fc68d4d4 100644 --- a/core/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotFailedException.java +++ b/core/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotFailedException.java @@ -32,6 +32,10 @@ public class IndexShardSnapshotFailedException extends IndexShardSnapshotException super(shardId, msg); } + public IndexShardSnapshotFailedException(ShardId shardId, Throwable cause) { + super(shardId, cause); + } + public IndexShardSnapshotFailedException(ShardId shardId, String msg, Throwable cause) { super(shardId, msg, cause); } diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java index d90a869f5b3..674d1085660 100644 --- a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java +++ b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java @@ -191,7 +191,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements if (e instanceof IndexShardSnapshotFailedException) { throw (IndexShardSnapshotFailedException) e; } else { - throw new IndexShardSnapshotFailedException(shardId, e.getMessage(), e); + throw new IndexShardSnapshotFailedException(shardId, e); } } } @@ -373,7 +373,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements } catch (IOException e) { // We cannot delete index file - this is fatal, we cannot continue, otherwise we might end up // with references to non-existing files - throw new IndexShardSnapshotFailedException(shardId, "error deleting index files during cleanup, reason: " + e.getMessage(), e); + throw new IndexShardSnapshotFailedException(shardId, "error deleting index files during cleanup", e); } blobsToDelete = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryStatus.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryStatus.java index 80f458b8f5c..0064021dd33 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryStatus.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryStatus.java @@ -22,6 +22,7 @@ package org.elasticsearch.indices.recovery; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexOutput; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; @@ -175,7 +176,7 @@ public class RecoveryStatus extends AbstractRefCounted { listener.onRecoveryFailure(state(), e, sendShardFailure); } finally { try { - cancellableThreads.cancel("failed recovery [" + e.getMessage() + "]"); + cancellableThreads.cancel("failed recovery [" + ExceptionsHelper.stackTrace(e) + "]"); } finally { // release the initial reference.
recovery files will be cleaned as soon as ref count goes to zero, potentially now decRef(); diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java index 72872a585f1..2ccfbcb5420 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java @@ -130,8 +130,17 @@ public class RecoveryTarget extends AbstractComponent implements IndexEventListener } + protected void retryRecovery(final RecoveryStatus recoveryStatus, final Throwable reason, TimeValue retryAfter, final StartRecoveryRequest currentRequest) { + logger.trace("will retry recovery with id [{}] in [{}]", reason, recoveryStatus.recoveryId(), retryAfter); + retryRecovery(recoveryStatus, retryAfter, currentRequest); + } + protected void retryRecovery(final RecoveryStatus recoveryStatus, final String reason, TimeValue retryAfter, final StartRecoveryRequest currentRequest) { - logger.trace("will retrying recovery with id [{}] in [{}] (reason [{}])", recoveryStatus.recoveryId(), retryAfter, reason); + logger.trace("will retry recovery with id [{}] in [{}] (reason [{}])", recoveryStatus.recoveryId(), retryAfter, reason); + retryRecovery(recoveryStatus, retryAfter, currentRequest); + } + + private void retryRecovery(final RecoveryStatus recoveryStatus, TimeValue retryAfter, final StartRecoveryRequest currentRequest) { try { recoveryStatus.resetRecovery(); } catch (Throwable e) { @@ -224,7 +233,7 @@ public class RecoveryTarget extends AbstractComponent implements IndexEventListener } if (cause instanceof DelayRecoveryException) { - retryRecovery(recoveryStatus, cause.getMessage(), recoverySettings.retryDelayStateSync(), request); + retryRecovery(recoveryStatus, cause, recoverySettings.retryDelayStateSync(), request); return; } diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java index 123480e81de..e849580b2c4 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java @@ -69,7 +69,6 @@ public class SharedFSRecoverySourceHandler extends RecoverySourceHandler { // create a new IndexWriter logger.info("recovery failed for primary shadow shard, failing shard"); - // pass the failure as null, as we want to ensure the store is not marked as corrupted - shard.failShard("primary relocation failed on shared filesystem caused by: [" + t.getMessage() + "]", null); + shard.failShard("primary relocation failed on shared filesystem", t); } else { logger.info("recovery failed on shared filesystem", t); }
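A note on the trace calls above (and on markAsFailed earlier): the ESLogger methods are overloaded to accept a Throwable as the second argument, ahead of the format parameters. So in

    logger.trace("will retry recovery with id [{}] in [{}]", reason, recoveryStatus.recoveryId(), retryAfter);

the two {} placeholders bind to recoveryId and retryAfter, while reason, a Throwable here, is attached as the logged cause; the argument count is correct despite appearances.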
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/InternalSignificantTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/InternalSignificantTerms.java index 27d9f58a7ff..21b92e83fff 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/InternalSignificantTerms.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/InternalSignificantTerms.java @@ -173,7 +173,7 @@ public abstract class InternalSignificantTerms InternalSignificantTerms terms = (InternalSignificantTerms) aggregation; for (Bucket bucket : terms.buckets) { - List existingBuckets = buckets.get(bucket.getKey()); + List existingBuckets = buckets.get(bucket.getKeyAsString()); if (existingBuckets == null) { existingBuckets = new ArrayList<>(aggregations.size()); buckets.put(bucket.getKeyAsString(), existingBuckets); diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 964cfacc8c1..14fc9029b00 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -484,7 +484,7 @@ public class TransportService extends AbstractLifecycleComponent diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java new file mode 100644 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +public abstract class AbstractShapeBuilderTestCase<SB extends ShapeBuilder> extends ESTestCase { + + private static final int NUMBER_OF_TESTBUILDERS = 20; + private static NamedWriteableRegistry namedWriteableRegistry; + + /** + * setup for the whole base test class + */ + @BeforeClass + public static void init() { + if (namedWriteableRegistry == null) { + namedWriteableRegistry = new NamedWriteableRegistry(); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PointBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, CircleBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, EnvelopeBuilder.PROTOTYPE); + } + } + + @AfterClass + public static void afterClass() throws Exception { + namedWriteableRegistry = null; + } + + /** + * create random shape that is put under test + */ + protected abstract SB createTestShapeBuilder(); + + /** + * mutate the given shape so the returned shape is different + */ + protected abstract SB mutate(SB original) throws IOException; + + /** + * Test that creates new shape from a random test shape and checks both for equality + */ + public void testFromXContent() throws IOException { + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + SB testShape = createTestShapeBuilder(); + XContentBuilder contentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + if (randomBoolean()) { + contentBuilder.prettyPrint(); + } + XContentBuilder builder = testShape.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); + XContentParser shapeParser = XContentHelper.createParser(builder.bytes()); + shapeParser.nextToken(); + ShapeBuilder parsedShape = ShapeBuilder.parse(shapeParser); + assertNotSame(testShape, parsedShape); + assertEquals(testShape, parsedShape); + assertEquals(testShape.hashCode(), parsedShape.hashCode()); + } + } + + /** + * Test serialization and deserialization of the test shape.
+ */ + public void testSerialization() throws IOException { + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + SB testShape = createTestShapeBuilder(); + SB deserializedShape = copyShape(testShape); + assertEquals(deserializedShape, testShape); + assertEquals(deserializedShape.hashCode(), testShape.hashCode()); + assertNotSame(deserializedShape, testShape); + } + } + + /** + * Test equality and hashCode properties + */ + public void testEqualsAndHashcode() throws IOException { + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + SB firstShape = createTestShapeBuilder(); + assertFalse("shape is equal to null", firstShape.equals(null)); + assertFalse("shape is equal to incompatible type", firstShape.equals("")); + assertTrue("shape is not equal to self", firstShape.equals(firstShape)); + assertThat("same shape's hashcode returns different values if called multiple times", firstShape.hashCode(), + equalTo(firstShape.hashCode())); + assertThat("different shapes should not be equal", mutate(firstShape), not(equalTo(firstShape))); + assertThat("different shapes should have different hashcode", mutate(firstShape).hashCode(), not(equalTo(firstShape.hashCode()))); + + SB secondShape = copyShape(firstShape); + assertTrue("shape is not equal to self", secondShape.equals(secondShape)); + assertTrue("shape is not equal to its copy", firstShape.equals(secondShape)); + assertTrue("equals is not symmetric", secondShape.equals(firstShape)); + assertThat("shape copy's hashcode is different from original hashcode", secondShape.hashCode(), equalTo(firstShape.hashCode())); + + SB thirdShape = copyShape(secondShape); + assertTrue("shape is not equal to self", thirdShape.equals(thirdShape)); + assertTrue("shape is not equal to its copy", secondShape.equals(thirdShape)); + assertThat("shape copy's hashcode is different from original hashcode", secondShape.hashCode(), equalTo(thirdShape.hashCode())); + assertTrue("equals is not transitive", firstShape.equals(thirdShape)); + assertThat("shape copy's hashcode is different from original hashcode", firstShape.hashCode(), equalTo(thirdShape.hashCode())); + assertTrue("equals is not symmetric", thirdShape.equals(secondShape)); + assertTrue("equals is not symmetric", thirdShape.equals(firstShape)); + } + } + + protected SB copyShape(SB original) throws IOException { + try (BytesStreamOutput output = new BytesStreamOutput()) { + original.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + ShapeBuilder prototype = (ShapeBuilder) namedWriteableRegistry.getPrototype(ShapeBuilder.class, original.getWriteableName()); + @SuppressWarnings("unchecked") + SB copy = (SB) prototype.readFrom(in); + return copy; + } + } + } +} diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java new file mode 100644 index 00000000000..6b102b87b2c --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/CircleBuilderTests.java @@ -0,0 +1,58 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.geo.builders; + +import com.vividsolutions.jts.geom.Coordinate; + +import org.elasticsearch.common.unit.DistanceUnit; + +import java.io.IOException; + +public class CircleBuilderTests extends AbstractShapeBuilderTestCase<CircleBuilder> { + + @Override + protected CircleBuilder createTestShapeBuilder() { + double centerX = randomDoubleBetween(-180, 180, false); + double centerY = randomDoubleBetween(-90, 90, false); + return new CircleBuilder() + .center(new Coordinate(centerX, centerY)) + .radius(randomDoubleBetween(0.1, 10.0, false), randomFrom(DistanceUnit.values())); + } + + @Override + protected CircleBuilder mutate(CircleBuilder original) throws IOException { + CircleBuilder mutation = copyShape(original); + double radius = original.radius(); + DistanceUnit unit = original.unit(); + + if (randomBoolean()) { + mutation.center(new Coordinate(original.center().x/2, original.center().y/2)); + } else if (randomBoolean()) { + radius = radius/2; + } else { + DistanceUnit newRandom = unit; + while (newRandom == unit) { + newRandom = randomFrom(DistanceUnit.values()); + } + unit = newRandom; + } + return mutation.radius(radius, unit); + } +} diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java new file mode 100644 index 00000000000..e6f3db2f8af --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.common.geo.builders; + +import com.spatial4j.core.shape.Rectangle; +import com.vividsolutions.jts.geom.Coordinate; + +import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation; +import org.elasticsearch.test.geo.RandomShapeGenerator; + +import java.io.IOException; + +public class EnvelopeBuilderTests extends AbstractShapeBuilderTestCase<EnvelopeBuilder> { + + @Override + protected EnvelopeBuilder createTestShapeBuilder() { + EnvelopeBuilder envelope = new EnvelopeBuilder(randomFrom(Orientation.values())); + Rectangle box = RandomShapeGenerator.xRandomRectangle(getRandom(), RandomShapeGenerator.xRandomPoint(getRandom())); + envelope.topLeft(box.getMinX(), box.getMaxY()) + .bottomRight(box.getMaxX(), box.getMinY()); + return envelope; + } + + @Override + protected EnvelopeBuilder mutate(EnvelopeBuilder original) throws IOException { + EnvelopeBuilder mutation = copyShape(original); + if (randomBoolean()) { + // toggle orientation + mutation.orientation = (original.orientation == Orientation.LEFT ? Orientation.RIGHT : Orientation.LEFT); + } else { + // move one corner to a random location, keeping the envelope valid + switch (randomIntBetween(0, 3)) { + case 0: + mutation.topLeft(new Coordinate(randomDoubleBetween(-180.0, original.bottomRight.x, true), original.topLeft.y)); + break; + case 1: + mutation.topLeft(new Coordinate(original.topLeft.x, randomDoubleBetween(original.bottomRight.y, 90.0, true))); + break; + case 2: + mutation.bottomRight(new Coordinate(randomDoubleBetween(original.topLeft.x, 180.0, true), original.bottomRight.y)); + break; + case 3: + mutation.bottomRight(new Coordinate(original.bottomRight.x, randomDoubleBetween(-90.0, original.topLeft.y, true))); + break; + } + } + return mutation; + } +} diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java new file mode 100644 index 00000000000..1e94a1bab3a --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java @@ -0,0 +1,38 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.common.geo.builders; + +import com.vividsolutions.jts.geom.Coordinate; + +import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; + +public class PointBuilderTests extends AbstractShapeBuilderTestCase<PointBuilder> { + + @Override + protected PointBuilder createTestShapeBuilder() { + return (PointBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POINT); + } + + @Override + protected PointBuilder mutate(PointBuilder original) { + return new PointBuilder().coordinate(new Coordinate(original.longitude()/2, original.latitude()/2)); + } +} diff --git a/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java b/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java index 1010d2a5e8c..25c3a136271 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java @@ -57,4 +57,20 @@ public class DistanceUnitTests extends ESTestCase { assertThat("Value can be parsed from '" + testValue + unit.toString() + "'", DistanceUnit.Distance.parseDistance(unit.toString(testValue)).value, equalTo(testValue)); } } + + /** + * This test ensures that we are aware of accidental reordering of the distance unit ordinals, + * since equality and hashCode in e.g. CircleBuilder, as well as serialization, rely on them + */ + public void testDistanceUnitOrdinals() { + assertEquals(0, DistanceUnit.INCH.ordinal()); + assertEquals(1, DistanceUnit.YARD.ordinal()); + assertEquals(2, DistanceUnit.FEET.ordinal()); + assertEquals(3, DistanceUnit.KILOMETERS.ordinal()); + assertEquals(4, DistanceUnit.NAUTICALMILES.ordinal()); + assertEquals(5, DistanceUnit.MILLIMETERS.ordinal()); + assertEquals(6, DistanceUnit.CENTIMETERS.ordinal()); + assertEquals(7, DistanceUnit.MILES.ordinal()); + assertEquals(8, DistanceUnit.METERS.ordinal()); + } }
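The ordinal assertions above pin down the wire format: if a DistanceUnit travels as its ordinal, reordering the enum constants makes a remote node silently decode a different unit. A sketch of the failure mode this guards against, assuming the unit is streamed as its ordinal (which is what the test's rationale implies):

    // write side
    out.writeVInt(unit.ordinal());
    // read side: after an enum reordering this resolves to the wrong unit, and no error is raised
    DistanceUnit unit = DistanceUnit.values()[in.readVInt()];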
diff --git a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java index d9a75b297aa..e7a10b0f62b 100644 --- a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java @@ -193,7 +193,7 @@ public class ZenFaultDetectionTests extends ESTestCase { masterFD.addListener(new MasterFaultDetection.Listener() { @Override - public void onMasterFailure(DiscoveryNode masterNode, String reason) { + public void onMasterFailure(DiscoveryNode masterNode, Throwable cause, String reason) { failureNode[0] = masterNode; failureReason[0] = reason; notified.countDown(); @@ -211,4 +211,4 @@ public class ZenFaultDetectionTests extends ESTestCase { assertThat(failureReason[0], matcher); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsBackwardCompatibilityIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsBackwardCompatibilityIT.java index 5b7976043f8..d6afb35547f 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsBackwardCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsBackwardCompatibilityIT.java @@ -48,24 +48,16 @@ public class SignificantTermsBackwardCompatibilityIT extends ESBackcompatTestCase static final String CLASS_FIELD = "class"; /** - * Simple upgrade test for streaming significant terms buckets + * Test for streaming significant terms buckets to old es versions. */ public void testBucketStreaming() throws IOException, ExecutionException, InterruptedException { logger.debug("testBucketStreaming: indexing documents"); String type = randomBoolean() ? "string" : "long"; String settings = "{\"index.number_of_shards\": 5, \"index.number_of_replicas\": 0}"; index01Docs(type, settings); - + ensureGreen(); logClusterState(); - boolean upgraded; - int upgradedNodesCounter = 1; - do { - logger.debug("testBucketStreaming: upgrading {}st node", upgradedNodesCounter++); - upgraded = backwardsCluster().upgradeOneNode(); - ensureGreen(); - logClusterState(); - checkSignificantTermsAggregationCorrect(); - } while (upgraded); + checkSignificantTermsAggregationCorrect(); - logger.debug("testBucketStreaming: done testing significant terms while upgrading"); + logger.debug("testBucketStreaming: done testing significant terms"); } @@ -101,7 +93,7 @@ public class SignificantTermsBackwardCompatibilityIT extends ESBackcompatTestCase .execute() .actionGet(); assertSearchResponse(response); - StringTerms classes = (StringTerms) response.getAggregations().get("class"); + StringTerms classes = response.getAggregations().get("class"); assertThat(classes.getBuckets().size(), equalTo(2)); for (Terms.Bucket classBucket : classes.getBuckets()) { Map<String, Aggregation> aggs = classBucket.getAggregations().asMap(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java index c911da06ae9..b10dfd31b35 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND; @@ -38,6 +39,8 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicBuilder; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParserMapper; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.support.format.ValueFormatter; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TestSearchContext; @@ -45,18 +48,11 @@ import org.elasticsearch.test.TestSearchContext; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; +import java.nio.charset.StandardCharsets; +import java.util.*; import static org.elasticsearch.test.VersionUtils.randomVersion; -import static
org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.lessThan; -import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.*; /** * @@ -83,24 +79,28 @@ public class SignificanceHeuristicTests extends ESTestCase { ByteArrayOutputStream outBuffer = new ByteArrayOutputStream(); OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer); out.setVersion(version); - sigTerms[0].writeTo(out); // read ByteArrayInputStream inBuffer = new ByteArrayInputStream(outBuffer.toByteArray()); InputStreamStreamInput in = new InputStreamStreamInput(inBuffer); in.setVersion(version); - sigTerms[1].readFrom(in); assertTrue(sigTerms[1].significanceHeuristic.equals(sigTerms[0].significanceHeuristic)); + InternalSignificantTerms.Bucket originalBucket = (InternalSignificantTerms.Bucket) sigTerms[0].buckets.get(0); + InternalSignificantTerms.Bucket streamedBucket = (InternalSignificantTerms.Bucket) sigTerms[1].buckets.get(0); + assertThat(originalBucket.getKeyAsString(), equalTo(streamedBucket.getKeyAsString())); + assertThat(originalBucket.getSupersetDf(), equalTo(streamedBucket.getSupersetDf())); + assertThat(originalBucket.getSubsetDf(), equalTo(streamedBucket.getSubsetDf())); + assertThat(streamedBucket.getSubsetSize(), equalTo(10l)); + assertThat(streamedBucket.getSupersetSize(), equalTo(20l)); } InternalSignificantTerms[] getRandomSignificantTerms(SignificanceHeuristic heuristic) { InternalSignificantTerms[] sTerms = new InternalSignificantTerms[2]; ArrayList buckets = new ArrayList<>(); if (randomBoolean()) { - BytesRef term = new BytesRef("123.0"); buckets.add(new SignificantLongTerms.Bucket(1, 2, 3, 4, 123, InternalAggregations.EMPTY, null)); sTerms[0] = new SignificantLongTerms(10, 20, "some_name", null, 1, 1, heuristic, buckets, Collections.EMPTY_LIST, null); @@ -125,6 +125,56 @@ public class SignificanceHeuristicTests extends ESTestCase { return heuristics.get(randomInt(3)); } + public void testReduce() { + List aggs = createInternalAggregations(); + SignificantTerms reducedAgg = (SignificantTerms) aggs.get(0).doReduce(aggs, null); + assertThat(reducedAgg.getBuckets().size(), equalTo(2)); + assertThat(reducedAgg.getBuckets().get(0).getSubsetDf(), equalTo(8l)); + assertThat(reducedAgg.getBuckets().get(0).getSubsetSize(), equalTo(16l)); + assertThat(reducedAgg.getBuckets().get(0).getSupersetDf(), equalTo(10l)); + assertThat(reducedAgg.getBuckets().get(0).getSupersetSize(), equalTo(30l)); + assertThat(reducedAgg.getBuckets().get(1).getSubsetDf(), equalTo(8l)); + assertThat(reducedAgg.getBuckets().get(1).getSubsetSize(), equalTo(16l)); + assertThat(reducedAgg.getBuckets().get(1).getSupersetDf(), equalTo(10l)); + assertThat(reducedAgg.getBuckets().get(1).getSupersetSize(), equalTo(30l)); + } + + // Create aggregations as they might come from three different shards and return as list. + private List createInternalAggregations() { + + String type = randomBoolean() ? 
"long" : "string"; + SignificanceHeuristic significanceHeuristic = getRandomSignificanceheuristic(); + + List aggs = new ArrayList<>(); + List terms0Buckets = new ArrayList<>(); + terms0Buckets.add(createBucket(type, 4, 4, 5, 10, 0)); + aggs.add(createAggregation(type, significanceHeuristic, terms0Buckets, 4, 10)); + List terms1Buckets = new ArrayList<>(); + terms0Buckets.add(createBucket(type, 4, 4, 5, 10, 1)); + aggs.add(createAggregation(type, significanceHeuristic, terms1Buckets, 4, 10)); + List terms01Buckets = new ArrayList<>(); + terms0Buckets.add(createBucket(type, 4, 8, 5, 10, 0)); + terms0Buckets.add(createBucket(type, 4, 8, 5, 10, 1)); + aggs.add(createAggregation(type, significanceHeuristic, terms01Buckets, 8, 10)); + return aggs; + } + + private InternalSignificantTerms createAggregation(String type, SignificanceHeuristic significanceHeuristic, List buckets, long subsetSize, long supersetSize) { + if (type.equals("string")) { + return new SignificantStringTerms(subsetSize, supersetSize, "sig_terms", 2, -1, significanceHeuristic, buckets, new ArrayList(), new HashMap()); + } else { + return new SignificantLongTerms(subsetSize, supersetSize, "sig_terms", ValueFormatter.RAW, 2, -1, significanceHeuristic, buckets, new ArrayList(), new HashMap()); + } + } + + private InternalSignificantTerms.Bucket createBucket(String type, long subsetDF, long subsetSize, long supersetDF, long supersetSize, long label) { + if (type.equals("string")) { + return new SignificantStringTerms.Bucket(new BytesRef(Long.toString(label).getBytes(StandardCharsets.UTF_8)), subsetDF, subsetSize, supersetDF, supersetSize, InternalAggregations.EMPTY); + } else { + return new SignificantLongTerms.Bucket(subsetDF, subsetSize, supersetDF, supersetSize, label, InternalAggregations.EMPTY, ValueFormatter.RAW); + } + } + // test that // 1. The output of the builders can actually be parsed // 2. 
// test that // 1. The output of the builders can actually be parsed // 2. The parser does not swallow parameters after a significance heuristic was defined diff --git a/core/src/test/resources/indices/bwc/index-2.0.1.zip b/core/src/test/resources/indices/bwc/index-2.0.1.zip new file mode 100644 index 00000000000..dccb7774fa6 Binary files /dev/null and b/core/src/test/resources/indices/bwc/index-2.0.1.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.1.0.zip b/core/src/test/resources/indices/bwc/index-2.1.0.zip new file mode 100644 index 00000000000..8c07e922260 Binary files /dev/null and b/core/src/test/resources/indices/bwc/index-2.1.0.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.0.1.zip b/core/src/test/resources/indices/bwc/repo-2.0.1.zip new file mode 100644 index 00000000000..305820877bb Binary files /dev/null and b/core/src/test/resources/indices/bwc/repo-2.0.1.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.1.0.zip b/core/src/test/resources/indices/bwc/repo-2.1.0.zip new file mode 100644 index 00000000000..2f287ea3481 Binary files /dev/null and b/core/src/test/resources/indices/bwc/repo-2.1.0.zip differ diff --git a/dev-tools/create_bwc_index.py b/dev-tools/create_bwc_index.py index 5d663ca69f3..83a35941577 100644 --- a/dev-tools/create_bwc_index.py +++ b/dev-tools/create_bwc_index.py @@ -149,6 +149,16 @@ def start_node(version, release_dir, data_dir, repo_dir, tcp_port=DEFAULT_TRANSPORT_TCP_PORT cmd.append('-f') # version before 1.0 start in background automatically return subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) +def install_plugin(version, release_dir, plugin_name): + run_plugin(version, release_dir, 'install', [plugin_name]) + +def remove_plugin(version, release_dir, plugin_name): + run_plugin(version, release_dir, 'remove', [plugin_name]) + +def run_plugin(version, release_dir, plugin_cmd, args): + cmd = [os.path.join(release_dir, 'bin/plugin'), plugin_cmd] + args + subprocess.check_call(cmd) + def create_client(http_port=DEFAULT_HTTP_TCP_PORT, timeout=30): logging.info('Waiting for node to startup') for _ in range(0, timeout): diff --git a/dev-tools/create_bwc_index_with_plugin_mappings.py b/dev-tools/create_bwc_index_with_plugin_mappings.py new file mode 100644 index 00000000000..c30de412d1d --- /dev/null +++ b/dev-tools/create_bwc_index_with_plugin_mappings.py @@ -0,0 +1,124 @@ +import create_bwc_index +import logging +import os +import random +import shutil +import subprocess +import sys +import tempfile + +def fetch_version(version): + logging.info('fetching ES version %s' % version) + if subprocess.call([sys.executable, os.path.join(os.path.split(sys.argv[0])[0], 'get-bwc-version.py'), version]) != 0: + raise RuntimeError('failed to download ES version %s' % version) + +def create_index(plugin, mapping, docs): + ''' + Creates a static back compat index (.zip) with mappings using fields defined in plugins.
+ ''' + + logging.basicConfig(format='[%(levelname)s] [%(asctime)s] %(message)s', level=logging.INFO, + datefmt='%Y-%m-%d %I:%M:%S %p') + logging.getLogger('elasticsearch').setLevel(logging.ERROR) + logging.getLogger('urllib3').setLevel(logging.WARN) + + tmp_dir = tempfile.mkdtemp() + plugin_installed = False + node = None + try: + data_dir = os.path.join(tmp_dir, 'data') + repo_dir = os.path.join(tmp_dir, 'repo') + logging.info('Temp data dir: %s' % data_dir) + logging.info('Temp repo dir: %s' % repo_dir) + + version = '2.0.0' + classifier = '%s-%s' %(plugin, version) + index_name = 'index-%s' % classifier + + # Download old ES releases if necessary: + release_dir = os.path.join('backwards', 'elasticsearch-%s' % version) + if not os.path.exists(release_dir): + fetch_version(version) + + create_bwc_index.install_plugin(version, release_dir, plugin) + plugin_installed = True + node = create_bwc_index.start_node(version, release_dir, data_dir, repo_dir, cluster_name=index_name) + client = create_bwc_index.create_client() + put_plugin_mappings(client, index_name, mapping, docs) + create_bwc_index.shutdown_node(node) + + print('%s server output:\n%s' % (version, node.stdout.read().decode('utf-8'))) + node = None + create_bwc_index.compress_index(classifier, tmp_dir, 'plugins/%s/src/test/resources/indices/bwc' %plugin) + finally: + if node is not None: + create_bwc_index.shutdown_node(node) + if plugin_installed: + create_bwc_index.remove_plugin(version, release_dir, plugin) + shutil.rmtree(tmp_dir) + +def put_plugin_mappings(client, index_name, mapping, docs): + client.indices.delete(index=index_name, ignore=404) + logging.info('Create single shard test index') + + client.indices.create(index=index_name, body={ + 'settings': { + 'number_of_shards': 1, + 'number_of_replicas': 0 + }, + 'mappings': { + 'type': mapping + } + }) + health = client.cluster.health(wait_for_status='green', wait_for_relocating_shards=0) + assert health['timed_out'] == False, 'cluster health timed out %s' % health + + logging.info('Indexing documents') + for i in range(len(docs)): + client.index(index=index_name, doc_type="type", id=str(i), body=docs[i]) + logging.info('Flushing index') + client.indices.flush(index=index_name) + + logging.info('Running basic checks') + count = client.count(index=index_name)['count'] + assert count == len(docs), "expected %d docs, got %d" %(len(docs), count) + +def main(): + docs = [ + { + "foo": "abc" + }, + { + "foo": "abcdef" + }, + { + "foo": "a" + } + ] + + murmur3_mapping = { + 'properties': { + 'foo': { + 'type': 'string', + 'fields': { + 'hash': { + 'type': 'murmur3' + } + } + } + } + } + + create_index("mapper-murmur3", murmur3_mapping, docs) + + size_mapping = { + '_size': { + 'enabled': True + } + } + + create_index("mapper-size", size_mapping, docs) + +if __name__ == '__main__': + main() + diff --git a/dev-tools/prepare_release_candidate.py b/dev-tools/prepare_release_candidate.py index d3a3e178961..31b07043389 100644 --- a/dev-tools/prepare_release_candidate.py +++ b/dev-tools/prepare_release_candidate.py @@ -356,7 +356,7 @@ if __name__ == "__main__": debs3_list_cmd = 'deb-s3 list -b %s --prefix %s' % (bucket, debs3_prefix) debs3_verify_cmd = 'deb-s3 verify -b %s --prefix %s' % (bucket, debs3_prefix) rpms3_prefix = 'elasticsearch/staging/%s-%s/repos/%s/centos' % (release_version, shortHash, package_repo_version) - rpms3_upload_cmd = 'rpm-s3 -v -b %s -p %s --sign --visibility public-read -k 0 %s' % (bucket, rpms3_prefix, rpm) + rpms3_upload_cmd = 'rpm-s3 -v -b %s -p %s --sign 
--visibility public-read -k 100 %s' % (bucket, rpms3_prefix, rpm) if deploy_s3: run(s3cmd_sync_to_staging_bucket_cmd) diff --git a/docs/reference/index-modules/similarity.asciidoc b/docs/reference/index-modules/similarity.asciidoc index 82399a8cc79..0ade819dc0c 100644 --- a/docs/reference/index-modules/similarity.asciidoc +++ b/docs/reference/index-modules/similarity.asciidoc @@ -157,7 +157,7 @@ implementation used for these two methods, while not changing the `default`, it is possible to configure a similarity with the name `base`. This similarity will then be used for the two methods. -You can change the default similarity for all fields like this: +You can change the default similarity for all fields by putting the following setting into `elasticsearch.yml`: [source,js] -------------------------------------------------- diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index 5783376a54f..34d1cba92c0 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -7,8 +7,8 @@ :jdk: 1.8.0_25 :defguide: https://www.elastic.co/guide/en/elasticsearch/guide/current :plugins: https://www.elastic.co/guide/en/elasticsearch/plugins/master -:issue: https://github.com/elastic/elasticsearch/issues -:pull: https://github.com/elastic/elasticsearch/pull +:issue: https://github.com/elastic/elasticsearch/issues/ +:pull: https://github.com/elastic/elasticsearch/pull/ include::getting-started.asciidoc[] diff --git a/docs/reference/mapping/fields/ttl-field.asciidoc b/docs/reference/mapping/fields/ttl-field.asciidoc index 07ce8a86b9e..d81582c9078 100644 --- a/docs/reference/mapping/fields/ttl-field.asciidoc +++ b/docs/reference/mapping/fields/ttl-field.asciidoc @@ -62,7 +62,7 @@ PUT my_index "my_type": { "_ttl": { "enabled": true, - "defaut": "5m" + "default": "5m" } } } diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java new file mode 100644 index 00000000000..290280b9101 --- /dev/null +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java @@ -0,0 +1,82 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper.murmur3; + +import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.TestUtil; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.plugin.mapper.MapperMurmur3Plugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collection; +import java.util.Collections; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) +@LuceneTestCase.SuppressFileSystems("ExtrasFS") +public class Murmur3FieldMapperUpgradeTests extends ESIntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return Collections.singleton(MapperMurmur3Plugin.class); + } + + public void testUpgradeOldMapping() throws IOException { + final String indexName = "index-mapper-murmur3-2.0.0"; + Path unzipDir = createTempDir(); + Path unzipDataDir = unzipDir.resolve("data"); + Path backwardsIndex = getBwcIndicesPath().resolve(indexName + ".zip"); + try (InputStream stream = Files.newInputStream(backwardsIndex)) { + TestUtil.unzip(stream, unzipDir); + } + assertTrue(Files.exists(unzipDataDir)); + + Path dataPath = createTempDir(); + Settings settings = Settings.builder() + .put("path.data", dataPath) + .build(); + final String node = internalCluster().startNode(settings); + Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, node).nodeDataPaths(); + assertEquals(1, nodePaths.length); + dataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER); + assertFalse(Files.exists(dataPath)); + Path src = unzipDataDir.resolve(indexName + "/nodes/0/indices"); + Files.move(src, dataPath); + + ensureYellow(); + final SearchResponse countResponse = client().prepareSearch(indexName).setSize(0).get(); + ElasticsearchAssertions.assertHitCount(countResponse, 3L); + + final SearchResponse cardinalityResponse = client().prepareSearch(indexName).addAggregation( + AggregationBuilders.cardinality("card").field("foo.hash")).get(); + Cardinality cardinality = cardinalityResponse.getAggregations().get("card"); + assertEquals(3L, cardinality.getValue()); + } + +} diff --git a/plugins/mapper-murmur3/src/test/resources/indices/bwc/index-mapper-murmur3-2.0.0.zip b/plugins/mapper-murmur3/src/test/resources/indices/bwc/index-mapper-murmur3-2.0.0.zip new file mode 100644 index 00000000000..0b69aac180b Binary files /dev/null and b/plugins/mapper-murmur3/src/test/resources/indices/bwc/index-mapper-murmur3-2.0.0.zip differ diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java new file mode 100644 index 00000000000..b9902500376 --- /dev/null +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java @@ -0,0 +1,93 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership.
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper.size; + +import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.TestUtil; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.plugin.mapper.MapperSizePlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHitField; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collection; +import java.util.Collections; +import java.util.Map; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) +@LuceneTestCase.SuppressFileSystems("ExtrasFS") +public class SizeFieldMapperUpgradeTests extends ESIntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return Collections.singleton(MapperSizePlugin.class); + } + + public void testUpgradeOldMapping() throws IOException { + final String indexName = "index-mapper-size-2.0.0"; + Path unzipDir = createTempDir(); + Path unzipDataDir = unzipDir.resolve("data"); + Path backwardsIndex = getBwcIndicesPath().resolve(indexName + ".zip"); + try (InputStream stream = Files.newInputStream(backwardsIndex)) { + TestUtil.unzip(stream, unzipDir); + } + assertTrue(Files.exists(unzipDataDir)); + + Path dataPath = createTempDir(); + Settings settings = Settings.builder() + .put("path.data", dataPath) + .build(); + final String node = internalCluster().startNode(settings); + Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, node).nodeDataPaths(); + assertEquals(1, nodePaths.length); + dataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER); + assertFalse(Files.exists(dataPath)); + Path src = unzipDataDir.resolve(indexName + "/nodes/0/indices"); + Files.move(src, dataPath); + + ensureYellow(); + final SearchResponse countResponse = client().prepareSearch(indexName).setSize(0).get(); + ElasticsearchAssertions.assertHitCount(countResponse, 3L); + + final SearchResponse sizeResponse = client().prepareSearch(indexName) + .addField("_source") + .addField("_size") + .get(); + ElasticsearchAssertions.assertHitCount(sizeResponse, 3L); + for (SearchHit hit : sizeResponse.getHits().getHits()) { + String source = hit.getSourceAsString(); + assertNotNull(source); + Map<String, SearchHitField> fields = hit.getFields(); + assertTrue(fields.containsKey("_size")); + Number size = fields.get("_size").getValue(); + assertNotNull(size); + assertEquals(source.length(), size.longValue()); + } + } + +} diff --git a/plugins/mapper-size/src/test/resources/indices/bwc/index-mapper-size-2.0.0.zip b/plugins/mapper-size/src/test/resources/indices/bwc/index-mapper-size-2.0.0.zip new file mode 100644 index
00000000000..0a74f835c3e Binary files /dev/null and b/plugins/mapper-size/src/test/resources/indices/bwc/index-mapper-size-2.0.0.zip differ diff --git a/qa/smoke-test-plugins/build.gradle b/qa/smoke-test-plugins/build.gradle index d93594bd6dc..864a58baf25 100644 --- a/qa/smoke-test-plugins/build.gradle +++ b/qa/smoke-test-plugins/build.gradle @@ -25,10 +25,9 @@ ext.pluginCount = 0 for (Project subproj : project.rootProject.subprojects) { if (subproj.path.startsWith(':plugins:')) { integTest { - def bundlePlugin = subproj.tasks.findByName('bundlePlugin') - dependsOn bundlePlugin cluster { - plugin subproj.name, bundlePlugin.outputs.files + // need to get a non-decorated project object, so must re-lookup the project by path + plugin subproj.name, project(subproj.path) } } pluginCount += 1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yaml new file mode 100644 index 00000000000..71229686eed --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yaml @@ -0,0 +1,30 @@ +--- +"REST test with headers": + - skip: + features: headers + + - do: + index: + index: test_1 + type: test + id: 1 + body: { "body": "foo" } + + - do: + headers: + Content-Type: application/yaml + get: + index: test_1 + type: _all + id: 1 + + - match: + $body: | + /^---\n + _index:\s+\"test_1"\n + _type:\s+"test"\n + _id:\s+"1"\n + _version:\s+1\n + found:\s+true\n + _source:\n + \s+body:\s+"foo"\n$/ diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java b/test-framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java index b7dad93d593..4054b8efce1 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java +++ b/test-framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java @@ -62,7 +62,8 @@ public class RestTestExecutionContext implements Closeable { * Saves the obtained response in the execution context. 
* @throws RestException if the returned status code is non ok */ - public RestResponse callApi(String apiName, Map<String, String> params, List<Map<String, Object>> bodies) throws IOException, RestException { + public RestResponse callApi(String apiName, Map<String, String> params, List<Map<String, Object>> bodies, + Map<String, String> headers) throws IOException, RestException { //makes a copy of the parameters before modifying them for this specific request HashMap<String, String> requestParams = new HashMap<>(params); for (Map.Entry<String, String> entry : requestParams.entrySet()) { @@ -74,7 +75,7 @@ public class RestTestExecutionContext implements Closeable { String body = actualBody(bodies); try { - response = callApiInternal(apiName, requestParams, body); + response = callApiInternal(apiName, requestParams, body, headers); //we always stash the last response body stash.stashValue("body", response.getBody()); return response; @@ -104,8 +105,8 @@ public class RestTestExecutionContext implements Closeable { return XContentFactory.jsonBuilder().map(body).string(); } - private RestResponse callApiInternal(String apiName, Map<String, String> params, String body) throws IOException, RestException { - return restClient.callApi(apiName, params, body); + private RestResponse callApiInternal(String apiName, Map<String, String> params, String body, Map<String, String> headers) throws IOException, RestException { + return restClient.callApi(apiName, params, body, headers); } /** diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java b/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java index 4b46a0e6498..63a8b397c45 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java +++ b/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java @@ -132,7 +132,7 @@ public class RestClient implements Closeable { * @throws RestException if the obtained status code is non ok, unless the specific error code needs to be ignored * according to the ignore parameter received as input (which won't get sent to elasticsearch) */ - public RestResponse callApi(String apiName, Map<String, String> params, String body) throws IOException, RestException { + public RestResponse callApi(String apiName, Map<String, String> params, String body, Map<String, String> headers) throws IOException, RestException { List<String> ignores = new ArrayList<>(); Map<String, String> requestParams = null; @@ -151,6 +151,9 @@ public class RestClient implements Closeable { } HttpRequestBuilder httpRequestBuilder = callApiBuilder(apiName, requestParams, body); + for (Map.Entry<String, String> header : headers.entrySet()) { + httpRequestBuilder.addHeader(header.getKey(), header.getValue()); + } logger.debug("calling api [{}]", apiName); HttpResponse httpResponse = httpRequestBuilder.execute(); diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java b/test-framework/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java index ec5aef54459..2a20e0f3146 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java +++ b/test-framework/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java @@ -25,6 +25,8 @@ import org.elasticsearch.test.rest.section.ApiCallSection; import org.elasticsearch.test.rest.section.DoSection; import java.io.IOException; +import java.util.HashMap; +import java.util.Map; /** * Parser for do sections @@ -40,6 +42,8 @@ public class DoSectionParser implements RestTestFragmentParser<DoSection> { XContentParser.Token token; DoSection doSection = new DoSection(); + ApiCallSection apiCallSection = null; + Map<String, String> headers = new HashMap<>(); while ((token =
parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -49,8 +53,17 @@ public class DoSectionParser implements RestTestFragmentParser<DoSection> { doSection.setCatch(parser.text()); } } else if (token == XContentParser.Token.START_OBJECT) { - if (currentFieldName != null) { - ApiCallSection apiCallSection = new ApiCallSection(currentFieldName); + if ("headers".equals(currentFieldName)) { + String headerName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + headerName = parser.currentName(); + } else if (token.isValue()) { + headers.put(headerName, parser.text()); + } + } + } else if (currentFieldName != null) { // must be part of API call then + apiCallSection = new ApiCallSection(currentFieldName); String paramName = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -73,17 +86,20 @@ public class DoSectionParser implements RestTestFragmentParser<DoSection> { } } } - doSection.setApiCallSection(apiCallSection); } } } - - parser.nextToken(); - - if (doSection.getApiCallSection() == null) { - throw new RestTestParseException("client call section is mandatory within a do section"); + try { + if (apiCallSection == null) { + throw new RestTestParseException("client call section is mandatory within a do section"); + } + if (headers.isEmpty() == false) { + apiCallSection.addHeaders(headers); + } + doSection.setApiCallSection(apiCallSection); + } finally { + parser.nextToken(); } - return doSection; } } diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java b/test-framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java index da6c0b3be2c..030469148ed 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java +++ b/test-framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java @@ -33,6 +33,7 @@ public class ApiCallSection { private final String api; private final Map<String, String> params = new HashMap<>(); + private final Map<String, String> headers = new HashMap<>(); private final List<Map<String, Object>> bodies = new ArrayList<>(); public ApiCallSection(String api) { @@ -56,6 +57,18 @@ public class ApiCallSection { this.params.put(key, value); } + public void addHeaders(Map<String, String> otherHeaders) { + this.headers.putAll(otherHeaders); + } + + public void addHeader(String key, String value) { + this.headers.put(key, value); + } + + public Map<String, String> getHeaders() { + return unmodifiableMap(headers); + } + public List<Map<String, Object>> getBodies() { return Collections.unmodifiableList(bodies); } diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java b/test-framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java index 9a1bf1c9267..38504c4af5f 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java +++ b/test-framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java @@ -45,6 +45,9 @@ import static org.junit.Assert.fail; * * - do: * catch: missing + * headers: + * Authorization: Basic user:pass + * Content-Type: application/json * update: * index: test_1 * type: test @@ -86,7 +89,8 @@ public class DoSection implements ExecutableSection { } try { - RestResponse restResponse =
executionContext.callApi(apiCallSection.getApi(), apiCallSection.getParams(), + apiCallSection.getBodies(), apiCallSection.getHeaders()); if (Strings.hasLength(catchParam)) { String catchStatusCode; if (catches.containsKey(catchParam)) { diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/support/Features.java b/test-framework/src/main/java/org/elasticsearch/test/rest/support/Features.java index 018d2413737..0f51f72e8e5 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/support/Features.java +++ b/test-framework/src/main/java/org/elasticsearch/test/rest/support/Features.java @@ -34,7 +34,7 @@ import java.util.List; */ public final class Features { - private static final List SUPPORTED = Arrays.asList("stash_in_path", "groovy_scripting"); + private static final List SUPPORTED = Arrays.asList("stash_in_path", "groovy_scripting", "headers"); private Features() { diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java b/test-framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java index 5f0f2bd8b35..3c65fda94ca 100644 --- a/test-framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java +++ b/test-framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java @@ -341,6 +341,29 @@ public class DoSectionParserTests extends AbstractParserTestCase { assertThat(doSection.getApiCallSection().hasBody(), equalTo(false)); } + public void testParseDoSectionWithHeaders() throws Exception { + parser = YamlXContent.yamlXContent.createParser( + "headers:\n" + + " Authorization: \"thing one\"\n" + + " Content-Type: \"application/json\"\n" + + "indices.get_warmer:\n" + + " index: test_index\n" + + " name: test_warmer" + ); + + DoSectionParser doSectionParser = new DoSectionParser(); + DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + + assertThat(doSection.getApiCallSection(), notNullValue()); + assertThat(doSection.getApiCallSection().getApi(), equalTo("indices.get_warmer")); + assertThat(doSection.getApiCallSection().getParams().size(), equalTo(2)); + assertThat(doSection.getApiCallSection().hasBody(), equalTo(false)); + assertThat(doSection.getApiCallSection().getHeaders(), notNullValue()); + assertThat(doSection.getApiCallSection().getHeaders().size(), equalTo(2)); + assertThat(doSection.getApiCallSection().getHeaders().get("Authorization"), equalTo("thing one")); + assertThat(doSection.getApiCallSection().getHeaders().get("Content-Type"), equalTo("application/json")); + } + public void testParseDoSectionWithoutClientCallSection() throws Exception { parser = YamlXContent.yamlXContent.createParser( "catch: missing\n"