diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000000..9d4bfbf55d3 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,10 @@ +# EditorConfig: http://editorconfig.org/ + +root = true + +[*.java] +charset = utf-8 +indent_style = space +indent_size = 4 +trim_trailing_whitespace = true +insert_final_newline = true diff --git a/TESTING.asciidoc b/TESTING.asciidoc index b12111c94a4..569c16b0747 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -149,17 +149,23 @@ gradle test -Dtests.awaitsfix=[false] - known issue (@AwaitsFix) === Load balancing and caches. -By default, the tests run sequentially on a single forked JVM. - -To run with more forked JVMs than the default use: +By default the tests run on up to 4 JVMs based on the number of cores. If you +want to explicitly specify the number of JVMs you can do so on the command +line: ---------------------------- gradle test -Dtests.jvms=8 ---------------------------- -Don't count hypercores for CPU-intense tests and leave some slack -for JVM-internal threads (like the garbage collector). Make sure there is -enough RAM to handle child JVMs. +Or in `~/.gradle/gradle.properties`: + +---------------------------- +systemProp.tests.jvms=8 +---------------------------- + +Its difficult to pick the "right" number here. Hypercores don't count for CPU +intensive tests and you should leave some slack for JVM-interal threads like +the garbage collector. And you have to have enough RAM to handle each JVM. === Test compatibility. @@ -280,11 +286,20 @@ The REST layer is tested through specific tests that are shared between all the elasticsearch official clients and consist of YAML files that describe the operations to be executed and the obtained results that need to be tested. -The REST tests are run automatically when executing the maven test command. To run only the +The REST tests are run automatically when executing the "gradle check" command. To run only the REST tests use the following command: --------------------------------------------------------------------------- -gradle integTest -Dtests.filter="@Rest" +gradle :distribution:tar:integTest \ + -Dtests.class=org.elasticsearch.test.rest.RestIT +--------------------------------------------------------------------------- + +A specific test case can be run with + +--------------------------------------------------------------------------- +gradle :distribution:tar:integTest \ + -Dtests.class=org.elasticsearch.test.rest.RestIT \ + -Dtests.method="test {p0=cat.shards/10_basic/Help}" --------------------------------------------------------------------------- `RestNIT` are the executable test classes that runs all the diff --git a/build.gradle b/build.gradle index 785db7ec0c4..dac8232b172 100644 --- a/build.gradle +++ b/build.gradle @@ -45,7 +45,7 @@ subprojects { } } } - } + } extraArchive { javadoc = true tests = false @@ -86,8 +86,8 @@ subprojects { tasks.withType(Jar) { into('META-INF') { from project.rootProject.rootDir - include 'LICENSE.txt' - include 'NOTICE.txt' + include 'LICENSE.txt' + include 'NOTICE.txt' } } // ignore missing javadocs @@ -101,12 +101,19 @@ subprojects { } } + /* Sets up the dependencies that we build as part of this project but + register as thought they were external to resolve internally. We register + them as external dependencies so the build plugin that we use can be used + to build elasticsearch plugins outside of the elasticsearch source tree. 
*/ ext.projectSubstitutions = [ "org.elasticsearch:rest-api-spec:${version}": ':rest-api-spec', "org.elasticsearch:elasticsearch:${version}": ':core', "org.elasticsearch:test-framework:${version}": ':test-framework', + "org.elasticsearch.distribution.integ-test-zip:elasticsearch:${version}": ':distribution:integ-test-zip', "org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:zip', - "org.elasticsearch.distribution.tar:elasticsearch:${version}": ':distribution:tar' + "org.elasticsearch.distribution.tar:elasticsearch:${version}": ':distribution:tar', + "org.elasticsearch.distribution.rpm:elasticsearch:${version}": ':distribution:rpm', + "org.elasticsearch.distribution.deb:elasticsearch:${version}": ':distribution:deb', ] configurations.all { resolutionStrategy.dependencySubstitution { DependencySubstitutions subs -> @@ -226,7 +233,7 @@ class Run extends DefaultTask { ) public void setDebug(boolean enabled) { project.project(':distribution').run.clusterConfig.debug = enabled - } + } } task run(type: Run) { dependsOn ':distribution:run' @@ -234,4 +241,3 @@ task run(type: Run) { group = 'Verification' impliesSubProjects = true } - diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 0fa713fe371..e46f9cb33c0 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -80,3 +80,13 @@ eclipse { defaultOutputDir = new File(file('build'), 'eclipse') } } + +task copyEclipseSettings(type: Copy) { + from project.file('src/main/resources/eclipse.settings') + into '.settings' +} +// otherwise .settings is not nuked entirely +tasks.cleanEclipse { + delete '.settings' +} +tasks.eclipse.dependsOn(cleanEclipse, copyEclipseSettings) diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestLoggingConfiguration.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestLoggingConfiguration.groovy index d18ac3fbd5a..97251252f54 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestLoggingConfiguration.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestLoggingConfiguration.groovy @@ -1,5 +1,6 @@ package com.carrotsearch.gradle.junit4 +import org.gradle.api.tasks.Input import org.gradle.util.ConfigureUtil class TestLoggingConfiguration { @@ -20,6 +21,10 @@ class TestLoggingConfiguration { SlowTestsConfiguration slowTests = new SlowTestsConfiguration() StackTraceFiltersConfiguration stackTraceFilters = new StackTraceFiltersConfiguration() + /** Summarize the first N failures at the end of the test. */ + @Input + int showNumFailuresAtEnd = 3 // match TextReport default + void slowTests(Closure closure) { ConfigureUtil.configure(closure, slowTests) } @@ -31,4 +36,8 @@ class TestLoggingConfiguration { void outputMode(String mode) { outputMode = mode.toUpperCase() as OutputMode } + + void showNumFailuresAtEnd(int n) { + showNumFailuresAtEnd = n + } } diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy index 22a449cb5f1..b56a22ee2d9 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy @@ -48,9 +48,6 @@ class TestReportLogger extends TestsSummaryEventListener implements AggregatedEv /** Format line for JVM ID string. */ String jvmIdFormat - /** Summarize the first N failures at the end. 
*/ - int showNumFailuresAtEnd = 3 - /** Output stream that logs messages to the given logger */ LoggingOutputStream outStream LoggingOutputStream errStream @@ -110,13 +107,13 @@ class TestReportLogger extends TestsSummaryEventListener implements AggregatedEv @Subscribe void onQuit(AggregatedQuitEvent e) throws IOException { - if (showNumFailuresAtEnd > 0 && !failedTests.isEmpty()) { + if (config.showNumFailuresAtEnd > 0 && !failedTests.isEmpty()) { List sublist = this.failedTests StringBuilder b = new StringBuilder() b.append('Tests with failures') - if (sublist.size() > showNumFailuresAtEnd) { - sublist = sublist.subList(0, showNumFailuresAtEnd) - b.append(" (first " + showNumFailuresAtEnd + " out of " + failedTests.size() + ")") + if (sublist.size() > config.showNumFailuresAtEnd) { + sublist = sublist.subList(0, config.showNumFailuresAtEnd) + b.append(" (first " + config.showNumFailuresAtEnd + " out of " + failedTests.size() + ")") } b.append(':\n') for (Description description : sublist) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index c35ec5cd0f6..c4d0ced6b5c 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -62,7 +62,7 @@ class BuildPlugin implements Plugin { configureCompile(project) configureTest(project) - PrecommitTasks.configure(project) + configurePrecommit(project) } /** Performs checks on the build environment and prints information about the build environment. */ @@ -283,16 +283,24 @@ class BuildPlugin implements Plugin { /** Adds compiler settings to the project */ static void configureCompile(Project project) { + project.ext.compactProfile = 'compact3' project.afterEvaluate { // fail on all javac warnings project.tasks.withType(JavaCompile) { options.fork = true options.forkOptions.executable = new File(project.javaHome, 'bin/javac') + options.forkOptions.memoryMaximumSize = "1g" /* * -path because gradle will send in paths that don't always exist. * -missing because we have tons of missing @returns and @param. 
*/ + // don't even think about passing args with -J-xxx, oracle will ask you to submit a bug report :) options.compilerArgs << '-Werror' << '-Xlint:all,-path' << '-Xdoclint:all' << '-Xdoclint:-missing' + // compile with compact 3 profile by default + // NOTE: this is just a compile time check: does not replace testing with a compact3 JRE + if (project.compactProfile != 'full') { + options.compilerArgs << '-profile' << project.compactProfile + } options.encoding = 'UTF-8' } } @@ -363,6 +371,7 @@ class BuildPlugin implements Plugin { enableSystemAssertions false testLogging { + showNumFailuresAtEnd 25 slowTests { heartbeat 10 summarySize 5 @@ -407,4 +416,11 @@ class BuildPlugin implements Plugin { } return test } + + private static configurePrecommit(Project project) { + Task precommit = PrecommitTasks.create(project, true) + project.check.dependsOn(precommit) + project.test.mustRunAfter(precommit) + project.dependencyLicenses.dependencies = project.configurations.runtime - project.configurations.provided + } } diff --git a/core/src/test/java/org/elasticsearch/common/compress/lzf/LZFTestCompressor.java b/buildSrc/src/main/groovy/org/elasticsearch/gradle/LoggedExec.groovy similarity index 50% rename from core/src/test/java/org/elasticsearch/common/compress/lzf/LZFTestCompressor.java rename to buildSrc/src/main/groovy/org/elasticsearch/gradle/LoggedExec.groovy index 8f21b0cbf0b..1896cdf1b67 100644 --- a/core/src/test/java/org/elasticsearch/common/compress/lzf/LZFTestCompressor.java +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/LoggedExec.groovy @@ -17,18 +17,26 @@ * under the License. */ -package org.elasticsearch.common.compress.lzf; +package org.elasticsearch.gradle -import org.elasticsearch.common.io.stream.StreamOutput; +import org.gradle.api.GradleException +import org.gradle.api.tasks.Exec -import java.io.IOException; - -// LZF compressor with write support, for testing only -public class LZFTestCompressor extends LZFCompressor { - - @Override - public StreamOutput streamOutput(StreamOutput out) throws IOException { - return new LZFCompressedStreamOutput(out); +/** + * A wrapper around gradle's Exec task to capture output and log on error. + */ +class LoggedExec extends Exec { + LoggedExec() { + if (logger.isInfoEnabled() == false) { + standardOutput = new ByteArrayOutputStream() + errorOutput = standardOutput + ignoreExitValue = true + doLast { + if (execResult.exitValue != 0) { + standardOutput.toString('UTF-8').eachLine { line -> logger.error(line) } + throw new GradleException("Process '${executable} ${args.join(' ')}' finished with non-zero exit value ${execResult.exitValue}") + } + } + } } - } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index fbbdb242d45..eea2041052a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -23,40 +23,41 @@ import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RunTask import org.gradle.api.Project import org.gradle.api.Task +import org.gradle.api.tasks.SourceSet import org.gradle.api.tasks.bundling.Zip /** * Encapsulates build configuration for an Elasticsearch plugin. 
*/ -class PluginBuildPlugin extends BuildPlugin { +public class PluginBuildPlugin extends BuildPlugin { @Override - void apply(Project project) { + public void apply(Project project) { super.apply(project) configureDependencies(project) - // this afterEvaluate must happen before the afterEvaluate added by integTest configure, + // this afterEvaluate must happen before the afterEvaluate added by integTest creation, // so that the file name resolution for installing the plugin will be setup project.afterEvaluate { String name = project.pluginProperties.extension.name project.jar.baseName = name project.bundlePlugin.baseName = name + project.integTest.dependsOn(project.bundlePlugin) - project.integTest.clusterConfig.plugin(name, project.bundlePlugin.outputs.files) project.tasks.run.dependsOn(project.bundlePlugin) - project.tasks.run.clusterConfig.plugin(name, project.bundlePlugin.outputs.files) - } - RestIntegTestTask.configure(project) - RunTask.configure(project) - Task bundle = configureBundleTask(project) - project.configurations.archives.artifacts.removeAll { it.archiveTask.is project.jar } - project.configurations.getByName('default').extendsFrom = [] - project.artifacts { - archives bundle - 'default' bundle + if (project.path.startsWith(':modules:')) { + project.integTest.clusterConfig.module(project) + project.tasks.run.clusterConfig.module(project) + } else { + project.integTest.clusterConfig.plugin(name, project.bundlePlugin.outputs.files) + project.tasks.run.clusterConfig.plugin(name, project.bundlePlugin.outputs.files) + } } + createIntegTestTask(project) + createBundleTask(project) + project.tasks.create('run', RunTask) // allow running ES with this plugin in the foreground of a build } - static void configureDependencies(Project project) { + private static void configureDependencies(Project project) { project.dependencies { provided "org.elasticsearch:elasticsearch:${project.versions.elasticsearch}" testCompile "org.elasticsearch:test-framework:${project.versions.elasticsearch}" @@ -72,21 +73,36 @@ class PluginBuildPlugin extends BuildPlugin { } } - static Task configureBundleTask(Project project) { - PluginPropertiesTask buildProperties = project.tasks.create(name: 'pluginProperties', type: PluginPropertiesTask) - File pluginMetadata = project.file("src/main/plugin-metadata") - project.sourceSets.test { - output.dir(buildProperties.generatedResourcesDir, builtBy: 'pluginProperties') - resources { - srcDir pluginMetadata - } - } - Task bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [project.jar, buildProperties]) - bundle.configure { - from buildProperties - from pluginMetadata - from project.jar - from bundle.project.configurations.runtime - bundle.project.configurations.provided + /** Adds an integTest task which runs rest tests */ + private static void createIntegTestTask(Project project) { + RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class) + integTest.mustRunAfter(project.precommit, project.test) + project.check.dependsOn(integTest) + } + + /** + * Adds a bundlePlugin task which builds the zip containing the plugin jars, + * metadata, properties, and packaging files + */ + private static void createBundleTask(Project project) { + File pluginMetadata = project.file('src/main/plugin-metadata') + + // create a task to build the properties file for this plugin + PluginPropertiesTask buildProperties = project.tasks.create('pluginProperties', PluginPropertiesTask.class) + + // add the plugin properties and 
metadata to test resources, so unit tests can + // know about the plugin (used by test security code to statically initialize the plugin in unit tests) + SourceSet testSourceSet = project.sourceSets.test + testSourceSet.output.dir(buildProperties.generatedResourcesDir, builtBy: 'pluginProperties') + testSourceSet.resources.srcDir(pluginMetadata) + + // create the actual bundle task, which zips up all the files for the plugin + Zip bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [project.jar, buildProperties]) { + from buildProperties // plugin properties file + from pluginMetadata // metadata (eg custom security policy) + from project.jar // this plugin's jar + from project.configurations.runtime - project.configurations.provided // the dep jars + // extra files for the plugin to go into the zip from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging from('src/main') { include 'config/**' @@ -97,6 +113,13 @@ class PluginBuildPlugin extends BuildPlugin { } } project.assemble.dependsOn(bundle) - return bundle + + // remove jar from the archives (things that will be published), and set it to the zip + project.configurations.archives.artifacts.removeAll { it.archiveTask.is project.jar } + project.artifacts.add('archives', bundle) + + // also make the zip the default artifact (used when depending on this project) + project.configurations.getByName('default').extendsFrom = [] + project.artifacts.add('default', bundle) } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy index 1161fa35666..e2f10100269 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy @@ -18,64 +18,104 @@ */ package org.elasticsearch.gradle.precommit -import org.gradle.api.DefaultTask -import org.gradle.api.GradleException -import org.gradle.api.InvalidUserDataException -import org.gradle.api.Project -import org.gradle.api.Task +import org.gradle.api.* import org.gradle.api.file.FileCollection import org.gradle.api.tasks.Input import org.gradle.api.tasks.InputDirectory import org.gradle.api.tasks.InputFiles -import org.gradle.api.tasks.StopActionException import org.gradle.api.tasks.TaskAction -import org.gradle.api.tasks.VerificationTask import java.nio.file.Files import java.security.MessageDigest import java.util.regex.Matcher import java.util.regex.Pattern -class DependencyLicensesTask extends DefaultTask { +/** + * A task to check licenses for dependencies. + * + * There are two parts to the check: + *
+ * <ul>
+ *   <li>LICENSE and NOTICE files</li>
+ *   <li>SHA checksums for each dependency jar</li>
+ * </ul>
+ *
+ * The directory to find the license and sha files in defaults to the dir @{code licenses}
+ * in the project directory for this task. You can override this directory:
+ * <pre>
+ *   dependencyLicenses {
+ *     licensesDir = project.file('mybetterlicensedir')
+ *   }
+ * </pre>
+ *
+ * The jar files to check default to the dependencies from the default configuration. You
+ * can override this, for example, to only check compile dependencies:
+ * <pre>
+ *   dependencyLicenses {
+ *     dependencies = project.configurations.compile
+ *   }
+ * </pre>
+ *
+ * Every jar must have a {@code .sha1} file in the licenses dir. These can be managed
+ * automatically using the {@code updateShas} helper task that is created along
+ * with this task. It will add {@code .sha1} files for new jars that are in dependencies
+ * and remove old {@code .sha1} files that are no longer needed.
+ *
+ * Every jar must also have a LICENSE and NOTICE file. However, multiple jars can share
+ * LICENSE and NOTICE files by mapping a pattern to the same name.
+ * <pre>
+ *   dependencyLicenses {
+ *     mapping from: /lucene-.*/, to: 'lucene'
+ *   }
+ * </pre>
+ */ +public class DependencyLicensesTask extends DefaultTask { static final String SHA_EXTENSION = '.sha1' - static Task configure(Project project, Closure closure) { - DependencyLicensesTask task = project.tasks.create(type: DependencyLicensesTask, name: 'dependencyLicenses') - UpdateShasTask update = project.tasks.create(type: UpdateShasTask, name: 'updateShas') - update.parentTask = task - task.configure(closure) - project.check.dependsOn(task) - return task - } - + // TODO: we should be able to default this to eg compile deps, but we need to move the licenses + // check from distribution to core (ie this should only be run on java projects) + /** A collection of jar files that should be checked. */ @InputFiles - FileCollection dependencies + public FileCollection dependencies + /** The directory to find the license and sha files in. */ @InputDirectory - File licensesDir = new File(project.projectDir, 'licenses') + public File licensesDir = new File(project.projectDir, 'licenses') - LinkedHashMap mappings = new LinkedHashMap<>() + /** A map of patterns to prefix, used to find the LICENSE and NOTICE file. */ + private LinkedHashMap mappings = new LinkedHashMap<>() + /** + * Add a mapping from a regex pattern for the jar name, to a prefix to find + * the LICENSE and NOTICE file for that jar. + */ @Input - void mapping(Map props) { - String from = props.get('from') + public void mapping(Map props) { + String from = props.remove('from') if (from == null) { throw new InvalidUserDataException('Missing "from" setting for license name mapping') } - String to = props.get('to') + String to = props.remove('to') if (to == null) { throw new InvalidUserDataException('Missing "to" setting for license name mapping') } + if (props.isEmpty() == false) { + throw new InvalidUserDataException("Unknown properties for mapping on dependencyLicenses: ${props.keySet()}") + } mappings.put(from, to) } @TaskAction - void checkDependencies() { - // TODO: empty license dir (or error when dir exists and no deps) - if (licensesDir.exists() == false && dependencies.isEmpty() == false) { + public void checkDependencies() { + if (dependencies.isEmpty()) { + if (licensesDir.exists()) { + throw new GradleException("Licenses dir ${licensesDir} exists, but there are no dependencies") + } + return // no dependencies to check + } else if (licensesDir.exists() == false) { throw new GradleException("Licences dir ${licensesDir} does not exist, but there are dependencies") } + // order is the same for keys and values iteration since we use a linked hashmap List mapped = new ArrayList<>(mappings.values()) Pattern mappingsPattern = Pattern.compile('(' + mappings.keySet().join(')|(') + ')') @@ -127,7 +167,7 @@ class DependencyLicensesTask extends DefaultTask { } } - void checkSha(File jar, String jarName, Set shaFiles) { + private void checkSha(File jar, String jarName, Set shaFiles) { File shaFile = new File(licensesDir, jarName + SHA_EXTENSION) if (shaFile.exists() == false) { throw new GradleException("Missing SHA for ${jarName}. 
Run 'gradle updateSHAs' to create") @@ -143,7 +183,7 @@ class DependencyLicensesTask extends DefaultTask { shaFiles.remove(shaFile) } - void checkFile(String name, String jarName, Map counters, String type) { + private void checkFile(String name, String jarName, Map counters, String type) { String fileName = "${name}-${type}" Integer count = counters.get(fileName) if (count == null) { @@ -158,10 +198,12 @@ class DependencyLicensesTask extends DefaultTask { counters.put(fileName, count + 1) } - static class UpdateShasTask extends DefaultTask { - DependencyLicensesTask parentTask + /** A helper task to update the sha files in the license dir. */ + public static class UpdateShasTask extends DefaultTask { + private DependencyLicensesTask parentTask + @TaskAction - void updateShas() { + public void updateShas() { Set shaFiles = new HashSet() parentTask.licensesDir.eachFile { String name = it.getName() diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ForbiddenPatternsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ForbiddenPatternsTask.groovy index 6ed18f4d18c..5fa63956b57 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ForbiddenPatternsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ForbiddenPatternsTask.groovy @@ -19,10 +19,11 @@ package org.elasticsearch.gradle.precommit import org.gradle.api.DefaultTask +import org.gradle.api.GradleException +import org.gradle.api.InvalidUserDataException import org.gradle.api.file.FileCollection import org.gradle.api.tasks.InputFiles import org.gradle.api.tasks.OutputFile -import org.gradle.api.tasks.OutputFiles import org.gradle.api.tasks.SourceSet import org.gradle.api.tasks.TaskAction import org.gradle.api.tasks.util.PatternFilterable @@ -33,14 +34,19 @@ import java.util.regex.Pattern /** * Checks for patterns in source files for the project which are forbidden. */ -class ForbiddenPatternsTask extends DefaultTask { - Map patterns = new LinkedHashMap<>() - PatternFilterable filesFilter = new PatternSet() +public class ForbiddenPatternsTask extends DefaultTask { + + /** The rules: a map from the rule name, to a rule regex pattern. */ + private Map patterns = new LinkedHashMap<>() + /** A pattern set of which files should be checked. */ + private PatternFilterable filesFilter = new PatternSet() @OutputFile File outputMarker = new File(project.buildDir, "markers/forbiddenPatterns") - ForbiddenPatternsTask() { + public ForbiddenPatternsTask() { + description = 'Checks source files for invalid patterns like nocommits or tabs' + // we always include all source files, and exclude what should not be checked filesFilter.include('**') // exclude known binary extensions @@ -52,23 +58,28 @@ class ForbiddenPatternsTask extends DefaultTask { filesFilter.exclude('**/*.crt') filesFilter.exclude('**/*.png') - // TODO: add compile and test compile outputs as this tasks outputs, so we don't rerun when source files haven't changed + // add mandatory rules + patterns.put('nocommit', /nocommit/) + patterns.put('tab', /\t/) } /** Adds a file glob pattern to be excluded */ - void exclude(String... excludes) { + public void exclude(String... excludes) { this.filesFilter.exclude(excludes) } - /** Adds pattern to forbid */ + /** Adds a pattern to forbid. 
T */ void rule(Map props) { - String name = props.get('name') + String name = props.remove('name') if (name == null) { - throw new IllegalArgumentException('Missing [name] for invalid pattern rule') + throw new InvalidUserDataException('Missing [name] for invalid pattern rule') } - String pattern = props.get('pattern') + String pattern = props.remove('pattern') if (pattern == null) { - throw new IllegalArgumentException('Missing [pattern] for invalid pattern rule') + throw new InvalidUserDataException('Missing [pattern] for invalid pattern rule') + } + if (props.isEmpty() == false) { + throw new InvalidUserDataException("Unknown arguments for ForbiddenPatterns rule mapping: ${props.keySet()}") } // TODO: fail if pattern contains a newline, it won't work (currently) patterns.put(name, pattern) @@ -89,14 +100,14 @@ class ForbiddenPatternsTask extends DefaultTask { Pattern allPatterns = Pattern.compile('(' + patterns.values().join(')|(') + ')') List failures = new ArrayList<>() for (File f : files()) { - f.eachLine('UTF-8') { line, lineNumber -> + f.eachLine('UTF-8') { String line, int lineNumber -> if (allPatterns.matcher(line).find()) { - addErrorMessages(failures, f, (String)line, (int)lineNumber) + addErrorMessages(failures, f, line, lineNumber) } } } if (failures.isEmpty() == false) { - throw new IllegalArgumentException('Found invalid patterns:\n' + failures.join('\n')) + throw new GradleException('Found invalid patterns:\n' + failures.join('\n')) } outputMarker.setText('done', 'UTF-8') } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/JarHellTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/JarHellTask.groovy new file mode 100644 index 00000000000..2873fbd4df5 --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/JarHellTask.groovy @@ -0,0 +1,62 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle.precommit + +import org.elasticsearch.gradle.LoggedExec +import org.gradle.api.file.FileCollection +import org.gradle.api.tasks.InputFile +import org.gradle.api.tasks.OutputFile + +/** + * Runs CheckJarHell on a classpath. + */ +public class JarHellTask extends LoggedExec { + + /** + * We use a simple "marker" file that we touch when the task succeeds + * as the task output. This is compared against the modified time of the + * inputs (ie the jars/class files). 
+ */ + @OutputFile + public File successMarker = new File(project.buildDir, 'markers/jarHell') + + /** The classpath to run jarhell check on, defaults to the test runtime classpath */ + @InputFile + public FileCollection classpath = project.sourceSets.test.runtimeClasspath + + public JarHellTask() { + project.afterEvaluate { + dependsOn(classpath) + description = "Runs CheckJarHell on ${classpath}" + executable = new File(project.javaHome, 'bin/java') + doFirst({ + /* JarHell doesn't like getting directories that don't exist but + gradle isn't especially careful about that. So we have to do it + filter it ourselves. */ + FileCollection taskClasspath = classpath.filter { it.exists() } + args('-cp', taskClasspath.asPath, 'org.elasticsearch.bootstrap.JarHell') + }) + doLast({ + successMarker.parentFile.mkdirs() + successMarker.setText("", 'UTF-8') + }) + } + } +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index aebc00e038a..04878d979e9 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -18,16 +18,10 @@ */ package org.elasticsearch.gradle.precommit -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApisExtension import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin -import org.gradle.api.GradleException import org.gradle.api.Project import org.gradle.api.Task -import org.gradle.api.file.FileCollection import org.gradle.api.plugins.JavaBasePlugin -import org.gradle.api.tasks.Exec -import org.gradle.api.tasks.TaskContainer /** * Validation tasks which should be run before committing. These run before tests. @@ -35,36 +29,34 @@ import org.gradle.api.tasks.TaskContainer class PrecommitTasks { /** Adds a precommit task, which depends on non-test verification tasks. 
*/ - static void configure(Project project) { - List precommitTasks = [ - configureForbiddenApis(project), - configureForbiddenPatterns(project.tasks), - configureJarHell(project)] + public static Task create(Project project, boolean includeDependencyLicenses) { - Map precommitOptions = [ - name: 'precommit', - group: JavaBasePlugin.VERIFICATION_GROUP, - description: 'Runs all non-test checks.', - dependsOn: precommitTasks - ] - Task precommit = project.tasks.create(precommitOptions) - project.check.dependsOn(precommit) + List precommitTasks = [ + configureForbiddenApis(project), + project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class), + project.tasks.create('jarHell', JarHellTask.class)] - // delay ordering relative to test tasks, since they may not be setup yet - project.afterEvaluate { - Task test = project.tasks.findByName('test') - if (test != null) { - test.mustRunAfter(precommit) - } - Task integTest = project.tasks.findByName('integTest') - if (integTest != null) { - integTest.mustRunAfter(precommit) - } + // tasks with just tests don't need dependency licenses, so this flag makes adding + // the task optional + if (includeDependencyLicenses) { + DependencyLicensesTask dependencyLicenses = project.tasks.create('dependencyLicenses', DependencyLicensesTask.class) + precommitTasks.add(dependencyLicenses) + // we also create the updateShas helper task that is associated with dependencyLicenses + UpdateShasTask updateShas = project.tasks.create('updateShas', UpdateShasTask.class) + updateShas.parentTask = dependencyLicenses } + + Map precommitOptions = [ + name: 'precommit', + group: JavaBasePlugin.VERIFICATION_GROUP, + description: 'Runs all non-test checks.', + dependsOn: precommitTasks + ] + return project.tasks.create(precommitOptions) } - static Task configureForbiddenApis(Project project) { - project.pluginManager.apply('de.thetaphi.forbiddenapis') + private static Task configureForbiddenApis(Project project) { + project.pluginManager.apply(ForbiddenApisPlugin.class) project.forbiddenApis { internalRuntimeForbidden = true failOnUnsupportedJava = false @@ -75,72 +67,18 @@ class PrecommitTasks { Task mainForbidden = project.tasks.findByName('forbiddenApisMain') if (mainForbidden != null) { mainForbidden.configure { - bundledSignatures += ['jdk-system-out'] - signaturesURLs += [ - getClass().getResource('/forbidden/core-signatures.txt'), - getClass().getResource('/forbidden/third-party-signatures.txt')] + bundledSignatures += 'jdk-system-out' + signaturesURLs += getClass().getResource('/forbidden/core-signatures.txt') } } Task testForbidden = project.tasks.findByName('forbiddenApisTest') if (testForbidden != null) { testForbidden.configure { - signaturesURLs += [getClass().getResource('/forbidden/test-signatures.txt')] + signaturesURLs += getClass().getResource('/forbidden/test-signatures.txt') } } Task forbiddenApis = project.tasks.findByName('forbiddenApis') forbiddenApis.group = "" // clear group, so this does not show up under verification tasks return forbiddenApis } - - static Task configureForbiddenPatterns(TaskContainer tasks) { - Map options = [ - name: 'forbiddenPatterns', - type: ForbiddenPatternsTask, - description: 'Checks source files for invalid patterns like nocommits or tabs', - ] - return tasks.create(options) { - rule name: 'nocommit', pattern: /nocommit/ - rule name: 'tab', pattern: /\t/ - } - } - - /** - * Adds a task to run jar hell before on the test classpath. 
- * - * We use a simple "marker" file that we touch when the task succeeds - * as the task output. This is compared against the modified time of the - * inputs (ie the jars/class files). - */ - static Task configureJarHell(Project project) { - File successMarker = new File(project.buildDir, 'markers/jarHell') - Exec task = project.tasks.create(name: 'jarHell', type: Exec) - FileCollection testClasspath = project.sourceSets.test.runtimeClasspath - task.dependsOn(testClasspath) - task.inputs.files(testClasspath) - task.outputs.file(successMarker) - task.executable = new File(project.javaHome, 'bin/java') - task.doFirst({ - /* JarHell doesn't like getting directories that don't exist but - gradle isn't especially careful about that. So we have to do it - filter it ourselves. */ - def taskClasspath = testClasspath.filter { it.exists() } - task.args('-cp', taskClasspath.asPath, 'org.elasticsearch.bootstrap.JarHell') - }) - if (task.logger.isInfoEnabled() == false) { - task.standardOutput = new ByteArrayOutputStream() - task.errorOutput = task.standardOutput - task.ignoreExitValue = true - task.doLast({ - if (execResult.exitValue != 0) { - logger.error(standardOutput.toString()) - throw new GradleException("JarHell failed") - } - }) - } - task.doLast({ - successMarker.parentFile.mkdirs() - successMarker.setText("", 'UTF-8') - }) - return task - } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/UpdateShasTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/UpdateShasTask.groovy new file mode 100644 index 00000000000..4a174688aa1 --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/UpdateShasTask.groovy @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.gradle.precommit + +import org.gradle.api.DefaultTask +import org.gradle.api.tasks.TaskAction + +import java.nio.file.Files +import java.security.MessageDigest + +/** + * A task to update shas used by {@code DependencyLicensesCheck} + */ +public class UpdateShasTask extends DefaultTask { + + /** The parent dependency licenses task to use configuration from */ + public DependencyLicensesTask parentTask + + public UpdateShasTask() { + description = 'Updates the sha files for the dependencyLicenses check' + onlyIf { parentTask.licensesDir.exists() } + } + + @TaskAction + public void updateShas() { + Set shaFiles = new HashSet() + parentTask.licensesDir.eachFile { + String name = it.getName() + if (name.endsWith(DependencyLicensesTask.SHA_EXTENSION)) { + shaFiles.add(it) + } + } + for (File dependency : parentTask.dependencies) { + String jarName = dependency.getName() + File shaFile = new File(parentTask.licensesDir, jarName + DependencyLicensesTask.SHA_EXTENSION) + if (shaFile.exists() == false) { + logger.lifecycle("Adding sha for ${jarName}") + String sha = MessageDigest.getInstance("SHA-1").digest(dependency.getBytes()).encodeHex().toString() + shaFile.setText(sha, 'UTF-8') + } else { + shaFiles.remove(shaFile) + } + } + shaFiles.each { shaFile -> + logger.lifecycle("Removing unused sha ${shaFile.getName()}") + Files.delete(shaFile.toPath()) + } + } +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index a6b59e58bf9..8bc80da74b5 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -18,6 +18,8 @@ */ package org.elasticsearch.gradle.test +import org.gradle.api.GradleException +import org.gradle.api.Project import org.gradle.api.file.FileCollection import org.gradle.api.tasks.Input @@ -25,7 +27,7 @@ import org.gradle.api.tasks.Input class ClusterConfiguration { @Input - String distribution = 'zip' + String distribution = 'integ-test-zip' @Input int numNodes = 1 @@ -64,7 +66,12 @@ class ClusterConfiguration { Map settings = new HashMap<>() - LinkedHashMap plugins = new LinkedHashMap<>() + // map from destination path, to source file + Map extraConfigFiles = new HashMap<>() + + LinkedHashMap plugins = new LinkedHashMap<>() + + List modules = new ArrayList<>() LinkedHashMap setupCommands = new LinkedHashMap<>() @@ -83,8 +90,31 @@ class ClusterConfiguration { plugins.put(name, file) } + @Input + void plugin(String name, Project pluginProject) { + plugins.put(name, pluginProject) + } + + /** Add a module to the cluster. The project must be an esplugin and have a single zip default artifact. */ + @Input + void module(Project moduleProject) { + modules.add(moduleProject) + } + @Input void setupCommand(String name, Object... args) { setupCommands.put(name, args) } + + /** + * Add an extra configuration file. 
The path is relative to the config dir, and the sourceFile + * is anything accepted by project.file() + */ + @Input + void extraConfigFile(String path, Object sourceFile) { + if (path == 'elasticsearch.yml') { + throw new GradleException('Overwriting elasticsearch.yml is not allowed, add additional settings using cluster { setting "foo", "bar" }') + } + extraConfigFiles.put(path, sourceFile) + } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 43b9f8af3a7..fff6082b3e5 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -20,13 +20,14 @@ package org.elasticsearch.gradle.test import org.apache.tools.ant.DefaultLogger import org.apache.tools.ant.taskdefs.condition.Os +import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.VersionProperties +import org.elasticsearch.gradle.plugin.PluginBuildPlugin import org.gradle.api.* +import org.gradle.api.artifacts.Configuration import org.gradle.api.file.FileCollection import org.gradle.api.logging.Logger -import org.gradle.api.tasks.Copy -import org.gradle.api.tasks.Delete -import org.gradle.api.tasks.Exec +import org.gradle.api.tasks.* import java.nio.file.Paths @@ -59,7 +60,12 @@ class ClusterFormationTasks { /** Adds a dependency on the given distribution */ static void configureDistributionDependency(Project project, String distro) { String elasticsearchVersion = VersionProperties.elasticsearch - String packaging = distro == 'tar' ? 'tar.gz' : distro + String packaging = distro + if (distro == 'tar') { + packaging = 'tar.gz' + } else if (distro == 'integ-test-zip') { + packaging = 'zip' + } project.configurations { elasticsearchDistro } @@ -99,17 +105,19 @@ class ClusterFormationTasks { setup = configureStopTask(taskName(task, node, 'stopPrevious'), project, setup, node) setup = configureExtractTask(taskName(task, node, 'extract'), project, setup, node) setup = configureWriteConfigTask(taskName(task, node, 'configure'), project, setup, node) + setup = configureExtraConfigFilesTask(taskName(task, node, 'extraConfig'), project, setup, node) setup = configureCopyPluginsTask(taskName(task, node, 'copyPlugins'), project, setup, node) + // install modules + for (Project module : node.config.modules) { + String actionName = pluginTaskName('install', module.name, 'Module') + setup = configureInstallModuleTask(taskName(task, node, actionName), project, setup, node, module) + } + // install plugins - for (Map.Entry plugin : node.config.plugins.entrySet()) { - // replace every dash followed by a character with just the uppercase character - String camelName = plugin.getKey().replaceAll(/-(\w)/) { _, c -> c.toUpperCase(Locale.ROOT) } - String actionName = "install${camelName[0].toUpperCase(Locale.ROOT) + camelName.substring(1)}Plugin" - // delay reading the file location until execution time by wrapping in a closure within a GString - String file = "${-> new File(node.pluginsTmpDir, plugin.getValue().singleFile.getName()).toURI().toURL().toString()}" - Object[] args = [new File(node.homeDir, 'bin/plugin'), 'install', file] - setup = configureExecTask(taskName(task, node, actionName), project, setup, node, args) + for (Map.Entry plugin : node.config.plugins.entrySet()) { + String actionName = pluginTaskName('install', plugin.getKey(), 'Plugin') + setup = 
configureInstallPluginTask(taskName(task, node, actionName), project, setup, node, plugin.getValue()) } // extra setup commands @@ -133,8 +141,15 @@ class ClusterFormationTasks { /** Adds a task to extract the elasticsearch distribution */ static Task configureExtractTask(String name, Project project, Task setup, NodeInfo node) { List extractDependsOn = [project.configurations.elasticsearchDistro, setup] + /* project.configurations.elasticsearchDistro.singleFile will be an + external artifact if this is being run by a plugin not living in the + elasticsearch source tree. If this is a plugin built in the + elasticsearch source tree or this is a distro in the elasticsearch + source tree then this should be the version of elasticsearch built + by the source tree. If it isn't then Bad Things(TM) will happen. */ Task extract switch (node.config.distribution) { + case 'integ-test-zip': case 'zip': extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) { from { project.zipTree(project.configurations.elasticsearchDistro.singleFile) } @@ -149,6 +164,33 @@ class ClusterFormationTasks { into node.baseDir } break; + case 'rpm': + File rpmDatabase = new File(node.baseDir, 'rpm-database') + File rpmExtracted = new File(node.baseDir, 'rpm-extracted') + /* Delay reading the location of the rpm file until task execution */ + Object rpm = "${ -> project.configurations.elasticsearchDistro.singleFile}" + extract = project.tasks.create(name: name, type: LoggedExec, dependsOn: extractDependsOn) { + commandLine 'rpm', '--badreloc', '--nodeps', '--noscripts', '--notriggers', + '--dbpath', rpmDatabase, + '--relocate', "/=${rpmExtracted}", + '-i', rpm + doFirst { + rpmDatabase.deleteDir() + rpmExtracted.deleteDir() + } + } + break; + case 'deb': + /* Delay reading the location of the deb file until task execution */ + File debExtracted = new File(node.baseDir, 'deb-extracted') + Object deb = "${ -> project.configurations.elasticsearchDistro.singleFile}" + extract = project.tasks.create(name: name, type: LoggedExec, dependsOn: extractDependsOn) { + commandLine 'dpkg-deb', '-x', deb, debExtracted + doFirst { + debExtracted.deleteDir() + } + } + break; default: throw new InvalidUserDataException("Unknown distribution: ${node.config.distribution}") } @@ -169,29 +211,123 @@ class ClusterFormationTasks { 'node.testattr' : 'test', 'repositories.url.allowed_urls' : 'http://snapshot.test*' ] + esConfig.putAll(node.config.settings) - return project.tasks.create(name: name, type: DefaultTask, dependsOn: setup) << { - File configFile = new File(node.homeDir, 'config/elasticsearch.yml') + Task writeConfig = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup) + writeConfig.doFirst { + File configFile = new File(node.confDir, 'elasticsearch.yml') logger.info("Configuring ${configFile}") configFile.setText(esConfig.collect { key, value -> "${key}: ${value}" }.join('\n'), 'UTF-8') } } - /** Adds a task to copy plugins to a temp dir, which they will later be installed from. 
*/ + static Task configureExtraConfigFilesTask(String name, Project project, Task setup, NodeInfo node) { + if (node.config.extraConfigFiles.isEmpty()) { + return setup + } + Copy copyConfig = project.tasks.create(name: name, type: Copy, dependsOn: setup) + copyConfig.into(new File(node.homeDir, 'config')) // copy must always have a general dest dir, even though we don't use it + for (Map.Entry extraConfigFile : node.config.extraConfigFiles.entrySet()) { + copyConfig.doFirst { + // make sure the copy won't be a no-op or act on a directory + File srcConfigFile = project.file(extraConfigFile.getValue()) + if (srcConfigFile.isDirectory()) { + throw new GradleException("Source for extraConfigFile must be a file: ${srcConfigFile}") + } + if (srcConfigFile.exists() == false) { + throw new GradleException("Source file for extraConfigFile does not exist: ${srcConfigFile}") + } + } + File destConfigFile = new File(node.homeDir, 'config/' + extraConfigFile.getKey()) + copyConfig.into(destConfigFile.canonicalFile.parentFile) + .from({ extraConfigFile.getValue() }) // wrap in closure to delay resolution to execution time + .rename { destConfigFile.name } + } + return copyConfig + } + + /** + * Adds a task to copy plugins to a temp dir, which they will later be installed from. + * + * For each plugin, if the plugin has rest spec apis in its tests, those api files are also copied + * to the test resources for this project. + */ static Task configureCopyPluginsTask(String name, Project project, Task setup, NodeInfo node) { if (node.config.plugins.isEmpty()) { return setup } + Copy copyPlugins = project.tasks.create(name: name, type: Copy, dependsOn: setup) - return project.tasks.create(name: name, type: Copy, dependsOn: setup) { - into node.pluginsTmpDir - from(node.config.plugins.values()) + List pluginFiles = [] + for (Map.Entry plugin : node.config.plugins.entrySet()) { + FileCollection pluginZip + if (plugin.getValue() instanceof Project) { + Project pluginProject = plugin.getValue() + if (pluginProject.plugins.hasPlugin(PluginBuildPlugin) == false) { + throw new GradleException("Task ${name} cannot project ${pluginProject.path} which is not an esplugin") + } + String configurationName = "_plugin_${pluginProject.path}" + Configuration configuration = project.configurations.findByName(configurationName) + if (configuration == null) { + configuration = project.configurations.create(configurationName) + } + project.dependencies.add(configurationName, pluginProject) + setup.dependsOn(pluginProject.tasks.bundlePlugin) + pluginZip = configuration + + // also allow rest tests to use the rest spec from the plugin + Copy copyRestSpec = null + for (File resourceDir : pluginProject.sourceSets.test.resources.srcDirs) { + File restApiDir = new File(resourceDir, 'rest-api-spec/api') + if (restApiDir.exists() == false) continue + if (copyRestSpec == null) { + copyRestSpec = project.tasks.create(name: pluginTaskName('copy', plugin.getKey(), 'PluginRestSpec'), type: Copy) + copyPlugins.dependsOn(copyRestSpec) + copyRestSpec.into(project.sourceSets.test.output.resourcesDir) + } + copyRestSpec.from(resourceDir).include('rest-api-spec/api/**') + } + } else { + pluginZip = plugin.getValue() + } + pluginFiles.add(pluginZip) } + + copyPlugins.into(node.pluginsTmpDir) + copyPlugins.from(pluginFiles) + return copyPlugins + } + + static Task configureInstallModuleTask(String name, Project project, Task setup, NodeInfo node, Project module) { + if (node.config.distribution != 'integ-test-zip') { + throw new 
GradleException("Module ${module.path} not allowed be installed distributions other than integ-test-zip because they should already have all modules bundled!") + } + if (module.plugins.hasPlugin(PluginBuildPlugin) == false) { + throw new GradleException("Task ${name} cannot include module ${module.path} which is not an esplugin") + } + Copy installModule = project.tasks.create(name, Copy.class) + installModule.dependsOn(setup) + installModule.into(new File(node.homeDir, "modules/${module.name}")) + installModule.from({ project.zipTree(module.tasks.bundlePlugin.outputs.files.singleFile) }) + return installModule + } + + static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, Object plugin) { + FileCollection pluginZip + if (plugin instanceof Project) { + pluginZip = project.configurations.getByName("_plugin_${plugin.path}") + } else { + pluginZip = plugin + } + // delay reading the file location until execution time by wrapping in a closure within a GString + String file = "${-> new File(node.pluginsTmpDir, pluginZip.singleFile.getName()).toURI().toURL().toString()}" + Object[] args = [new File(node.homeDir, 'bin/plugin'), 'install', file] + return configureExecTask(name, project, setup, node, args) } /** Adds a task to execute a command to help setup the cluster */ static Task configureExecTask(String name, Project project, Task setup, NodeInfo node, Object[] execArgs) { - return project.tasks.create(name: name, type: Exec, dependsOn: setup) { + return project.tasks.create(name: name, type: LoggedExec, dependsOn: setup) { workingDir node.cwd if (Os.isFamily(Os.FAMILY_WINDOWS)) { executable 'cmd' @@ -200,35 +336,32 @@ class ClusterFormationTasks { executable 'sh' } args execArgs - // only show output on failure, when not in info or debug mode - if (logger.isInfoEnabled() == false) { - standardOutput = new ByteArrayOutputStream() - errorOutput = standardOutput - ignoreExitValue = true - doLast { - if (execResult.exitValue != 0) { - logger.error(standardOutput.toString()) - throw new GradleException("Process '${execArgs.join(' ')}' finished with non-zero exit value ${execResult.exitValue}") - } - } - } } } /** Adds a task to start an elasticsearch node with the given configuration */ static Task configureStartTask(String name, Project project, Task setup, NodeInfo node) { - String executable - List esArgs = [] - if (Os.isFamily(Os.FAMILY_WINDOWS)) { - executable = 'cmd' - esArgs.add('/C') - esArgs.add('call') - } else { - executable = 'sh' - } // this closure is converted into ant nodes by groovy's AntBuilder Closure antRunner = { AntBuilder ant -> + ant.exec(executable: node.executable, spawn: node.config.daemonize, dir: node.cwd, taskname: 'elasticsearch') { + node.env.each { key, value -> env(key: key, value: value) } + node.args.each { arg(value: it) } + } + } + + // this closure is the actual code to run elasticsearch + Closure elasticsearchRunner = { + // Due to how ant exec works with the spawn option, we lose all stdout/stderr from the + // process executed. To work around this, when spawning, we wrap the elasticsearch start + // command inside another shell script, which simply internally redirects the output + // of the real elasticsearch script. 
This allows ant to keep the streams open with the + // dummy process, but us to have the output available if there is an error in the + // elasticsearch start script + if (node.config.daemonize) { + node.writeWrapperScript() + } + // we must add debug options inside the closure so the config is read at execution time, as // gradle task options are not processed until the end of the configuration phase if (node.config.debug) { @@ -236,37 +369,6 @@ class ClusterFormationTasks { node.env['JAVA_OPTS'] = '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000' } - // Due to how ant exec works with the spawn option, we lose all stdout/stderr from the - // process executed. To work around this, when spawning, we wrap the elasticsearch start - // command inside another shell script, which simply internally redirects the output - // of the real elasticsearch script. This allows ant to keep the streams open with the - // dummy process, but us to have the output available if there is an error in the - // elasticsearch start script - String script = node.esScript - if (node.config.daemonize) { - String scriptName = 'run' - String argsPasser = '"$@"' - String exitMarker = "; if [ \$? != 0 ]; then touch run.failed; fi" - if (Os.isFamily(Os.FAMILY_WINDOWS)) { - scriptName += '.bat' - argsPasser = '%*' - exitMarker = "\r\n if \"%errorlevel%\" neq \"0\" ( type nul >> run.failed )" - } - File wrapperScript = new File(node.cwd, scriptName) - wrapperScript.setText("\"${script}\" ${argsPasser} > run.log 2>&1 ${exitMarker}", 'UTF-8') - script = wrapperScript.toString() - } - - ant.exec(executable: executable, spawn: node.config.daemonize, dir: node.cwd, taskname: 'elasticsearch') { - node.env.each { key, value -> env(key: key, value: value) } - arg(value: script) - node.args.each { arg(value: it) } - } - - } - - // this closure is the actual code to run elasticsearch - Closure elasticsearchRunner = { node.getCommandString().eachLine { line -> logger.info(line) } if (logger.isInfoEnabled() || node.config.daemonize == false) { @@ -338,14 +440,19 @@ class ClusterFormationTasks { // We already log the command at info level. No need to do it twice. 
node.getCommandString().eachLine { line -> logger.error(line) } } - // the waitfor failed, so dump any output we got (may be empty if info logging, but that is ok) - logger.error("Node ${node.nodeNum} ant output:") - node.buffer.toString('UTF-8').eachLine { line -> logger.error(line) } + logger.error("Node ${node.nodeNum} output:") + logger.error("|-----------------------------------------") + logger.error("| failure marker exists: ${node.failedMarker.exists()}") + logger.error("| pid file exists: ${node.pidFile.exists()}") + // the waitfor failed, so dump any output we got (if info logging this goes directly to stdout) + logger.error("|\n| [ant output]") + node.buffer.toString('UTF-8').eachLine { line -> logger.error("| ${line}") } // also dump the log file for the startup script (which will include ES logging output to stdout) if (node.startLog.exists()) { - logger.error("Node ${node.nodeNum} log:") - node.startLog.eachLine { line -> logger.error(line) } + logger.error("|\n| [log]") + node.startLog.eachLine { line -> logger.error("| ${line}") } } + logger.error("|-----------------------------------------") } throw new GradleException(msg) } @@ -389,7 +496,7 @@ class ClusterFormationTasks { /** Adds a task to kill an elasticsearch node with the given pidfile */ static Task configureStopTask(String name, Project project, Object depends, NodeInfo node) { - return project.tasks.create(name: name, type: Exec, dependsOn: depends) { + return project.tasks.create(name: name, type: LoggedExec, dependsOn: depends) { onlyIf { node.pidFile.exists() } // the pid file won't actually be read until execution time, since the read is wrapped within an inner closure of the GString ext.pid = "${ -> node.pidFile.getText('UTF-8').trim()}" @@ -418,6 +525,12 @@ class ClusterFormationTasks { } } + static String pluginTaskName(String action, String name, String suffix) { + // replace every dash followed by a character with just the uppercase character + String camelName = name.replaceAll(/-(\w)/) { _, c -> c.toUpperCase(Locale.ROOT) } + return action + camelName[0].toUpperCase(Locale.ROOT) + camelName.substring(1) + suffix + } + /** Runs an ant command, sending output to the given out and error streams */ static Object runAntCommand(Project project, Closure command, PrintStream outputStream, PrintStream errorStream) { DefaultLogger listener = new DefaultLogger( diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index 3955b9e0269..337eea3de97 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -18,6 +18,7 @@ */ package org.elasticsearch.gradle.test +import org.apache.tools.ant.taskdefs.condition.Os import org.elasticsearch.gradle.VersionProperties import org.gradle.api.InvalidUserDataException import org.gradle.api.Project @@ -45,6 +46,12 @@ class NodeInfo { /** elasticsearch home dir */ File homeDir + /** config directory */ + File confDir + + /** THE config file */ + File configFile + /** working directory for the node process */ File cwd @@ -63,8 +70,14 @@ class NodeInfo { /** arguments to start the node with */ List args + /** Executable to run the bin/elasticsearch with, either cmd or sh */ + String executable + /** Path to the elasticsearch start script */ - String esScript + File esScript + + /** script to run when running in the background */ + File wrapperScript /** buffer for ant output 
when starting this node */ ByteArrayOutputStream buffer = new ByteArrayOutputStream() @@ -77,34 +90,75 @@ class NodeInfo { baseDir = new File(project.buildDir, "cluster/${task.name} node${nodeNum}") pidFile = new File(baseDir, 'es.pid') homeDir = homeDir(baseDir, config.distribution) + confDir = confDir(baseDir, config.distribution) + configFile = new File(confDir, 'elasticsearch.yml') cwd = new File(baseDir, "cwd") failedMarker = new File(cwd, 'run.failed') startLog = new File(cwd, 'run.log') pluginsTmpDir = new File(baseDir, "plugins tmp") + args = [] + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + executable = 'cmd' + args.add('/C') + args.add('"') // quote the entire command + wrapperScript = new File(cwd, "run.bat") + esScript = new File(homeDir, 'bin/elasticsearch.bat') + } else { + executable = 'sh' + wrapperScript = new File(cwd, "run") + esScript = new File(homeDir, 'bin/elasticsearch') + } + if (config.daemonize) { + args.add("${wrapperScript}") + } else { + args.add("${esScript}") + } + env = [ 'JAVA_HOME' : project.javaHome, 'ES_GC_OPTS': config.jvmArgs // we pass these with the undocumented gc opts so the argline can set gc, etc ] - args = config.systemProperties.collect { key, value -> "-D${key}=${value}" } + args.addAll(config.systemProperties.collect { key, value -> "-D${key}=${value}" }) for (Map.Entry property : System.properties.entrySet()) { if (property.getKey().startsWith('es.')) { args.add("-D${property.getKey()}=${property.getValue()}") } } - // running with cmd on windows will look for this with the .bat extension - esScript = new File(homeDir, 'bin/elasticsearch').toString() + args.add("-Des.path.conf=${confDir}") + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + args.add('"') // end the entire command, quoted + } } /** Returns debug string for the command that started this node. */ String getCommandString() { - String esCommandString = "Elasticsearch node ${nodeNum} command: ${esScript} " - esCommandString += args.join(' ') - esCommandString += '\nenvironment:' - env.each { k, v -> esCommandString += "\n ${k}: ${v}" } + String esCommandString = "\nNode ${nodeNum} configuration:\n" + esCommandString += "|-----------------------------------------\n" + esCommandString += "| cwd: ${cwd}\n" + esCommandString += "| command: ${executable} ${args.join(' ')}\n" + esCommandString += '| environment:\n' + env.each { k, v -> esCommandString += "| ${k}: ${v}\n" } + if (config.daemonize) { + esCommandString += "|\n| [${wrapperScript.name}]\n" + wrapperScript.eachLine('UTF-8', { line -> esCommandString += " ${line}\n"}) + } + esCommandString += '|\n| [elasticsearch.yml]\n' + configFile.eachLine('UTF-8', { line -> esCommandString += "| ${line}\n" }) + esCommandString += "|-----------------------------------------" return esCommandString } + void writeWrapperScript() { + String argsPasser = '"$@"' + String exitMarker = "; if [ \$? 
!= 0 ]; then touch run.failed; fi" + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + argsPasser = '%*' + exitMarker = "\r\n if \"%errorlevel%\" neq \"0\" ( type nul >> run.failed )" + } + wrapperScript.setText("\"${esScript}\" ${argsPasser} > run.log 2>&1 ${exitMarker}", 'UTF-8') + } + /** Returns the http port for this node */ int httpPort() { return config.baseHttpPort + nodeNum @@ -119,13 +173,32 @@ class NodeInfo { static File homeDir(File baseDir, String distro) { String path switch (distro) { + case 'integ-test-zip': case 'zip': case 'tar': path = "elasticsearch-${VersionProperties.elasticsearch}" - break; + break + case 'rpm': + case 'deb': + path = "${distro}-extracted/usr/share/elasticsearch" + break default: throw new InvalidUserDataException("Unknown distribution: ${distro}") } return new File(baseDir, path) } + + static File confDir(File baseDir, String distro) { + switch (distro) { + case 'integ-test-zip': + case 'zip': + case 'tar': + return new File(homeDir(baseDir, distro), 'config') + case 'rpm': + case 'deb': + return new File(baseDir, "${distro}-extracted/etc/elasticsearch") + default: + throw new InvalidUserDataException("Unkown distribution: ${distro}") + } + } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index 7a3d067baab..cd43cd2ca67 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -31,52 +31,38 @@ import org.gradle.util.ConfigureUtil * Runs integration tests, but first starts an ES cluster, * and passes the ES cluster info as parameters to the tests. */ -class RestIntegTestTask extends RandomizedTestingTask { +public class RestIntegTestTask extends RandomizedTestingTask { ClusterConfiguration clusterConfig = new ClusterConfiguration() + /** Flag indicating whether the rest tests in the rest spec should be run. */ @Input boolean includePackaged = false - static RestIntegTestTask configure(Project project) { - Map integTestOptions = [ - name: 'integTest', - type: RestIntegTestTask, - dependsOn: 'testClasses', - group: JavaBasePlugin.VERIFICATION_GROUP, - description: 'Runs rest tests against an elasticsearch cluster.' - ] - RestIntegTestTask integTest = project.tasks.create(integTestOptions) - integTest.configure(BuildPlugin.commonTestConfig(project)) - integTest.configure { - include '**/*IT.class' - systemProperty 'tests.rest.load_packaged', 'false' - } - RandomizedTestingTask test = project.tasks.findByName('test') - if (test != null) { - integTest.classpath = test.classpath - integTest.testClassesDir = test.testClassesDir - integTest.mustRunAfter(test) - } - project.check.dependsOn(integTest) + public RestIntegTestTask() { + description = 'Runs rest tests against an elasticsearch cluster.' 
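Stepping back to the `NodeInfo` changes above: the new `homeDir`/`confDir` helpers encode where each distribution ends up under the node's working area, with the archive distributions keeping their config inside the home directory and the rpm/deb packages using an extracted `etc/elasticsearch`. A plain-Java restatement of that mapping, purely illustrative (the base directory and version string are placeholders, the real code is the Groovy above):

---------------------------------------------------------------------------
import java.io.File;

public final class DistroLayoutSketch {

    /** Mirrors NodeInfo.homeDir(): where the unpacked distribution lives under the node's base dir. */
    static File homeDir(File baseDir, String distro, String version) {
        switch (distro) {
            case "integ-test-zip":
            case "zip":
            case "tar":
                return new File(baseDir, "elasticsearch-" + version);
            case "rpm":
            case "deb":
                return new File(baseDir, distro + "-extracted/usr/share/elasticsearch");
            default:
                throw new IllegalArgumentException("Unknown distribution: " + distro);
        }
    }

    /** Mirrors NodeInfo.confDir(): archives keep config under home, packages use etc/elasticsearch. */
    static File confDir(File baseDir, String distro, String version) {
        switch (distro) {
            case "integ-test-zip":
            case "zip":
            case "tar":
                return new File(homeDir(baseDir, distro, version), "config");
            case "rpm":
            case "deb":
                return new File(baseDir, distro + "-extracted/etc/elasticsearch");
            default:
                throw new IllegalArgumentException("Unknown distribution: " + distro);
        }
    }

    public static void main(String[] args) {
        File base = new File("build/cluster/integTest node0"); // hypothetical base dir
        System.out.println(confDir(base, "deb", "3.0.0-SNAPSHOT"));
        System.out.println(confDir(base, "zip", "3.0.0-SNAPSHOT"));
    }
}
---------------------------------------------------------------------------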
+ group = JavaBasePlugin.VERIFICATION_GROUP + dependsOn(project.testClasses) + classpath = project.sourceSets.test.runtimeClasspath + testClassesDir = project.sourceSets.test.output.classesDir + + // start with the common test configuration + configure(BuildPlugin.commonTestConfig(project)) + // override/add more for rest tests + parallelism = '1' + include('**/*IT.class') + systemProperty('tests.rest.load_packaged', 'false') + + // copy the rest spec/tests into the test resources RestSpecHack.configureDependencies(project) project.afterEvaluate { - integTest.dependsOn(RestSpecHack.configureTask(project, integTest.includePackaged)) + dependsOn(RestSpecHack.configureTask(project, includePackaged)) + systemProperty('tests.cluster', "localhost:${clusterConfig.baseTransportPort}") } - return integTest - } - - RestIntegTestTask() { - project.afterEvaluate { - Task test = project.tasks.findByName('test') - if (test != null) { - mustRunAfter(test) - } + // this must run after all projects have been configured, so we know any project + // references can be accessed as a fully configured + project.gradle.projectsEvaluated { ClusterFormationTasks.setup(project, this, clusterConfig) - configure { - parallelism '1' - systemProperty 'tests.cluster', "localhost:${clusterConfig.baseTransportPort}" - } } } @@ -89,11 +75,11 @@ class RestIntegTestTask extends RandomizedTestingTask { } @Input - void cluster(Closure closure) { + public void cluster(Closure closure) { ConfigureUtil.configure(closure, clusterConfig) } - ClusterConfiguration getCluster() { + public ClusterConfiguration getCluster() { return clusterConfig } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestSpecHack.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestSpecHack.groovy index e0af8f4cc8e..43b5c2f6f38 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestSpecHack.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestSpecHack.groovy @@ -28,12 +28,12 @@ import org.gradle.api.tasks.Copy * currently must be available on the local filesystem. This class encapsulates * setting up tasks to copy the rest spec api to test resources. */ -class RestSpecHack { +public class RestSpecHack { /** * Sets dependencies needed to copy the rest spec. * @param project The project to add rest spec dependency to */ - static void configureDependencies(Project project) { + public static void configureDependencies(Project project) { project.configurations { restSpec } @@ -48,7 +48,7 @@ class RestSpecHack { * @param project The project to add the copy task to * @param includePackagedTests true if the packaged tests should be copied, false otherwise */ - static Task configureTask(Project project, boolean includePackagedTests) { + public static Task configureTask(Project project, boolean includePackagedTests) { Map copyRestSpecProps = [ name : 'copyRestSpec', type : Copy, @@ -65,7 +65,6 @@ class RestSpecHack { project.idea { module { if (scopes.TEST != null) { - // TODO: need to add the TEST scope somehow for rest test plugin... 
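One practical consequence of the `tests.cluster` system property that the `RestIntegTestTask` constructor above wires in: the test JVM receives the cluster's transport address as `localhost:<baseTransportPort>`. A minimal sketch of how test code could consume it (the helper and the fallback value are illustrative, not part of the change):

---------------------------------------------------------------------------
import java.net.InetSocketAddress;

public final class TestsClusterProperty {

    /** Parses the -Dtests.cluster=host:port value handed to the test JVM. */
    static InetSocketAddress parseTestsCluster() {
        String value = System.getProperty("tests.cluster", "localhost:9300"); // fallback is arbitrary
        int colon = value.lastIndexOf(':');
        String host = value.substring(0, colon);
        int port = Integer.parseInt(value.substring(colon + 1));
        return new InetSocketAddress(host, port);
    }

    public static void main(String[] args) {
        System.out.println(parseTestsCluster());
    }
}
---------------------------------------------------------------------------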
scopes.TEST.plus.add(project.configurations.restSpec) } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestTestPlugin.groovy index 2be80ca005a..dc9aa769388 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestTestPlugin.groovy @@ -18,22 +18,19 @@ */ package org.elasticsearch.gradle.test -import com.carrotsearch.gradle.junit4.RandomizedTestingTask import org.gradle.api.Plugin import org.gradle.api.Project -/** Configures the build to have a rest integration test. */ -class RestTestPlugin implements Plugin { +/** A plugin to add rest integration tests. Used for qa projects. */ +public class RestTestPlugin implements Plugin { @Override - void apply(Project project) { + public void apply(Project project) { project.pluginManager.apply(StandaloneTestBasePlugin) - RandomizedTestingTask integTest = RestIntegTestTask.configure(project) - RestSpecHack.configureDependencies(project) - integTest.configure { - classpath = project.sourceSets.test.runtimeClasspath - testClassesDir project.sourceSets.test.output.classesDir - } + RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class) + integTest.cluster.distribution = 'zip' // rest tests should run with the real zip + integTest.mustRunAfter(project.precommit) + project.check.dependsOn(integTest) } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy index 37f65c88703..32469218a5b 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy @@ -2,13 +2,17 @@ package org.elasticsearch.gradle.test import org.gradle.api.DefaultTask import org.gradle.api.Project +import org.gradle.api.Task import org.gradle.api.internal.tasks.options.Option +import org.gradle.util.ConfigureUtil -class RunTask extends DefaultTask { +public class RunTask extends DefaultTask { ClusterConfiguration clusterConfig = new ClusterConfiguration(baseHttpPort: 9200, baseTransportPort: 9300, daemonize: false) - RunTask() { + public RunTask() { + description = "Runs elasticsearch with '${project.path}'" + group = 'Verification' project.afterEvaluate { ClusterFormationTasks.setup(project, this, clusterConfig) } @@ -22,11 +26,10 @@ class RunTask extends DefaultTask { clusterConfig.debug = enabled; } - static void configure(Project project) { - RunTask task = project.tasks.create( - name: 'run', - type: RunTask, - description: "Runs elasticsearch with '${project.path}'", - group: 'Verification') + /** Configure the cluster that will be run. 
*/ + @Override + public Task configure(Closure closure) { + ConfigureUtil.configure(closure, clusterConfig) + return this } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestBasePlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestBasePlugin.groovy index 271bc5e58be..8c3b5f754cc 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestBasePlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestBasePlugin.groovy @@ -27,35 +27,27 @@ import org.elasticsearch.gradle.precommit.PrecommitTasks import org.gradle.api.Plugin import org.gradle.api.Project import org.gradle.api.plugins.JavaBasePlugin +import org.gradle.plugins.ide.eclipse.model.EclipseClasspath /** Configures the build to have a rest integration test. */ -class StandaloneTestBasePlugin implements Plugin { +public class StandaloneTestBasePlugin implements Plugin { @Override - void apply(Project project) { + public void apply(Project project) { project.pluginManager.apply(JavaBasePlugin) project.pluginManager.apply(RandomizedTestingPlugin) BuildPlugin.globalBuildInfo(project) BuildPlugin.configureRepositories(project) - // remove some unnecessary tasks for a qa test - project.tasks.removeAll { it.name in ['assemble', 'buildDependents'] } - // only setup tests to build - project.sourceSets { - test - } - project.dependencies { - testCompile "org.elasticsearch:test-framework:${VersionProperties.elasticsearch}" - } + project.sourceSets.create('test') + project.dependencies.add('testCompile', "org.elasticsearch:test-framework:${VersionProperties.elasticsearch}") - project.eclipse { - classpath { - sourceSets = [project.sourceSets.test] - plusConfigurations = [project.configurations.testRuntime] - } - } - PrecommitTasks.configure(project) + project.eclipse.classpath.sourceSets = [project.sourceSets.test] + project.eclipse.classpath.plusConfigurations = [project.configurations.testRuntime] + + PrecommitTasks.create(project, false) + project.check.dependsOn(project.precommit) } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy index 21bf7e9a01a..0a2cc841282 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy @@ -25,11 +25,11 @@ import org.gradle.api.Plugin import org.gradle.api.Project import org.gradle.api.plugins.JavaBasePlugin -/** Configures the build to have only unit tests. */ -class StandaloneTestPlugin implements Plugin { +/** A plugin to add tests only. Used for QA tests that run arbitrary unit tests. 
*/ +public class StandaloneTestPlugin implements Plugin { @Override - void apply(Project project) { + public void apply(Project project) { project.pluginManager.apply(StandaloneTestBasePlugin) Map testOptions = [ @@ -41,10 +41,9 @@ class StandaloneTestPlugin implements Plugin { ] RandomizedTestingTask test = project.tasks.create(testOptions) test.configure(BuildPlugin.commonTestConfig(project)) - test.configure { - classpath = project.sourceSets.test.runtimeClasspath - testClassesDir project.sourceSets.test.output.classesDir - } + test.classpath = project.sourceSets.test.runtimeClasspath + test.testClassesDir project.sourceSets.test.output.classesDir + test.mustRunAfter(project.precommit) project.check.dependsOn(test) } } diff --git a/buildSrc/src/main/resources/forbidden/all-signatures.txt b/buildSrc/src/main/resources/forbidden/all-signatures.txt index 0809cdab9c2..5a91807233b 100644 --- a/buildSrc/src/main/resources/forbidden/all-signatures.txt +++ b/buildSrc/src/main/resources/forbidden/all-signatures.txt @@ -112,3 +112,7 @@ java.lang.System#setProperty(java.lang.String,java.lang.String) java.lang.System#clearProperty(java.lang.String) java.lang.System#getProperties() @ Use BootstrapInfo.getSystemProperties for a read-only view +@defaultMessage Avoid unchecked warnings by using Collections#empty(List|Map|Set) methods +java.util.Collections#EMPTY_LIST +java.util.Collections#EMPTY_MAP +java.util.Collections#EMPTY_SET diff --git a/buildSrc/src/main/resources/forbidden/core-signatures.txt b/buildSrc/src/main/resources/forbidden/core-signatures.txt index 0f54946ea37..c6ab430595c 100644 --- a/buildSrc/src/main/resources/forbidden/core-signatures.txt +++ b/buildSrc/src/main/resources/forbidden/core-signatures.txt @@ -90,3 +90,12 @@ org.elasticsearch.common.io.PathUtils#get(java.net.URI) @defaultMessage Don't use deprecated Query#setBoost, wrap the query into a BoostQuery instead org.apache.lucene.search.Query#setBoost(float) + +@defaultMessage Constructing a DateTime without a time zone is dangerous +org.joda.time.DateTime#() +org.joda.time.DateTime#(long) +org.joda.time.DateTime#(int, int, int, int, int) +org.joda.time.DateTime#(int, int, int, int, int, int) +org.joda.time.DateTime#(int, int, int, int, int, int, int) +org.joda.time.DateTime#now() +org.joda.time.DateTimeZone#getDefault() diff --git a/buildSrc/src/main/resources/forbidden/third-party-signatures.txt b/buildSrc/src/main/resources/forbidden/third-party-signatures.txt deleted file mode 100644 index ac1ce33ac92..00000000000 --- a/buildSrc/src/main/resources/forbidden/third-party-signatures.txt +++ /dev/null @@ -1,66 +0,0 @@ -# Licensed to Elasticsearch under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on -# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, -# either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -@defaultMessage unsafe encoders/decoders have problems in the lzf compress library. 
Use variants of encode/decode functions which take Encoder/Decoder. -com.ning.compress.lzf.impl.UnsafeChunkEncoders#createEncoder(int) -com.ning.compress.lzf.impl.UnsafeChunkEncoders#createNonAllocatingEncoder(int) -com.ning.compress.lzf.impl.UnsafeChunkEncoders#createEncoder(int, com.ning.compress.BufferRecycler) -com.ning.compress.lzf.impl.UnsafeChunkEncoders#createNonAllocatingEncoder(int, com.ning.compress.BufferRecycler) -com.ning.compress.lzf.impl.UnsafeChunkDecoder#() -com.ning.compress.lzf.parallel.CompressTask -com.ning.compress.lzf.util.ChunkEncoderFactory#optimalInstance() -com.ning.compress.lzf.util.ChunkEncoderFactory#optimalInstance(int) -com.ning.compress.lzf.util.ChunkEncoderFactory#optimalNonAllocatingInstance(int) -com.ning.compress.lzf.util.ChunkEncoderFactory#optimalInstance(com.ning.compress.BufferRecycler) -com.ning.compress.lzf.util.ChunkEncoderFactory#optimalInstance(int, com.ning.compress.BufferRecycler) -com.ning.compress.lzf.util.ChunkEncoderFactory#optimalNonAllocatingInstance(int, com.ning.compress.BufferRecycler) -com.ning.compress.lzf.util.ChunkDecoderFactory#optimalInstance() -com.ning.compress.lzf.util.LZFFileInputStream#(java.io.File) -com.ning.compress.lzf.util.LZFFileInputStream#(java.io.FileDescriptor) -com.ning.compress.lzf.util.LZFFileInputStream#(java.lang.String) -com.ning.compress.lzf.util.LZFFileOutputStream#(java.io.File) -com.ning.compress.lzf.util.LZFFileOutputStream#(java.io.File, boolean) -com.ning.compress.lzf.util.LZFFileOutputStream#(java.io.FileDescriptor) -com.ning.compress.lzf.util.LZFFileOutputStream#(java.lang.String) -com.ning.compress.lzf.util.LZFFileOutputStream#(java.lang.String, boolean) -com.ning.compress.lzf.LZFEncoder#encode(byte[]) -com.ning.compress.lzf.LZFEncoder#encode(byte[], int, int) -com.ning.compress.lzf.LZFEncoder#encode(byte[], int, int, com.ning.compress.BufferRecycler) -com.ning.compress.lzf.LZFEncoder#appendEncoded(byte[], int, int, byte[], int) -com.ning.compress.lzf.LZFEncoder#appendEncoded(byte[], int, int, byte[], int, com.ning.compress.BufferRecycler) -com.ning.compress.lzf.LZFCompressingInputStream#(java.io.InputStream) -com.ning.compress.lzf.LZFDecoder#fastDecoder() -com.ning.compress.lzf.LZFDecoder#decode(byte[]) -com.ning.compress.lzf.LZFDecoder#decode(byte[], int, int) -com.ning.compress.lzf.LZFDecoder#decode(byte[], byte[]) -com.ning.compress.lzf.LZFDecoder#decode(byte[], int, int, byte[]) -com.ning.compress.lzf.LZFInputStream#(java.io.InputStream) -com.ning.compress.lzf.LZFInputStream#(java.io.InputStream, boolean) -com.ning.compress.lzf.LZFInputStream#(java.io.InputStream, com.ning.compress.BufferRecycler) -com.ning.compress.lzf.LZFInputStream#(java.io.InputStream, com.ning.compress.BufferRecycler, boolean) -com.ning.compress.lzf.LZFOutputStream#(java.io.OutputStream) -com.ning.compress.lzf.LZFOutputStream#(java.io.OutputStream, com.ning.compress.BufferRecycler) -com.ning.compress.lzf.LZFUncompressor#(com.ning.compress.DataHandler) -com.ning.compress.lzf.LZFUncompressor#(com.ning.compress.DataHandler, com.ning.compress.BufferRecycler) - -@defaultMessage Constructing a DateTime without a time zone is dangerous -org.joda.time.DateTime#() -org.joda.time.DateTime#(long) -org.joda.time.DateTime#(int, int, int, int, int) -org.joda.time.DateTime#(int, int, int, int, int, int) -org.joda.time.DateTime#(int, int, int, int, int, int, int) -org.joda.time.DateTime#now() -org.joda.time.DateTimeZone#getDefault() diff --git a/core/build.gradle b/core/build.gradle index 618e252e9a8..3db5097ea7a 100644 --- 
a/core/build.gradle +++ b/core/build.gradle @@ -62,12 +62,9 @@ dependencies { compile "com.fasterxml.jackson.dataformat:jackson-dataformat-smile:${versions.jackson}" compile "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:${versions.jackson}" compile "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:${versions.jackson}" - compile "org.yaml:snakeyaml:1.15" // used by jackson yaml // network stack compile 'io.netty:netty:3.10.5.Final' - // compression of transport protocol - compile 'com.ning:compress-lzf:1.0.2' // percentiles aggregation compile 'com.tdunning:t-digest:3.0' // precentil ranks aggregation @@ -117,6 +114,9 @@ forbiddenPatterns { exclude '**/org/elasticsearch/cluster/routing/shard_routes.txt' } +// dependency license are currently checked in distribution +dependencyLicenses.enabled = false + if (isEclipse == false || project.path == ":core-tests") { task integTest(type: RandomizedTestingTask, group: JavaBasePlugin.VERIFICATION_GROUP, @@ -129,8 +129,4 @@ if (isEclipse == false || project.path == ":core-tests") { } check.dependsOn integTest integTest.mustRunAfter test - - RestSpecHack.configureDependencies(project) - Task copyRestSpec = RestSpecHack.configureTask(project, true) - integTest.dependsOn copyRestSpec } diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index 65da4ed93e1..18376aff88f 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -51,9 +51,13 @@ public class ElasticsearchException extends RuntimeException implements ToXConte private static final Map, ElasticsearchExceptionHandle> CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE; private final Map> headers = new HashMap<>(); + /** + * Construct a ElasticsearchException with the specified cause exception. + */ public ElasticsearchException(Throwable cause) { super(cause); } + /** * Construct a ElasticsearchException with the specified detail message. 
* @@ -550,7 +554,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte NODE_DISCONNECTED_EXCEPTION(org.elasticsearch.transport.NodeDisconnectedException.class, org.elasticsearch.transport.NodeDisconnectedException::new, 84), ALREADY_EXPIRED_EXCEPTION(org.elasticsearch.index.AlreadyExpiredException.class, org.elasticsearch.index.AlreadyExpiredException::new, 85), AGGREGATION_EXECUTION_EXCEPTION(org.elasticsearch.search.aggregations.AggregationExecutionException.class, org.elasticsearch.search.aggregations.AggregationExecutionException::new, 86), - MERGE_MAPPING_EXCEPTION(org.elasticsearch.index.mapper.MergeMappingException.class, org.elasticsearch.index.mapper.MergeMappingException::new, 87), + // 87 used to be for MergeMappingException INVALID_INDEX_TEMPLATE_EXCEPTION(org.elasticsearch.indices.InvalidIndexTemplateException.class, org.elasticsearch.indices.InvalidIndexTemplateException::new, 88), PERCOLATE_EXCEPTION(org.elasticsearch.percolator.PercolateException.class, org.elasticsearch.percolator.PercolateException::new, 89), REFRESH_FAILED_ENGINE_EXCEPTION(org.elasticsearch.index.engine.RefreshFailedEngineException.class, org.elasticsearch.index.engine.RefreshFailedEngineException::new, 90), diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index ca235d53fba..4e4ec3b614f 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -270,11 +270,11 @@ public class Version { public static final int V_2_0_2_ID = 2000299; public static final Version V_2_0_2 = new Version(V_2_0_2_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_1); public static final int V_2_1_0_ID = 2010099; - public static final Version V_2_1_0 = new Version(V_2_1_0_ID, false, org.apache.lucene.util.Version.LUCENE_5_3_0); + public static final Version V_2_1_0 = new Version(V_2_1_0_ID, false, org.apache.lucene.util.Version.LUCENE_5_3_1); public static final int V_2_1_1_ID = 2010199; - public static final Version V_2_1_1 = new Version(V_2_1_1_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_0); + public static final Version V_2_1_1 = new Version(V_2_1_1_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_1); public static final int V_2_2_0_ID = 2020099; - public static final Version V_2_2_0 = new Version(V_2_2_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_0); + public static final Version V_2_2_0 = new Version(V_2_2_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_0); public static final int V_3_0_0_ID = 3000099; public static final Version V_3_0_0 = new Version(V_3_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_0); public static final Version CURRENT = V_3_0_0; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java index 0f0b2680f61..f1cc59ba760 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -74,7 +74,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction< protected void masterOperation(final ClusterHealthRequest request, final ClusterState unusedState, final ActionListener listener) { if (request.waitForEvents() != null) { final long endTimeMS = TimeValue.nsecToMSec(System.nanoTime()) + 
request.timeout().millis(); - clusterService.submitStateUpdateTask("cluster_health (wait_for_events [" + request.waitForEvents() + "])", request.waitForEvents(), new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("cluster_health (wait_for_events [" + request.waitForEvents() + "])", new ClusterStateUpdateTask(request.waitForEvents()) { @Override public ClusterState execute(ClusterState currentState) { return currentState; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java index 2d683852012..1fa64d5e7b7 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java @@ -72,14 +72,14 @@ public class NodeInfo extends BaseNodeResponse { private HttpInfo http; @Nullable - private PluginsInfo plugins; + private PluginsAndModules plugins; NodeInfo() { } public NodeInfo(Version version, Build build, DiscoveryNode node, @Nullable Map serviceAttributes, @Nullable Settings settings, @Nullable OsInfo os, @Nullable ProcessInfo process, @Nullable JvmInfo jvm, @Nullable ThreadPoolInfo threadPool, - @Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsInfo plugins) { + @Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsAndModules plugins) { super(node); this.version = version; this.build = build; @@ -172,7 +172,7 @@ public class NodeInfo extends BaseNodeResponse { } @Nullable - public PluginsInfo getPlugins() { + public PluginsAndModules getPlugins() { return this.plugins; } @@ -217,7 +217,8 @@ public class NodeInfo extends BaseNodeResponse { http = HttpInfo.readHttpInfo(in); } if (in.readBoolean()) { - plugins = PluginsInfo.readPluginsInfo(in); + plugins = new PluginsAndModules(); + plugins.readFrom(in); } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java new file mode 100644 index 00000000000..3831fd24f3e --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java @@ -0,0 +1,115 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.cluster.node.info; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.plugins.PluginInfo; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +/** + * Information about plugins and modules + */ +public class PluginsAndModules implements Streamable, ToXContent { + private List plugins; + private List modules; + + public PluginsAndModules() { + plugins = new ArrayList<>(); + modules = new ArrayList<>(); + } + + /** + * Returns an ordered list based on plugins name + */ + public List getPluginInfos() { + List plugins = new ArrayList<>(this.plugins); + Collections.sort(plugins, (p1, p2) -> p1.getName().compareTo(p2.getName())); + return plugins; + } + + /** + * Returns an ordered list based on modules name + */ + public List getModuleInfos() { + List modules = new ArrayList<>(this.modules); + Collections.sort(modules, (p1, p2) -> p1.getName().compareTo(p2.getName())); + return modules; + } + + public void addPlugin(PluginInfo info) { + plugins.add(info); + } + + public void addModule(PluginInfo info) { + modules.add(info); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + if (plugins.isEmpty() == false || modules.isEmpty() == false) { + throw new IllegalStateException("instance is already populated"); + } + int plugins_size = in.readInt(); + for (int i = 0; i < plugins_size; i++) { + plugins.add(PluginInfo.readFromStream(in)); + } + int modules_size = in.readInt(); + for (int i = 0; i < modules_size; i++) { + modules.add(PluginInfo.readFromStream(in)); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeInt(plugins.size()); + for (PluginInfo plugin : getPluginInfos()) { + plugin.writeTo(out); + } + out.writeInt(modules.size()); + for (PluginInfo module : getModuleInfos()) { + module.writeTo(out); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startArray("plugins"); + for (PluginInfo pluginInfo : getPluginInfos()) { + pluginInfo.toXContent(builder, params); + } + builder.endArray(); + // TODO: not ideal, make a better api for this (e.g. with jar metadata, and so on) + builder.startArray("modules"); + for (PluginInfo moduleInfo : getModuleInfos()) { + moduleInfo.toXContent(builder, params); + } + builder.endArray(); + + return builder; + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsInfo.java deleted file mode 100644 index 927a79b6639..00000000000 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsInfo.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.admin.cluster.node.info; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.plugins.PluginInfo; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; - -public class PluginsInfo implements Streamable, ToXContent { - static final class Fields { - static final XContentBuilderString PLUGINS = new XContentBuilderString("plugins"); - } - - private List infos; - - public PluginsInfo() { - infos = new ArrayList<>(); - } - - public PluginsInfo(int size) { - infos = new ArrayList<>(size); - } - - /** - * @return an ordered list based on plugins name - */ - public List getInfos() { - Collections.sort(infos, new Comparator() { - @Override - public int compare(final PluginInfo o1, final PluginInfo o2) { - return o1.getName().compareTo(o2.getName()); - } - }); - - return infos; - } - - public void add(PluginInfo info) { - infos.add(info); - } - - public static PluginsInfo readPluginsInfo(StreamInput in) throws IOException { - PluginsInfo infos = new PluginsInfo(); - infos.readFrom(in); - return infos; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - int plugins_size = in.readInt(); - for (int i = 0; i < plugins_size; i++) { - infos.add(PluginInfo.readFromStream(in)); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeInt(infos.size()); - for (PluginInfo plugin : getInfos()) { - plugin.writeTo(out); - } - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startArray(Fields.PLUGINS); - for (PluginInfo pluginInfo : getInfos()) { - pluginInfo.toXContent(builder, params); - } - builder.endArray(); - - return builder; - } -} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java index f916c37aec2..d7ec84fb7a5 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java @@ -68,7 +68,7 @@ public class TransportClusterRerouteAction extends TransportMasterNodeAction listener) { - clusterService.submitStateUpdateTask("cluster_reroute (api)", Priority.IMMEDIATE, new AckedClusterStateUpdateTask(request, listener) { + clusterService.submitStateUpdateTask("cluster_reroute (api)", new AckedClusterStateUpdateTask(Priority.IMMEDIATE, request, listener) { private volatile ClusterState clusterStateToSend; private volatile RoutingExplanations explanations; diff --git 
a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java index b0934781dcd..73d14a2bb11 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java @@ -91,7 +91,8 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct final Settings.Builder transientUpdates = Settings.settingsBuilder(); final Settings.Builder persistentUpdates = Settings.settingsBuilder(); - clusterService.submitStateUpdateTask("cluster_update_settings", Priority.IMMEDIATE, new AckedClusterStateUpdateTask(request, listener) { + clusterService.submitStateUpdateTask("cluster_update_settings", + new AckedClusterStateUpdateTask(Priority.IMMEDIATE, request, listener) { private volatile boolean changed = false; @@ -132,7 +133,8 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct // in the components (e.g. FilterAllocationDecider), so the changes made by the first call aren't visible // to the components until the ClusterStateListener instances have been invoked, but are visible after // the first update task has been completed. - clusterService.submitStateUpdateTask("reroute_after_cluster_update_settings", Priority.URGENT, new AckedClusterStateUpdateTask(request, listener) { + clusterService.submitStateUpdateTask("reroute_after_cluster_update_settings", + new AckedClusterStateUpdateTask(Priority.URGENT, request, listener) { @Override public boolean mustAck(DiscoveryNode discoveryNode) { diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java index fe8d4121c75..d8f2a5bbd20 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java @@ -74,7 +74,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable { versions.add(nodeResponse.nodeInfo().getVersion()); process.addNodeStats(nodeResponse.nodeStats()); jvm.addNodeInfoStats(nodeResponse.nodeInfo(), nodeResponse.nodeStats()); - plugins.addAll(nodeResponse.nodeInfo().getPlugins().getInfos()); + plugins.addAll(nodeResponse.nodeInfo().getPlugins().getPluginInfos()); // now do the stats that should be deduped by hardware (implemented by ip deduping) TransportAddress publishAddress = nodeResponse.nodeInfo().getTransport().address().publishAddress(); diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index 260fd5e732d..02e0ea40d65 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -46,9 +46,10 @@ import java.util.List; import static org.elasticsearch.action.ValidateActions.addValidationError; /** - * A bulk request holds an ordered {@link IndexRequest}s and {@link DeleteRequest}s and allows to executes - * it in a single batch. + * A bulk request holds an ordered {@link IndexRequest}s, {@link DeleteRequest}s and {@link UpdateRequest}s + * and allows to executes it in a single batch. 
* + * Note that we only support refresh on the bulk request not per item. * @see org.elasticsearch.client.Client#bulk(BulkRequest) */ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest { @@ -89,6 +90,12 @@ public class BulkRequest extends ActionRequest implements Composite return add(request, null); } + /** + * Add a request to the current BulkRequest. + * @param request Request to add + * @param payload Optional payload + * @return the current bulk request + */ public BulkRequest add(ActionRequest request, @Nullable Object payload) { if (request instanceof IndexRequest) { add((IndexRequest) request, payload); @@ -127,7 +134,8 @@ public class BulkRequest extends ActionRequest implements Composite BulkRequest internalAdd(IndexRequest request, @Nullable Object payload) { requests.add(request); addPayload(payload); - sizeInBytes += request.source().length() + REQUEST_OVERHEAD; + // lack of source is validated in validate() method + sizeInBytes += (request.source() != null ? request.source().length() : 0) + REQUEST_OVERHEAD; return this; } @@ -292,7 +300,7 @@ public class BulkRequest extends ActionRequest implements Composite String parent = null; String[] fields = defaultFields; String timestamp = null; - Long ttl = null; + TimeValue ttl = null; String opType = null; long version = Versions.MATCH_ANY; VersionType versionType = VersionType.INTERNAL; @@ -325,9 +333,9 @@ public class BulkRequest extends ActionRequest implements Composite timestamp = parser.text(); } else if ("_ttl".equals(currentFieldName) || "ttl".equals(currentFieldName)) { if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { - ttl = TimeValue.parseTimeValue(parser.text(), null, currentFieldName).millis(); + ttl = TimeValue.parseTimeValue(parser.text(), null, currentFieldName); } else { - ttl = parser.longValue(); + ttl = new TimeValue(parser.longValue()); } } else if ("op_type".equals(currentFieldName) || "opType".equals(currentFieldName)) { opType = parser.text(); @@ -478,8 +486,14 @@ public class BulkRequest extends ActionRequest implements Composite if (requests.isEmpty()) { validationException = addValidationError("no requests added", validationException); } - for (int i = 0; i < requests.size(); i++) { - ActionRequestValidationException ex = requests.get(i).validate(); + for (ActionRequest request : requests) { + // We first check if refresh has been set + if ((request instanceof DeleteRequest && ((DeleteRequest)request).refresh()) || + (request instanceof UpdateRequest && ((UpdateRequest)request).refresh()) || + (request instanceof IndexRequest && ((IndexRequest)request).refresh())) { + validationException = addValidationError("Refresh is not supported on an item request, set the refresh flag on the BulkRequest instead.", validationException); + } + ActionRequestValidationException ex = request.validate(); if (ex != null) { if (validationException == null) { validationException = new ActionRequestValidationException(); diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 88ade451404..501c003c249 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import 
org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.*; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.VersionType; @@ -136,7 +137,8 @@ public class IndexRequest extends ReplicationRequest implements Do private String parent; @Nullable private String timestamp; - private long ttl = -1; + @Nullable + private TimeValue ttl; private BytesReference source; @@ -229,6 +231,12 @@ public class IndexRequest extends ReplicationRequest implements Do if (!versionType.validateVersionForWrites(version)) { validationException = addValidationError("illegal version value [" + version + "] for version type [" + versionType.name() + "]", validationException); } + + if (ttl != null) { + if (ttl.millis() < 0) { + validationException = addValidationError("ttl must not be negative", validationException); + } + } return validationException; } @@ -324,22 +332,33 @@ public class IndexRequest extends ReplicationRequest implements Do } /** - * Sets the relative ttl value. It musts be > 0 as it makes little sense otherwise. Setting it - * to null will reset to have no ttl. + * Sets the ttl value as a time value expression. */ - public IndexRequest ttl(Long ttl) throws ElasticsearchGenerationException { - if (ttl == null) { - this.ttl = -1; - return this; - } - if (ttl <= 0) { - throw new IllegalArgumentException("TTL value must be > 0. Illegal value provided [" + ttl + "]"); - } + public IndexRequest ttl(String ttl) { + this.ttl = TimeValue.parseTimeValue(ttl, null, "ttl"); + return this; + } + + /** + * Sets the ttl as a {@link TimeValue} instance. + */ + public IndexRequest ttl(TimeValue ttl) { this.ttl = ttl; return this; } - public long ttl() { + /** + * Sets the relative ttl value in milliseconds. It musts be greater than 0 as it makes little sense otherwise. + */ + public IndexRequest ttl(long ttl) { + this.ttl = new TimeValue(ttl); + return this; + } + + /** + * Returns the ttl as a {@link TimeValue} + */ + public TimeValue ttl() { return this.ttl; } @@ -665,7 +684,7 @@ public class IndexRequest extends ReplicationRequest implements Do routing = in.readOptionalString(); parent = in.readOptionalString(); timestamp = in.readOptionalString(); - ttl = in.readLong(); + ttl = in.readBoolean() ? 
TimeValue.readTimeValue(in) : null; source = in.readBytesReference(); opType = OpType.fromId(in.readByte()); @@ -682,7 +701,12 @@ public class IndexRequest extends ReplicationRequest implements Do out.writeOptionalString(routing); out.writeOptionalString(parent); out.writeOptionalString(timestamp); - out.writeLong(ttl); + if (ttl == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + ttl.writeTo(out); + } out.writeBytesReference(source); out.writeByte(opType.id()); out.writeBoolean(refresh); diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java index 2df8fec6d22..f7134d84843 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.support.replication.ReplicationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; @@ -254,9 +255,27 @@ public class IndexRequestBuilder extends ReplicationRequestBuilder 0 as it makes little sense otherwise. + /** + * Sets the ttl value as a time value expression. + */ + public IndexRequestBuilder setTTL(String ttl) { + request.ttl(ttl); + return this; + } + + /** + * Sets the relative ttl value in milliseconds. It musts be greater than 0 as it makes little sense otherwise. + */ public IndexRequestBuilder setTTL(long ttl) { request.ttl(ttl); return this; } + + /** + * Sets the ttl as a {@link TimeValue} instance. 
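The `_ttl` handling above now carries a `TimeValue` instead of a raw millisecond `long`, so duration expressions and explicit values both round-trip through the request, and the wire format writes an optional value rather than `-1`. A small usage sketch, assuming the elasticsearch core classes are on the classpath (the output comments are approximate):

---------------------------------------------------------------------------
import org.elasticsearch.common.unit.TimeValue;

public final class TtlExamples {

    public static void main(String[] args) {
        // A duration expression as accepted by IndexRequest.ttl(String) / bulk "_ttl" fields.
        TimeValue fromExpression = TimeValue.parseTimeValue("1d", null, "ttl");

        // An explicit value, as produced by IndexRequest.ttl(long).
        TimeValue fromMillis = new TimeValue(3600000);

        System.out.println(fromExpression.millis()); // 86400000
        System.out.println(fromMillis);              // human-readable form, e.g. 1h
    }
}
---------------------------------------------------------------------------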
+ */ + public IndexRequestBuilder setTTL(TimeValue ttl) { + request.ttl(ttl); + return this; + } } diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index bc78f13433f..8bcba8ad544 100644 --- a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -223,7 +223,9 @@ public abstract class TransportBroadcastByNodeAction(); @@ -300,7 +302,9 @@ public abstract class TransportBroadcastByNodeAction shards = request.getShards(); final int totalShards = shards.size(); - logger.trace("[{}] executing operation on [{}] shards", actionName, totalShards); + if (logger.isTraceEnabled()) { + logger.trace("[{}] executing operation on [{}] shards", actionName, totalShards); + } final Object[] shardResultOrExceptions = new Object[totalShards]; int shardIndex = -1; @@ -375,10 +381,14 @@ public abstract class TransportBroadcastByNodeAction entry : shardReplicaFailures.entrySet()) { RestStatus restStatus = ExceptionsHelper.status(entry.getValue()); failuresArray[slot++] = new ReplicationResponse.ShardInfo.Failure( - shardId.getIndex(), shardId.getId(), entry.getKey(), entry.getValue(), restStatus, false + shardId.getIndex(), shardId.getId(), entry.getKey(), entry.getValue(), restStatus, false ); } } else { failuresArray = ReplicationResponse.EMPTY; } finalResponse.setShardInfo(new ReplicationResponse.ShardInfo( - totalShards, - success.get(), - failuresArray + totalShards, + success.get(), + failuresArray - ) + ) ); listener.onResponse(finalResponse); } diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 247c672b8ed..3d204888699 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -89,7 +89,7 @@ public class UpdateHelper extends AbstractComponent { throw new DocumentMissingException(shardId, request.type(), request.id()); } IndexRequest indexRequest = request.docAsUpsert() ? request.doc() : request.upsertRequest(); - Long ttl = indexRequest.ttl(); + TimeValue ttl = indexRequest.ttl(); if (request.scriptedUpsert() && request.script() != null) { // Run the script to perform the create logic IndexRequest upsert = request.upsertRequest(); @@ -100,7 +100,7 @@ public class UpdateHelper extends AbstractComponent { ctx.put("_source", upsertDoc); ctx = executeScript(request, ctx); //Allow the script to set TTL using ctx._ttl - if (ttl < 0) { + if (ttl == null) { ttl = getTTLFromScriptContext(ctx); } @@ -125,7 +125,7 @@ public class UpdateHelper extends AbstractComponent { indexRequest.index(request.index()).type(request.type()).id(request.id()) // it has to be a "create!" .create(true) - .ttl(ttl == null || ttl < 0 ? 
null : ttl) + .ttl(ttl) .refresh(request.refresh()) .routing(request.routing()) .parent(request.parent()) @@ -152,7 +152,7 @@ public class UpdateHelper extends AbstractComponent { Tuple> sourceAndContent = XContentHelper.convertToMap(getResult.internalSourceRef(), true); String operation = null; String timestamp = null; - Long ttl = null; + TimeValue ttl = null; final Map updatedSourceAsMap; final XContentType updateSourceContentType = sourceAndContent.v1(); String routing = getResult.getFields().containsKey(RoutingFieldMapper.NAME) ? getResult.field(RoutingFieldMapper.NAME).getValue().toString() : null; @@ -161,7 +161,7 @@ public class UpdateHelper extends AbstractComponent { if (request.script() == null && request.doc() != null) { IndexRequest indexRequest = request.doc(); updatedSourceAsMap = sourceAndContent.v2(); - if (indexRequest.ttl() > 0) { + if (indexRequest.ttl() != null) { ttl = indexRequest.ttl(); } timestamp = indexRequest.timestamp(); @@ -212,9 +212,9 @@ public class UpdateHelper extends AbstractComponent { // apply script to update the source // No TTL has been given in the update script so we keep previous TTL value if there is one if (ttl == null) { - ttl = getResult.getFields().containsKey(TTLFieldMapper.NAME) ? (Long) getResult.field(TTLFieldMapper.NAME).getValue() : null; - if (ttl != null) { - ttl = ttl - TimeValue.nsecToMSec(System.nanoTime() - getDateNS); // It is an approximation of exact TTL value, could be improved + Long ttlAsLong = getResult.getFields().containsKey(TTLFieldMapper.NAME) ? (Long) getResult.field(TTLFieldMapper.NAME).getValue() : null; + if (ttlAsLong != null) { + ttl = new TimeValue(ttlAsLong - TimeValue.nsecToMSec(System.nanoTime() - getDateNS));// It is an approximation of exact TTL value, could be improved } } @@ -257,17 +257,15 @@ public class UpdateHelper extends AbstractComponent { return ctx; } - private Long getTTLFromScriptContext(Map ctx) { - Long ttl = null; + private TimeValue getTTLFromScriptContext(Map ctx) { Object fetchedTTL = ctx.get("_ttl"); if (fetchedTTL != null) { if (fetchedTTL instanceof Number) { - ttl = ((Number) fetchedTTL).longValue(); - } else { - ttl = TimeValue.parseTimeValue((String) fetchedTTL, null, "_ttl").millis(); + return new TimeValue(((Number) fetchedTTL).longValue()); } + return TimeValue.parseTimeValue((String) fetchedTTL, null, "_ttl"); } - return ttl; + return null; } /** @@ -338,13 +336,10 @@ public class UpdateHelper extends AbstractComponent { } } - public static enum Operation { - + public enum Operation { UPSERT, INDEX, DELETE, NONE - } - } diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java index 5b0a5bb9c5b..30b636f4efc 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; @@ -325,7 +326,7 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder getFieldOrder() { + return 
Arrays.asList(new String[] { + "PerProcessUserTimeLimit", "PerJobUserTimeLimit", "LimitFlags", "MinimumWorkingSetSize", + "MaximumWorkingSetSize", "ActiveProcessLimit", "Affinity", "PriorityClass", "SchedulingClass" + }); + } + } + + /** + * Constant for JOBOBJECT_BASIC_LIMIT_INFORMATION in Query/Set InformationJobObject + */ + static final int JOBOBJECT_BASIC_LIMIT_INFORMATION_CLASS = 2; + + /** + * Constant for LimitFlags, indicating a process limit has been set + */ + static final int JOB_OBJECT_LIMIT_ACTIVE_PROCESS = 8; + + /** + * Get job limit and state information + * + * https://msdn.microsoft.com/en-us/library/windows/desktop/ms684925%28v=vs.85%29.aspx + * + * @param job job handle + * @param infoClass information class constant + * @param info pointer to information structure + * @param infoLength size of information structure + * @param returnLength length of data written back to structure (or null if not wanted) + * @return true if the function succeeds + */ + native boolean QueryInformationJobObject(Pointer job, int infoClass, Pointer info, int infoLength, Pointer returnLength); + + /** + * Set job limit and state information + * + * https://msdn.microsoft.com/en-us/library/windows/desktop/ms686216%28v=vs.85%29.aspx + * + * @param job job handle + * @param infoClass information class constant + * @param info pointer to information structure + * @param infoLength size of information structure + * @return true if the function succeeds + */ + native boolean SetInformationJobObject(Pointer job, int infoClass, Pointer info, int infoLength); } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java b/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java index 5db88ec254d..5356d33bb8e 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java @@ -191,7 +191,7 @@ class JNANatives { if (logger.isDebugEnabled()) { logger.debug("unable to install syscall filter", t); } - logger.warn("unable to install syscall filter: " + t.getMessage()); + logger.warn("unable to install syscall filter: ", t); } } } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java b/core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java index 8e2d96f8729..9a4a26c74e3 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java @@ -47,7 +47,7 @@ import java.util.Map; * Installs a limited form of secure computing mode, * to filters system calls to block process execution. *
- * This is only supported on the Linux, Solaris, and Mac OS X operating systems.
+ * This is supported on Linux, Solaris, FreeBSD, OpenBSD, Mac OS X, and Windows.
 * <p>
 * On Linux it currently supports amd64 and i386 architectures, requires Linux kernel 3.5 or above, and requires
 * {@code CONFIG_SECCOMP} and {@code CONFIG_SECCOMP_FILTER} compiled into the kernel.
@@ -71,6 +71,8 @@ import java.util.Map;
 *   <li>{@code PRIV_PROC_EXEC}</li>
 * </ul>
 * <p>
+ * On BSD systems, process creation is restricted with {@code setrlimit(RLIMIT_NPROC)}.
+ * <p>
 * On Mac OS X Leopard or above, a custom {@code sandbox(7)} ("Seatbelt") profile is installed that
 * denies the following rules:
 * <ul>
@@ -78,6 +80,8 @@ import java.util.Map;
 *   <li>{@code process-exec}</li>
 * </ul>
 * <p>
+ * On Windows, process creation is restricted with {@code SetInformationJobObject/ActiveProcessLimit}.
+ * <p>
    * This is not intended as a sandbox. It is another level of security, mostly intended to annoy * security researchers and make their lives more difficult in achieving "remote execution" exploits. * @see @@ -327,7 +331,8 @@ final class Seccomp { case 1: break; // already set by caller default: int errno = Native.getLastError(); - if (errno == ENOSYS) { + if (errno == EINVAL) { + // friendly error, this will be the typical case for an old kernel throw new UnsupportedOperationException("seccomp unavailable: requires kernel 3.5+ with CONFIG_SECCOMP and CONFIG_SECCOMP_FILTER compiled in"); } else { throw new UnsupportedOperationException("prctl(PR_GET_NO_NEW_PRIVS): " + JNACLibrary.strerror(errno)); @@ -534,6 +539,73 @@ final class Seccomp { logger.debug("Solaris priv_set initialization successful"); } + // BSD implementation via setrlimit(2) + + // TODO: add OpenBSD to Lucene Constants + // TODO: JNA doesn't have netbsd support, but this mechanism should work there too. + static final boolean OPENBSD = Constants.OS_NAME.startsWith("OpenBSD"); + + // not a standard limit, means something different on linux, etc! + static final int RLIMIT_NPROC = 7; + + static void bsdImpl() { + boolean supported = Constants.FREE_BSD || OPENBSD || Constants.MAC_OS_X; + if (supported == false) { + throw new IllegalStateException("bug: should not be trying to initialize RLIMIT_NPROC for an unsupported OS"); + } + + JNACLibrary.Rlimit limit = new JNACLibrary.Rlimit(); + limit.rlim_cur.setValue(0); + limit.rlim_max.setValue(0); + if (JNACLibrary.setrlimit(RLIMIT_NPROC, limit) != 0) { + throw new UnsupportedOperationException("RLIMIT_NPROC unavailable: " + JNACLibrary.strerror(Native.getLastError())); + } + + logger.debug("BSD RLIMIT_NPROC initialization successful"); + } + + // windows impl via job ActiveProcessLimit + + static void windowsImpl() { + if (!Constants.WINDOWS) { + throw new IllegalStateException("bug: should not be trying to initialize ActiveProcessLimit for an unsupported OS"); + } + + JNAKernel32Library lib = JNAKernel32Library.getInstance(); + + // create a new Job + Pointer job = lib.CreateJobObjectW(null, null); + if (job == null) { + throw new UnsupportedOperationException("CreateJobObject: " + Native.getLastError()); + } + + try { + // retrieve the current basic limits of the job + int clazz = JNAKernel32Library.JOBOBJECT_BASIC_LIMIT_INFORMATION_CLASS; + JNAKernel32Library.JOBOBJECT_BASIC_LIMIT_INFORMATION limits = new JNAKernel32Library.JOBOBJECT_BASIC_LIMIT_INFORMATION(); + limits.write(); + if (!lib.QueryInformationJobObject(job, clazz, limits.getPointer(), limits.size(), null)) { + throw new UnsupportedOperationException("QueryInformationJobObject: " + Native.getLastError()); + } + limits.read(); + // modify the number of active processes to be 1 (exactly the one process we will add to the job). + limits.ActiveProcessLimit = 1; + limits.LimitFlags = JNAKernel32Library.JOB_OBJECT_LIMIT_ACTIVE_PROCESS; + limits.write(); + if (!lib.SetInformationJobObject(job, clazz, limits.getPointer(), limits.size())) { + throw new UnsupportedOperationException("SetInformationJobObject: " + Native.getLastError()); + } + // assign ourselves to the job + if (!lib.AssignProcessToJobObject(job, lib.GetCurrentProcess())) { + throw new UnsupportedOperationException("AssignProcessToJobObject: " + Native.getLastError()); + } + } finally { + lib.CloseHandle(job); + } + + logger.debug("Windows ActiveProcessLimit initialization successful"); + } + /** * Attempt to drop the capability to execute for the process. *
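// Editor's aside (not part of the patch; the helper name is hypothetical): whichever mechanism
// init() installs in the next hunk -- seccomp/BPF on Linux, priv_set(3C) on Solaris,
// setrlimit(RLIMIT_NPROC) on the BSDs and OS X, or the job object's ActiveProcessLimit on
// Windows -- the observable effect is the same: the JVM can no longer fork/exec.
// A minimal sanity check might look like:
static void assertProcessCreationBlocked() throws Exception {
    try {
        new ProcessBuilder("ls").start();          // any exec attempt will do
        throw new AssertionError("process creation should have been blocked");
    } catch (java.io.IOException expected) {
        // denied by the installed syscall filter / resource limit / job object
    }
}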
    @@ -544,11 +616,19 @@ final class Seccomp { if (Constants.LINUX) { return linuxImpl(); } else if (Constants.MAC_OS_X) { + // try to enable both mechanisms if possible + bsdImpl(); macImpl(tmpFile); return 1; } else if (Constants.SUN_OS) { solarisImpl(); return 1; + } else if (Constants.FREE_BSD || OPENBSD) { + bsdImpl(); + return 1; + } else if (Constants.WINDOWS) { + windowsImpl(); + return 1; } else { throw new UnsupportedOperationException("syscall filtering not supported for OS: '" + Constants.OS_NAME + "'"); } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Security.java b/core/src/main/java/org/elasticsearch/bootstrap/Security.java index 3d7ce9a1fca..2d342eb5743 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -131,34 +131,48 @@ final class Security { @SuppressForbidden(reason = "proper use of URL") static Map getPluginPermissions(Environment environment) throws IOException, NoSuchAlgorithmException { Map map = new HashMap<>(); + // collect up lists of plugins and modules + List pluginsAndModules = new ArrayList<>(); if (Files.exists(environment.pluginsFile())) { try (DirectoryStream stream = Files.newDirectoryStream(environment.pluginsFile())) { for (Path plugin : stream) { - Path policyFile = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY); - if (Files.exists(policyFile)) { - // first get a list of URLs for the plugins' jars: - // we resolve symlinks so map is keyed on the normalize codebase name - List codebases = new ArrayList<>(); - try (DirectoryStream jarStream = Files.newDirectoryStream(plugin, "*.jar")) { - for (Path jar : jarStream) { - codebases.add(jar.toRealPath().toUri().toURL()); - } - } - - // parse the plugin's policy file into a set of permissions - Policy policy = readPolicy(policyFile.toUri().toURL(), codebases.toArray(new URL[codebases.size()])); - - // consult this policy for each of the plugin's jars: - for (URL url : codebases) { - if (map.put(url.getFile(), policy) != null) { - // just be paranoid ok? - throw new IllegalStateException("per-plugin permissions already granted for jar file: " + url); - } - } + pluginsAndModules.add(plugin); + } + } + } + if (Files.exists(environment.modulesFile())) { + try (DirectoryStream stream = Files.newDirectoryStream(environment.modulesFile())) { + for (Path plugin : stream) { + pluginsAndModules.add(plugin); + } + } + } + // now process each one + for (Path plugin : pluginsAndModules) { + Path policyFile = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY); + if (Files.exists(policyFile)) { + // first get a list of URLs for the plugins' jars: + // we resolve symlinks so map is keyed on the normalize codebase name + List codebases = new ArrayList<>(); + try (DirectoryStream jarStream = Files.newDirectoryStream(plugin, "*.jar")) { + for (Path jar : jarStream) { + codebases.add(jar.toRealPath().toUri().toURL()); + } + } + + // parse the plugin's policy file into a set of permissions + Policy policy = readPolicy(policyFile.toUri().toURL(), codebases.toArray(new URL[codebases.size()])); + + // consult this policy for each of the plugin's jars: + for (URL url : codebases) { + if (map.put(url.getFile(), policy) != null) { + // just be paranoid ok? 
+ throw new IllegalStateException("per-plugin permissions already granted for jar file: " + url); } } } } + return Collections.unmodifiableMap(map); } @@ -228,6 +242,7 @@ final class Security { // read-only dirs addPath(policy, "path.home", environment.binFile(), "read,readlink"); addPath(policy, "path.home", environment.libFile(), "read,readlink"); + addPath(policy, "path.home", environment.modulesFile(), "read,readlink"); addPath(policy, "path.plugins", environment.pluginsFile(), "read,readlink"); addPath(policy, "path.conf", environment.configFile(), "read,readlink"); addPath(policy, "path.scripts", environment.scriptsFile(), "read,readlink"); diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 10ef7bcb13c..33cf3479419 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -38,7 +38,9 @@ import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.Environment; @@ -46,6 +48,7 @@ import org.elasticsearch.env.EnvironmentModule; import org.elasticsearch.indices.breaker.CircuitBreakerModule; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.node.internal.InternalSettingsPreparer; +import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsModule; import org.elasticsearch.plugins.PluginsService; @@ -122,13 +125,14 @@ public class TransportClient extends AbstractClient { .put(CLIENT_TYPE_SETTING, CLIENT_TYPE) .build(); - PluginsService pluginsService = new PluginsService(settings, null, pluginClasses); + PluginsService pluginsService = new PluginsService(settings, null, null, pluginClasses); this.settings = pluginsService.updatedSettings(); Version version = Version.CURRENT; final ThreadPool threadPool = new ThreadPool(settings); - + final NetworkService networkService = new NetworkService(settings); + final SettingsFilter settingsFilter = new SettingsFilter(settings); boolean success = false; try { ModulesBuilder modules = new ModulesBuilder(); @@ -138,8 +142,8 @@ public class TransportClient extends AbstractClient { modules.add(pluginModule); } modules.add(new PluginsModule(pluginsService)); - modules.add(new SettingsModule(this.settings)); - modules.add(new NetworkModule()); + modules.add(new SettingsModule(this.settings, settingsFilter )); + modules.add(new NetworkModule(networkService)); modules.add(new ClusterNameModule(this.settings)); modules.add(new ThreadPoolModule(threadPool)); modules.add(new TransportModule(this.settings)); diff --git a/core/src/main/java/org/elasticsearch/cluster/AckedClusterStateTaskListener.java b/core/src/main/java/org/elasticsearch/cluster/AckedClusterStateTaskListener.java new file mode 100644 index 00000000000..cdd9b2204ff --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/AckedClusterStateTaskListener.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch 
under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.cluster; + +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.unit.TimeValue; + +public interface AckedClusterStateTaskListener extends ClusterStateTaskListener { + + /** + * Called to determine which nodes the acknowledgement is expected from + * + * @param discoveryNode a node + * @return true if the node is expected to send ack back, false otherwise + */ + boolean mustAck(DiscoveryNode discoveryNode); + + /** + * Called once all the nodes have acknowledged the cluster state update request. Must be + * very lightweight execution, since it gets executed on the cluster service thread. + * + * @param t optional error that might have been thrown + */ + void onAllNodesAcked(@Nullable Throwable t); + + /** + * Called once the acknowledgement timeout defined by + * {@link AckedClusterStateUpdateTask#ackTimeout()} has expired + */ + void onAckTimeout(); + + /** + * Acknowledgement timeout, maximum time interval to wait for acknowledgements + */ + TimeValue ackTimeout(); + +} diff --git a/core/src/main/java/org/elasticsearch/cluster/AckedClusterStateUpdateTask.java b/core/src/main/java/org/elasticsearch/cluster/AckedClusterStateUpdateTask.java index 21c6cd5032a..b833f6e1879 100644 --- a/core/src/main/java/org/elasticsearch/cluster/AckedClusterStateUpdateTask.java +++ b/core/src/main/java/org/elasticsearch/cluster/AckedClusterStateUpdateTask.java @@ -22,18 +22,24 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ack.AckedRequest; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Priority; import org.elasticsearch.common.unit.TimeValue; /** * An extension interface to {@link ClusterStateUpdateTask} that allows to be notified when * all the nodes have acknowledged a cluster state update request */ -public abstract class AckedClusterStateUpdateTask extends ClusterStateUpdateTask { +public abstract class AckedClusterStateUpdateTask extends ClusterStateUpdateTask implements AckedClusterStateTaskListener { private final ActionListener listener; private final AckedRequest request; protected AckedClusterStateUpdateTask(AckedRequest request, ActionListener listener) { + this(Priority.NORMAL, request, listener); + } + + protected AckedClusterStateUpdateTask(Priority priority, AckedRequest request, ActionListener listener) { + super(priority); this.listener = listener; this.request = request; } diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterService.java b/core/src/main/java/org/elasticsearch/cluster/ClusterService.java index 8a3a287bac8..b682b0cc61d 100644 --- 
a/core/src/main/java/org/elasticsearch/cluster/ClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterService.java @@ -24,7 +24,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.service.PendingClusterTask; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.unit.TimeValue; @@ -101,12 +100,35 @@ public interface ClusterService extends LifecycleComponent { void add(@Nullable TimeValue timeout, TimeoutClusterStateListener listener); /** - * Submits a task that will update the cluster state. + * Submits a cluster state update task; submitted updates will be + * batched across the same instance of executor. The exact batching + * semantics depend on the underlying implementation but a rough + * guideline is that if the update task is submitted while there + * are pending update tasks for the same executor, these update + * tasks will all be executed on the executor in a single batch + * + * @param source the source of the cluster state update task + * @param task the state needed for the cluster state update task + * @param config the cluster state update task configuration + * @param executor the cluster state update task executor; tasks + * that share the same executor will be executed + * batches on this executor + * @param listener callback after the cluster state update task + * completes + * @param the type of the cluster state update task state */ - void submitStateUpdateTask(final String source, Priority priority, final ClusterStateUpdateTask updateTask); + void submitStateUpdateTask(final String source, final T task, + final ClusterStateTaskConfig config, + final ClusterStateTaskExecutor executor, + final ClusterStateTaskListener listener); /** - * Submits a task that will update the cluster state (the task has a default priority of {@link Priority#NORMAL}). + * Submits a cluster state update task; unlike {@link #submitStateUpdateTask(String, Object, ClusterStateTaskConfig, ClusterStateTaskExecutor, ClusterStateTaskListener)}, + * submitted updates will not be batched. 
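// Editor's sketch (not part of the patch; reason, myTask, myExecutor and myListener are
// hypothetical names): a caller of the batched variant described above supplies its task
// state, a config built via ClusterStateTaskConfig.build(...), and a shared executor
// instance. Tasks submitted against the same executor instance may be executed together,
// with the executor reporting per-task outcomes through a single BatchResult.
clusterService.submitStateUpdateTask(
        "example-update [" + reason + "]",                                     // source
        myTask,                                                                // task state (T)
        ClusterStateTaskConfig.build(Priority.URGENT, TimeValue.timeValueSeconds(30)),
        myExecutor,                                                            // ClusterStateTaskExecutor<MyTask>
        myListener);                                                           // ClusterStateTaskListener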
+ * + * @param source the source of the cluster state update task + * @param updateTask the full context for the cluster state update + * task */ void submitStateUpdateTask(final String source, final ClusterStateUpdateTask updateTask); diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java index 0f0e2695878..e02775cc82a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -19,9 +19,9 @@ package org.elasticsearch.cluster; +import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; -import org.elasticsearch.cluster.DiffableUtils.KeyedReader; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -475,6 +475,17 @@ public class ClusterState implements ToXContent, Diffable { } builder.endObject(); + builder.startObject(IndexMetaData.KEY_ACTIVE_ALLOCATIONS); + for (IntObjectCursor> cursor : indexMetaData.getActiveAllocationIds()) { + builder.startArray(String.valueOf(cursor.key)); + for (String allocationId : cursor.value) { + builder.value(allocationId); + } + builder.endArray(); + } + builder.endObject(); + + // index metdata data builder.endObject(); } builder.endObject(); @@ -766,7 +777,7 @@ public class ClusterState implements ToXContent, Diffable { nodes = after.nodes.diff(before.nodes); metaData = after.metaData.diff(before.metaData); blocks = after.blocks.diff(before.blocks); - customs = DiffableUtils.diff(before.customs, after.customs); + customs = DiffableUtils.diff(before.customs, after.customs, DiffableUtils.getStringKeySerializer()); } public ClusterStateDiff(StreamInput in, ClusterState proto) throws IOException { @@ -778,17 +789,18 @@ public class ClusterState implements ToXContent, Diffable { nodes = proto.nodes.readDiffFrom(in); metaData = proto.metaData.readDiffFrom(in); blocks = proto.blocks.readDiffFrom(in); - customs = DiffableUtils.readImmutableOpenMapDiff(in, new KeyedReader() { - @Override - public Custom readFrom(StreamInput in, String key) throws IOException { - return lookupPrototypeSafe(key).readFrom(in); - } + customs = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), + new DiffableUtils.DiffableValueSerializer() { + @Override + public Custom read(StreamInput in, String key) throws IOException { + return lookupPrototypeSafe(key).readFrom(in); + } - @Override - public Diff readDiffFrom(StreamInput in, String key) throws IOException { - return lookupPrototypeSafe(key).readDiffFrom(in); - } - }); + @Override + public Diff readDiff(StreamInput in, String key) throws IOException { + return lookupPrototypeSafe(key).readDiffFrom(in); + } + }); } @Override diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskConfig.java b/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskConfig.java new file mode 100644 index 00000000000..2ef2438991e --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskConfig.java @@ -0,0 +1,92 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.cluster; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Priority; +import org.elasticsearch.common.unit.TimeValue; + +/** + * Cluster state update task configuration for timeout and priority + */ +public interface ClusterStateTaskConfig { + /** + * The timeout for this cluster state update task configuration. If + * the cluster state update task isn't processed within this + * timeout, the associated {@link ClusterStateTaskListener#onFailure(String, Throwable)} + * is invoked. + * + * @return the timeout, or null if one is not set + */ + @Nullable + TimeValue timeout(); + + /** + * The {@link Priority} for this cluster state update task configuration. + * + * @return the priority + */ + Priority priority(); + + /** + * Build a cluster state update task configuration with the + * specified {@link Priority} and no timeout. + * + * @param priority the priority for the associated cluster state + * update task + * @return the resulting cluster state update task configuration + */ + static ClusterStateTaskConfig build(Priority priority) { + return new Basic(priority, null); + } + + /** + * Build a cluster state update task configuration with the + * specified {@link Priority} and timeout. + * + * @param priority the priority for the associated cluster state + * update task + * @param timeout the timeout for the associated cluster state + * update task + * @return the result cluster state update task configuration + */ + static ClusterStateTaskConfig build(Priority priority, TimeValue timeout) { + return new Basic(priority, timeout); + } + + class Basic implements ClusterStateTaskConfig { + final TimeValue timeout; + final Priority priority; + + public Basic(Priority priority, TimeValue timeout) { + this.timeout = timeout; + this.priority = priority; + } + + @Override + public TimeValue timeout() { + return timeout; + } + + @Override + public Priority priority() { + return priority; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java b/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java new file mode 100644 index 00000000000..ab85d9540f0 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java @@ -0,0 +1,132 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.cluster; + +import java.util.IdentityHashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; + +public interface ClusterStateTaskExecutor { + /** + * Update the cluster state based on the current state and the given tasks. Return the *same instance* if no state + * should be changed. + */ + BatchResult execute(ClusterState currentState, List tasks) throws Exception; + + /** + * indicates whether this task should only run if current node is master + */ + default boolean runOnlyOnMaster() { + return true; + } + + /** + * Represents the result of a batched execution of cluster state update tasks + * @param the type of the cluster state update task + */ + class BatchResult { + final public ClusterState resultingState; + final public Map executionResults; + + /** + * Construct an execution result instance with a correspondence between the tasks and their execution result + * @param resultingState the resulting cluster state + * @param executionResults the correspondence between tasks and their outcome + */ + BatchResult(ClusterState resultingState, Map executionResults) { + this.resultingState = resultingState; + this.executionResults = executionResults; + } + + public static Builder builder() { + return new Builder<>(); + } + + public static class Builder { + private final Map executionResults = new IdentityHashMap<>(); + + public Builder success(T task) { + return result(task, TaskResult.success()); + } + + public Builder successes(Iterable tasks) { + for (T task : tasks) { + success(task); + } + return this; + } + + public Builder failure(T task, Throwable t) { + return result(task, TaskResult.failure(t)); + } + + public Builder failures(Iterable tasks, Throwable t) { + for (T task : tasks) { + failure(task, t); + } + return this; + } + + private Builder result(T task, TaskResult executionResult) { + executionResults.put(task, executionResult); + return this; + } + + public BatchResult build(ClusterState resultingState) { + return new BatchResult<>(resultingState, executionResults); + } + } + } + + final class TaskResult { + private final Throwable failure; + + private static final TaskResult SUCCESS = new TaskResult(null); + + public static TaskResult success() { + return SUCCESS; + } + + public static TaskResult failure(Throwable failure) { + return new TaskResult(failure); + } + + private TaskResult(Throwable failure) { + this.failure = failure; + } + + public boolean isSuccess() { + return failure != null; + } + + /** + * Handle the execution result with the provided consumers + * @param onSuccess handler to invoke on success + * @param onFailure handler to invoke on failure; the throwable passed through will not be null + */ + public void handle(Runnable onSuccess, Consumer onFailure) { + if (failure == null) { + onSuccess.run(); + } else { + onFailure.accept(failure); + } + } + } +} diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/Rest0IT.java b/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskListener.java similarity index 52% rename from 
test-framework/src/main/java/org/elasticsearch/test/rest/Rest0IT.java rename to core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskListener.java index e73bf347093..3bf7887cd1c 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/Rest0IT.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskListener.java @@ -16,23 +16,28 @@ * specific language governing permissions and limitations * under the License. */ +package org.elasticsearch.cluster; -package org.elasticsearch.test.rest; +import java.util.List; -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +public interface ClusterStateTaskListener { -import org.elasticsearch.test.rest.parser.RestTestParseException; + /** + * A callback called when execute fails. + */ + void onFailure(String source, Throwable t); -import java.io.IOException; - -/** Rest API tests subset 0 */ -public class Rest0IT extends ESRestTestCase { - public Rest0IT(@Name("yaml") RestTestCandidate testCandidate) { - super(testCandidate); + /** + * called when the task was rejected because the local node is no longer master + */ + default void onNoLongerMaster(String source) { + onFailure(source, new NotMasterException("no longer master. source: [" + source + "]")); } - @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { - return createParameters(0, 8); + + /** + * Called when the result of the {@link ClusterStateTaskExecutor#execute(ClusterState, List)} have been processed + * properly by all listeners. + */ + default void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { } } diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterStateUpdateTask.java b/core/src/main/java/org/elasticsearch/cluster/ClusterStateUpdateTask.java index 7fef94d5c17..3e2881134f8 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterStateUpdateTask.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterStateUpdateTask.java @@ -20,13 +20,31 @@ package org.elasticsearch.cluster; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Priority; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; + +import java.util.List; /** * A task that can update the cluster state. */ -abstract public class ClusterStateUpdateTask { +abstract public class ClusterStateUpdateTask implements ClusterStateTaskConfig, ClusterStateTaskExecutor, ClusterStateTaskListener { + + final private Priority priority; + + public ClusterStateUpdateTask() { + this(Priority.NORMAL); + } + + public ClusterStateUpdateTask(Priority priority) { + this.priority = priority; + } + + @Override + final public BatchResult execute(ClusterState currentState, List tasks) throws Exception { + ClusterState result = execute(currentState); + return BatchResult.builder().successes(tasks).build(result); + } /** * Update the cluster state based on the current state. 
Return the *same instance* if no state @@ -39,28 +57,6 @@ abstract public class ClusterStateUpdateTask { */ abstract public void onFailure(String source, Throwable t); - - /** - * indicates whether this task should only run if current node is master - */ - public boolean runOnlyOnMaster() { - return true; - } - - /** - * called when the task was rejected because the local node is no longer master - */ - public void onNoLongerMaster(String source) { - onFailure(source, new NotMasterException("no longer master. source: [" + source + "]")); - } - - /** - * Called when the result of the {@link #execute(ClusterState)} have been processed - * properly by all listeners. - */ - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - } - /** * If the cluster state update task wasn't processed by the provided timeout, call * {@link #onFailure(String, Throwable)}. May return null to indicate no timeout is needed (default). @@ -70,5 +66,8 @@ abstract public class ClusterStateUpdateTask { return null; } - + @Override + public Priority priority() { + return priority; + } } diff --git a/core/src/main/java/org/elasticsearch/cluster/Diff.java b/core/src/main/java/org/elasticsearch/cluster/Diff.java index 1a9fff246a9..76535a4b763 100644 --- a/core/src/main/java/org/elasticsearch/cluster/Diff.java +++ b/core/src/main/java/org/elasticsearch/cluster/Diff.java @@ -29,7 +29,7 @@ import java.io.IOException; public interface Diff { /** - * Applies difference to the specified part and retunrs the resulted part + * Applies difference to the specified part and returns the resulted part */ T apply(T part); diff --git a/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java b/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java index 84e0021ee00..1488f059437 100644 --- a/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java +++ b/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java @@ -19,263 +19,630 @@ package org.elasticsearch.cluster; +import com.carrotsearch.hppc.cursors.IntCursor; +import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; +import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; public final class DiffableUtils { private DiffableUtils() { } + /** + * Returns a map key serializer for String keys + */ + public static KeySerializer getStringKeySerializer() { + return StringKeySerializer.INSTANCE; + } + + /** + * Returns a map key serializer for Integer keys. Encodes as Int. + */ + public static KeySerializer getIntKeySerializer() { + return IntKeySerializer.INSTANCE; + } + + /** + * Returns a map key serializer for Integer keys. Encodes as VInt. 
+ */ + public static KeySerializer getVIntKeySerializer() { + return VIntKeySerializer.INSTANCE; + } + /** * Calculates diff between two ImmutableOpenMaps of Diffable objects */ - public static > Diff> diff(ImmutableOpenMap before, ImmutableOpenMap after) { + public static > MapDiff> diff(ImmutableOpenMap before, ImmutableOpenMap after, KeySerializer keySerializer) { assert after != null && before != null; - return new ImmutableOpenMapDiff<>(before, after); + return new ImmutableOpenMapDiff<>(before, after, keySerializer, DiffableValueSerializer.getWriteOnlyInstance()); + } + + /** + * Calculates diff between two ImmutableOpenMaps of non-diffable objects + */ + public static MapDiff> diff(ImmutableOpenMap before, ImmutableOpenMap after, KeySerializer keySerializer, NonDiffableValueSerializer valueSerializer) { + assert after != null && before != null; + return new ImmutableOpenMapDiff<>(before, after, keySerializer, valueSerializer); + } + + /** + * Calculates diff between two ImmutableOpenIntMaps of Diffable objects + */ + public static > MapDiff> diff(ImmutableOpenIntMap before, ImmutableOpenIntMap after, KeySerializer keySerializer) { + assert after != null && before != null; + return new ImmutableOpenIntMapDiff<>(before, after, keySerializer, DiffableValueSerializer.getWriteOnlyInstance()); + } + + /** + * Calculates diff between two ImmutableOpenIntMaps of non-diffable objects + */ + public static MapDiff> diff(ImmutableOpenIntMap before, ImmutableOpenIntMap after, KeySerializer keySerializer, NonDiffableValueSerializer valueSerializer) { + assert after != null && before != null; + return new ImmutableOpenIntMapDiff<>(before, after, keySerializer, valueSerializer); } /** * Calculates diff between two Maps of Diffable objects. */ - public static > Diff> diff(Map before, Map after) { + public static > MapDiff> diff(Map before, Map after, KeySerializer keySerializer) { assert after != null && before != null; - return new JdkMapDiff<>(before, after); + return new JdkMapDiff<>(before, after, keySerializer, DiffableValueSerializer.getWriteOnlyInstance()); + } + + /** + * Calculates diff between two Maps of non-diffable objects + */ + public static MapDiff> diff(Map before, Map after, KeySerializer keySerializer, NonDiffableValueSerializer valueSerializer) { + assert after != null && before != null; + return new JdkMapDiff<>(before, after, keySerializer, valueSerializer); } /** * Loads an object that represents difference between two ImmutableOpenMaps */ - public static > Diff> readImmutableOpenMapDiff(StreamInput in, KeyedReader keyedReader) throws IOException { - return new ImmutableOpenMapDiff<>(in, keyedReader); - } - - /** - * Loads an object that represents difference between two Maps. 
- */ - public static > Diff> readJdkMapDiff(StreamInput in, KeyedReader keyedReader) throws IOException { - return new JdkMapDiff<>(in, keyedReader); + public static MapDiff> readImmutableOpenMapDiff(StreamInput in, KeySerializer keySerializer, ValueSerializer valueSerializer) throws IOException { + return new ImmutableOpenMapDiff<>(in, keySerializer, valueSerializer); } /** * Loads an object that represents difference between two ImmutableOpenMaps */ - public static > Diff> readImmutableOpenMapDiff(StreamInput in, T proto) throws IOException { - return new ImmutableOpenMapDiff<>(in, new PrototypeReader<>(proto)); + public static MapDiff> readImmutableOpenIntMapDiff(StreamInput in, KeySerializer keySerializer, ValueSerializer valueSerializer) throws IOException { + return new ImmutableOpenIntMapDiff<>(in, keySerializer, valueSerializer); } /** - * Loads an object that represents difference between two Maps. + * Loads an object that represents difference between two Maps of Diffable objects */ - public static > Diff> readJdkMapDiff(StreamInput in, T proto) throws IOException { - return new JdkMapDiff<>(in, new PrototypeReader<>(proto)); + public static MapDiff> readJdkMapDiff(StreamInput in, KeySerializer keySerializer, ValueSerializer valueSerializer) throws IOException { + return new JdkMapDiff<>(in, keySerializer, valueSerializer); } /** - * A reader that can deserialize an object. The reader can select the deserialization type based on the key. It's - * used in custom metadata deserialization. + * Loads an object that represents difference between two ImmutableOpenMaps of Diffable objects using Diffable proto object */ - public interface KeyedReader { - - /** - * reads an object of the type T from the stream input - */ - T readFrom(StreamInput in, String key) throws IOException; - - /** - * reads an object that respresents differences between two objects with the type T from the stream input - */ - Diff readDiffFrom(StreamInput in, String key) throws IOException; + public static > MapDiff> readImmutableOpenMapDiff(StreamInput in, KeySerializer keySerializer, T proto) throws IOException { + return new ImmutableOpenMapDiff<>(in, keySerializer, new DiffablePrototypeValueReader<>(proto)); } /** - * Implementation of the KeyedReader that is using a prototype object for reading operations - * - * Note: this implementation is ignoring the key. + * Loads an object that represents difference between two ImmutableOpenIntMaps of Diffable objects using Diffable proto object */ - public static class PrototypeReader> implements KeyedReader { - private T proto; - - public PrototypeReader(T proto) { - this.proto = proto; - } - - @Override - public T readFrom(StreamInput in, String key) throws IOException { - return proto.readFrom(in); - } - - @Override - public Diff readDiffFrom(StreamInput in, String key) throws IOException { - return proto.readDiffFrom(in); - } + public static > MapDiff> readImmutableOpenIntMapDiff(StreamInput in, KeySerializer keySerializer, T proto) throws IOException { + return new ImmutableOpenIntMapDiff<>(in, keySerializer, new DiffablePrototypeValueReader<>(proto)); } /** - * Represents differences between two Maps of Diffable objects. 
+ * Loads an object that represents difference between two Maps of Diffable objects using Diffable proto object + */ + public static > MapDiff> readJdkMapDiff(StreamInput in, KeySerializer keySerializer, T proto) throws IOException { + return new JdkMapDiff<>(in, keySerializer, new DiffablePrototypeValueReader<>(proto)); + } + + /** + * Represents differences between two Maps of (possibly diffable) objects. * * @param the diffable object */ - private static class JdkMapDiff> extends MapDiff> { + private static class JdkMapDiff extends MapDiff> { - protected JdkMapDiff(StreamInput in, KeyedReader reader) throws IOException { - super(in, reader); + protected JdkMapDiff(StreamInput in, KeySerializer keySerializer, ValueSerializer valueSerializer) throws IOException { + super(in, keySerializer, valueSerializer); } - public JdkMapDiff(Map before, Map after) { + public JdkMapDiff(Map before, Map after, + KeySerializer keySerializer, ValueSerializer valueSerializer) { + super(keySerializer, valueSerializer); assert after != null && before != null; - for (String key : before.keySet()) { + + for (K key : before.keySet()) { if (!after.containsKey(key)) { deletes.add(key); } } - for (Map.Entry partIter : after.entrySet()) { + + for (Map.Entry partIter : after.entrySet()) { T beforePart = before.get(partIter.getKey()); if (beforePart == null) { - adds.put(partIter.getKey(), partIter.getValue()); + upserts.put(partIter.getKey(), partIter.getValue()); } else if (partIter.getValue().equals(beforePart) == false) { - diffs.put(partIter.getKey(), partIter.getValue().diff(beforePart)); + if (valueSerializer.supportsDiffableValues()) { + diffs.put(partIter.getKey(), valueSerializer.diff(partIter.getValue(), beforePart)); + } else { + upserts.put(partIter.getKey(), partIter.getValue()); + } } } } @Override - public Map apply(Map map) { - Map builder = new HashMap<>(); + public Map apply(Map map) { + Map builder = new HashMap<>(); builder.putAll(map); - for (String part : deletes) { + for (K part : deletes) { builder.remove(part); } - for (Map.Entry> diff : diffs.entrySet()) { + for (Map.Entry> diff : diffs.entrySet()) { builder.put(diff.getKey(), diff.getValue().apply(builder.get(diff.getKey()))); } - for (Map.Entry additon : adds.entrySet()) { - builder.put(additon.getKey(), additon.getValue()); + for (Map.Entry upsert : upserts.entrySet()) { + builder.put(upsert.getKey(), upsert.getValue()); } return builder; } } /** - * Represents differences between two ImmutableOpenMap of diffable objects + * Represents differences between two ImmutableOpenMap of (possibly diffable) objects * - * @param the diffable object + * @param the object type */ - private static class ImmutableOpenMapDiff> extends MapDiff> { + private static class ImmutableOpenMapDiff extends MapDiff> { - protected ImmutableOpenMapDiff(StreamInput in, KeyedReader reader) throws IOException { - super(in, reader); + protected ImmutableOpenMapDiff(StreamInput in, KeySerializer keySerializer, ValueSerializer valueSerializer) throws IOException { + super(in, keySerializer, valueSerializer); } - public ImmutableOpenMapDiff(ImmutableOpenMap before, ImmutableOpenMap after) { + public ImmutableOpenMapDiff(ImmutableOpenMap before, ImmutableOpenMap after, + KeySerializer keySerializer, ValueSerializer valueSerializer) { + super(keySerializer, valueSerializer); assert after != null && before != null; - for (ObjectCursor key : before.keys()) { + + for (ObjectCursor key : before.keys()) { if (!after.containsKey(key.value)) { deletes.add(key.value); } } - for 
(ObjectObjectCursor partIter : after) { + + for (ObjectObjectCursor partIter : after) { T beforePart = before.get(partIter.key); if (beforePart == null) { - adds.put(partIter.key, partIter.value); + upserts.put(partIter.key, partIter.value); } else if (partIter.value.equals(beforePart) == false) { - diffs.put(partIter.key, partIter.value.diff(beforePart)); + if (valueSerializer.supportsDiffableValues()) { + diffs.put(partIter.key, valueSerializer.diff(partIter.value, beforePart)); + } else { + upserts.put(partIter.key, partIter.value); + } } } } @Override - public ImmutableOpenMap apply(ImmutableOpenMap map) { - ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(); + public ImmutableOpenMap apply(ImmutableOpenMap map) { + ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(); builder.putAll(map); - for (String part : deletes) { + for (K part : deletes) { builder.remove(part); } - for (Map.Entry> diff : diffs.entrySet()) { + for (Map.Entry> diff : diffs.entrySet()) { builder.put(diff.getKey(), diff.getValue().apply(builder.get(diff.getKey()))); } - for (Map.Entry additon : adds.entrySet()) { - builder.put(additon.getKey(), additon.getValue()); + for (Map.Entry upsert : upserts.entrySet()) { + builder.put(upsert.getKey(), upsert.getValue()); } return builder.build(); } } /** - * Represents differences between two maps of diffable objects + * Represents differences between two ImmutableOpenIntMap of (possibly diffable) objects * - * This class is used as base class for different map implementations - * - * @param the diffable object + * @param the object type */ - private static abstract class MapDiff, M> implements Diff { + private static class ImmutableOpenIntMapDiff extends MapDiff> { - protected final List deletes; - protected final Map> diffs; - protected final Map adds; - - protected MapDiff() { - deletes = new ArrayList<>(); - diffs = new HashMap<>(); - adds = new HashMap<>(); + protected ImmutableOpenIntMapDiff(StreamInput in, KeySerializer keySerializer, ValueSerializer valueSerializer) throws IOException { + super(in, keySerializer, valueSerializer); } - protected MapDiff(StreamInput in, KeyedReader reader) throws IOException { + public ImmutableOpenIntMapDiff(ImmutableOpenIntMap before, ImmutableOpenIntMap after, + KeySerializer keySerializer, ValueSerializer valueSerializer) { + super(keySerializer, valueSerializer); + assert after != null && before != null; + + for (IntCursor key : before.keys()) { + if (!after.containsKey(key.value)) { + deletes.add(key.value); + } + } + + for (IntObjectCursor partIter : after) { + T beforePart = before.get(partIter.key); + if (beforePart == null) { + upserts.put(partIter.key, partIter.value); + } else if (partIter.value.equals(beforePart) == false) { + if (valueSerializer.supportsDiffableValues()) { + diffs.put(partIter.key, valueSerializer.diff(partIter.value, beforePart)); + } else { + upserts.put(partIter.key, partIter.value); + } + } + } + } + + @Override + public ImmutableOpenIntMap apply(ImmutableOpenIntMap map) { + ImmutableOpenIntMap.Builder builder = ImmutableOpenIntMap.builder(); + builder.putAll(map); + + for (Integer part : deletes) { + builder.remove(part); + } + + for (Map.Entry> diff : diffs.entrySet()) { + builder.put(diff.getKey(), diff.getValue().apply(builder.get(diff.getKey()))); + } + + for (Map.Entry upsert : upserts.entrySet()) { + builder.put(upsert.getKey(), upsert.getValue()); + } + return builder.build(); + } + } + + /** + * Represents differences between two maps of objects and is used as base class 
for different map implementations. + * + * Implements serialization. How differences are applied is left to subclasses. + * + * @param the type of map keys + * @param the type of map values + * @param the map implementation type + */ + public static abstract class MapDiff implements Diff { + + protected final List deletes; + protected final Map> diffs; // incremental updates + protected final Map upserts; // additions or full updates + protected final KeySerializer keySerializer; + protected final ValueSerializer valueSerializer; + + protected MapDiff(KeySerializer keySerializer, ValueSerializer valueSerializer) { + this.keySerializer = keySerializer; + this.valueSerializer = valueSerializer; deletes = new ArrayList<>(); diffs = new HashMap<>(); - adds = new HashMap<>(); + upserts = new HashMap<>(); + } + + protected MapDiff(StreamInput in, KeySerializer keySerializer, ValueSerializer valueSerializer) throws IOException { + this.keySerializer = keySerializer; + this.valueSerializer = valueSerializer; + deletes = new ArrayList<>(); + diffs = new HashMap<>(); + upserts = new HashMap<>(); int deletesCount = in.readVInt(); for (int i = 0; i < deletesCount; i++) { - deletes.add(in.readString()); + deletes.add(keySerializer.readKey(in)); } - int diffsCount = in.readVInt(); for (int i = 0; i < diffsCount; i++) { - String key = in.readString(); - Diff diff = reader.readDiffFrom(in, key); + K key = keySerializer.readKey(in); + Diff diff = valueSerializer.readDiff(in, key); diffs.put(key, diff); } - - int addsCount = in.readVInt(); - for (int i = 0; i < addsCount; i++) { - String key = in.readString(); - T part = reader.readFrom(in, key); - adds.put(key, part); + int upsertsCount = in.readVInt(); + for (int i = 0; i < upsertsCount; i++) { + K key = keySerializer.readKey(in); + T newValue = valueSerializer.read(in, key); + upserts.put(key, newValue); } } + + /** + * The keys that, when this diff is applied to a map, should be removed from the map. + * + * @return the list of keys that are deleted + */ + public List getDeletes() { + return deletes; + } + + /** + * Map entries that, when this diff is applied to a map, should be + * incrementally updated. The incremental update is represented using + * the {@link Diff} interface. + * + * @return the map entries that are incrementally updated + */ + public Map> getDiffs() { + return diffs; + } + + /** + * Map entries that, when this diff is applied to a map, should be + * added to the map or fully replace the previous value. 
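// Editor's sketch (not part of the patch): computing and applying a diff between two
// String-keyed maps of Diffable values with the new serializer-based helpers. Removed keys
// end up in getDeletes(), changed diffable values in getDiffs(), and new or fully replaced
// values in getUpserts(); int-keyed maps work the same way via getVIntKeySerializer().
static <T extends Diffable<T>> Map<String, T> roundTrip(Map<String, T> before, Map<String, T> after) {
    DiffableUtils.MapDiff<String, T, Map<String, T>> diff =
            DiffableUtils.diff(before, after, DiffableUtils.getStringKeySerializer());
    return diff.apply(before); // rebuilds 'after' from 'before' plus the diff
}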
+ * + * @return the map entries that are additions or full updates + */ + public Map getUpserts() { + return upserts; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(deletes.size()); - for (String delete : deletes) { - out.writeString(delete); + for (K delete : deletes) { + keySerializer.writeKey(delete, out); } - out.writeVInt(diffs.size()); - for (Map.Entry> entry : diffs.entrySet()) { - out.writeString(entry.getKey()); - entry.getValue().writeTo(out); + for (Map.Entry> entry : diffs.entrySet()) { + keySerializer.writeKey(entry.getKey(), out); + valueSerializer.writeDiff(entry.getValue(), out); } - - out.writeVInt(adds.size()); - for (Map.Entry entry : adds.entrySet()) { - out.writeString(entry.getKey()); - entry.getValue().writeTo(out); + out.writeVInt(upserts.size()); + for (Map.Entry entry : upserts.entrySet()) { + keySerializer.writeKey(entry.getKey(), out); + valueSerializer.write(entry.getValue(), out); } } } + + /** + * Provides read and write operations to serialize keys of map + * @param type of key + */ + public interface KeySerializer { + void writeKey(K key, StreamOutput out) throws IOException; + K readKey(StreamInput in) throws IOException; + } + + /** + * Serializes String keys of a map + */ + private static final class StringKeySerializer implements KeySerializer { + private static final StringKeySerializer INSTANCE = new StringKeySerializer(); + + @Override + public void writeKey(String key, StreamOutput out) throws IOException { + out.writeString(key); + } + + @Override + public String readKey(StreamInput in) throws IOException { + return in.readString(); + } + } + + /** + * Serializes Integer keys of a map as an Int + */ + private static final class IntKeySerializer implements KeySerializer { + public static final IntKeySerializer INSTANCE = new IntKeySerializer(); + + @Override + public void writeKey(Integer key, StreamOutput out) throws IOException { + out.writeInt(key); + } + + @Override + public Integer readKey(StreamInput in) throws IOException { + return in.readInt(); + } + } + + /** + * Serializes Integer keys of a map as a VInt. Requires keys to be positive. + */ + private static final class VIntKeySerializer implements KeySerializer { + public static final IntKeySerializer INSTANCE = new IntKeySerializer(); + + @Override + public void writeKey(Integer key, StreamOutput out) throws IOException { + if (key < 0) { + throw new IllegalArgumentException("Map key [" + key + "] must be positive"); + } + out.writeVInt(key); + } + + @Override + public Integer readKey(StreamInput in) throws IOException { + return in.readVInt(); + } + } + + /** + * Provides read and write operations to serialize map values. + * Reading of values can be made dependent on map key. + * + * Also provides operations to distinguish whether map values are diffable. + * + * Should not be directly implemented, instead implement either + * {@link DiffableValueSerializer} or {@link NonDiffableValueSerializer}. + * + * @param key type of map + * @param value type of map + */ + public interface ValueSerializer { + + /** + * Writes value to stream + */ + void write(V value, StreamOutput out) throws IOException; + + /** + * Reads value from stream. Reading operation can be made dependent on map key. 
+ */ + V read(StreamInput in, K key) throws IOException; + + /** + * Whether this serializer supports diffable values + */ + boolean supportsDiffableValues(); + + /** + * Computes diff if this serializer supports diffable values + */ + Diff diff(V value, V beforePart); + + /** + * Writes value as diff to stream if this serializer supports diffable values + */ + void writeDiff(Diff value, StreamOutput out) throws IOException; + + /** + * Reads value as diff from stream if this serializer supports diffable values. + * Reading operation can be made dependent on map key. + */ + Diff readDiff(StreamInput in, K key) throws IOException; + } + + /** + * Serializer for Diffable map values. Needs to implement read and readDiff methods. + * + * @param type of map keys + * @param type of map values + */ + public static abstract class DiffableValueSerializer> implements ValueSerializer { + private static final DiffableValueSerializer WRITE_ONLY_INSTANCE = new DiffableValueSerializer() { + @Override + public Object read(StreamInput in, Object key) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public Diff readDiff(StreamInput in, Object key) throws IOException { + throw new UnsupportedOperationException(); + } + }; + + private static > DiffableValueSerializer getWriteOnlyInstance() { + return WRITE_ONLY_INSTANCE; + } + + @Override + public boolean supportsDiffableValues() { + return true; + } + + @Override + public Diff diff(V value, V beforePart) { + return value.diff(beforePart); + } + + @Override + public void write(V value, StreamOutput out) throws IOException { + value.writeTo(out); + } + + public void writeDiff(Diff value, StreamOutput out) throws IOException { + value.writeTo(out); + } + } + + /** + * Serializer for non-diffable map values + * + * @param type of map keys + * @param type of map values + */ + public static abstract class NonDiffableValueSerializer implements ValueSerializer { + @Override + public boolean supportsDiffableValues() { + return false; + } + + @Override + public Diff diff(V value, V beforePart) { + throw new UnsupportedOperationException(); + } + + @Override + public void writeDiff(Diff value, StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public Diff readDiff(StreamInput in, K key) throws IOException { + throw new UnsupportedOperationException(); + } + } + + /** + * Implementation of the ValueSerializer that uses a prototype object for reading operations + * + * Note: this implementation is ignoring the key. 
+ */ + public static class DiffablePrototypeValueReader> extends DiffableValueSerializer { + private final V proto; + + public DiffablePrototypeValueReader(V proto) { + this.proto = proto; + } + + @Override + public V read(StreamInput in, K key) throws IOException { + return proto.readFrom(in); + } + + @Override + public Diff readDiff(StreamInput in, K key) throws IOException { + return proto.readDiffFrom(in); + } + } + + /** + * Implementation of ValueSerializer that serializes immutable sets + * + * @param type of map key + */ + public static class StringSetValueSerializer extends NonDiffableValueSerializer> { + private static final StringSetValueSerializer INSTANCE = new StringSetValueSerializer(); + + public static StringSetValueSerializer getInstance() { + return INSTANCE; + } + + @Override + public void write(Set value, StreamOutput out) throws IOException { + out.writeStringArray(value.toArray(new String[value.size()])); + } + + @Override + public Set read(StreamInput in, K key) throws IOException { + return Collections.unmodifiableSet(new HashSet<>(Arrays.asList(in.readStringArray()))); + } + } } diff --git a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java index d0eb29d6b22..f8507e5b689 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaDataMappingService; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; @@ -57,7 +56,7 @@ public class NodeMappingRefreshAction extends AbstractComponent { public void nodeMappingRefresh(final ClusterState state, final NodeMappingRefreshRequest request) { final DiscoveryNodes nodes = state.nodes(); if (nodes.masterNode() == null) { - logger.warn("can't send mapping refresh for [{}][{}], no master known.", request.index(), Strings.arrayToCommaDelimitedString(request.types())); + logger.warn("can't send mapping refresh for [{}], no master known.", request.index()); return; } transportService.sendRequest(nodes.masterNode(), ACTION_NAME, request, EmptyTransportResponseHandler.INSTANCE_SAME); @@ -67,7 +66,7 @@ public class NodeMappingRefreshAction extends AbstractComponent { @Override public void messageReceived(NodeMappingRefreshRequest request, TransportChannel channel) throws Exception { - metaDataMappingService.refreshMapping(request.index(), request.indexUUID(), request.types()); + metaDataMappingService.refreshMapping(request.index(), request.indexUUID()); channel.sendResponse(TransportResponse.Empty.INSTANCE); } } @@ -76,16 +75,14 @@ public class NodeMappingRefreshAction extends AbstractComponent { private String index; private String indexUUID = IndexMetaData.INDEX_UUID_NA_VALUE; - private String[] types; private String nodeId; public NodeMappingRefreshRequest() { } - public NodeMappingRefreshRequest(String index, String indexUUID, String[] types, String nodeId) { + public NodeMappingRefreshRequest(String index, String indexUUID, String nodeId) { this.index = index; this.indexUUID = indexUUID; - this.types = 
types; this.nodeId = nodeId; } @@ -107,11 +104,6 @@ public class NodeMappingRefreshAction extends AbstractComponent { return indexUUID; } - - public String[] types() { - return types; - } - public String nodeId() { return nodeId; } @@ -120,7 +112,6 @@ public class NodeMappingRefreshAction extends AbstractComponent { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(index); - out.writeStringArray(types); out.writeString(nodeId); out.writeString(indexUUID); } @@ -129,7 +120,6 @@ public class NodeMappingRefreshAction extends AbstractComponent { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); index = in.readString(); - types = in.readStringArray(); nodeId = in.readString(); indexUUID = in.readString(); } diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 83897baa50d..d09df094a68 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -20,9 +20,7 @@ package org.elasticsearch.cluster.action.shard; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingService; @@ -38,15 +36,12 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.*; import java.io.IOException; import java.util.ArrayList; import java.util.List; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.CountDownLatch; import static org.elasticsearch.cluster.routing.ShardRouting.readShardRoutingEntry; @@ -63,9 +58,6 @@ public class ShardStateAction extends AbstractComponent { private final AllocationService allocationService; private final RoutingService routingService; - private final BlockingQueue startedShardsQueue = ConcurrentCollections.newBlockingQueue(); - private final BlockingQueue failedShardQueue = ConcurrentCollections.newBlockingQueue(); - @Inject public ShardStateAction(Settings settings, ClusterService clusterService, TransportService transportService, AllocationService allocationService, RoutingService routingService) { @@ -141,104 +133,94 @@ public class ShardStateAction extends AbstractComponent { }); } + private final ShardFailedClusterStateHandler shardFailedClusterStateHandler = new ShardFailedClusterStateHandler(); + private void handleShardFailureOnMaster(final ShardRoutingEntry shardRoutingEntry) { logger.warn("{} received shard failed for {}", shardRoutingEntry.failure, shardRoutingEntry.shardRouting.shardId(), shardRoutingEntry); - failedShardQueue.add(shardRoutingEntry); - clusterService.submitStateUpdateTask("shard-failed (" + shardRoutingEntry.shardRouting + "), message [" + shardRoutingEntry.message + "]", Priority.HIGH, new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask( + "shard-failed (" + 
shardRoutingEntry.shardRouting + "), message [" + shardRoutingEntry.message + "]", + shardRoutingEntry, + ClusterStateTaskConfig.build(Priority.HIGH), + shardFailedClusterStateHandler, + shardFailedClusterStateHandler); + } - @Override - public ClusterState execute(ClusterState currentState) { - if (shardRoutingEntry.processed) { - return currentState; - } - - List shardRoutingEntries = new ArrayList<>(); - failedShardQueue.drainTo(shardRoutingEntries); - - // nothing to process (a previous event has processed it already) - if (shardRoutingEntries.isEmpty()) { - return currentState; - } - - List shardRoutingsToBeApplied = new ArrayList<>(shardRoutingEntries.size()); - - // mark all entries as processed - for (ShardRoutingEntry entry : shardRoutingEntries) { - entry.processed = true; - shardRoutingsToBeApplied.add(new FailedRerouteAllocation.FailedShard(entry.shardRouting, entry.message, entry.failure)); - } - - RoutingAllocation.Result routingResult = allocationService.applyFailedShards(currentState, shardRoutingsToBeApplied); - if (!routingResult.changed()) { - return currentState; - } - return ClusterState.builder(currentState).routingResult(routingResult).build(); + class ShardFailedClusterStateHandler implements ClusterStateTaskExecutor, ClusterStateTaskListener { + @Override + public BatchResult execute(ClusterState currentState, List tasks) throws Exception { + BatchResult.Builder batchResultBuilder = BatchResult.builder(); + List shardRoutingsToBeApplied = new ArrayList<>(tasks.size()); + for (ShardRoutingEntry task : tasks) { + shardRoutingsToBeApplied.add(new FailedRerouteAllocation.FailedShard(task.shardRouting, task.message, task.failure)); } - - @Override - public void onFailure(String source, Throwable t) { - logger.error("unexpected failure during [{}]", t, source); + ClusterState maybeUpdatedState = currentState; + try { + RoutingAllocation.Result result = allocationService.applyFailedShards(currentState, shardRoutingsToBeApplied); + if (result.changed()) { + maybeUpdatedState = ClusterState.builder(currentState).routingResult(result).build(); + } + batchResultBuilder.successes(tasks); + } catch (Throwable t) { + batchResultBuilder.failures(tasks, t); } + return batchResultBuilder.build(maybeUpdatedState); + } - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { if (oldState != newState && newState.getRoutingNodes().unassigned().size() > 0) { logger.trace("unassigned shards after shard failures. 
scheduling a reroute."); routingService.reroute("unassigned shards after shard failures, scheduling a reroute"); } - } - }); + } + + @Override + public void onFailure(String source, Throwable t) { + logger.error("unexpected failure during [{}]", t, source); + } } + private final ShardStartedClusterStateHandler shardStartedClusterStateHandler = + new ShardStartedClusterStateHandler(); + private void shardStartedOnMaster(final ShardRoutingEntry shardRoutingEntry) { logger.debug("received shard started for {}", shardRoutingEntry); - // buffer shard started requests, and the state update tasks will simply drain it - // this is to optimize the number of "started" events we generate, and batch them - // possibly, we can do time based batching as well, but usually, we would want to - // process started events as fast as possible, to make shards available - startedShardsQueue.add(shardRoutingEntry); - clusterService.submitStateUpdateTask("shard-started (" + shardRoutingEntry.shardRouting + "), reason [" + shardRoutingEntry.message + "]", Priority.URGENT, - new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) { + clusterService.submitStateUpdateTask( + "shard-started (" + shardRoutingEntry.shardRouting + "), reason [" + shardRoutingEntry.message + "]", + shardRoutingEntry, + ClusterStateTaskConfig.build(Priority.URGENT), + shardStartedClusterStateHandler, + shardStartedClusterStateHandler); + } - if (shardRoutingEntry.processed) { - return currentState; - } + class ShardStartedClusterStateHandler implements ClusterStateTaskExecutor, ClusterStateTaskListener { + @Override + public BatchResult execute(ClusterState currentState, List tasks) throws Exception { + BatchResult.Builder builder = BatchResult.builder(); + List shardRoutingsToBeApplied = new ArrayList<>(tasks.size()); + for (ShardRoutingEntry task : tasks) { + shardRoutingsToBeApplied.add(task.shardRouting); + } + ClusterState maybeUpdatedState = currentState; + try { + RoutingAllocation.Result result = + allocationService.applyStartedShards(currentState, shardRoutingsToBeApplied, true); + if (result.changed()) { + maybeUpdatedState = ClusterState.builder(currentState).routingResult(result).build(); + } + builder.successes(tasks); + } catch (Throwable t) { + builder.failures(tasks, t); + } - List shardRoutingEntries = new ArrayList<>(); - startedShardsQueue.drainTo(shardRoutingEntries); + return builder.build(maybeUpdatedState); + } - // nothing to process (a previous event has processed it already) - if (shardRoutingEntries.isEmpty()) { - return currentState; - } - - List shardRoutingToBeApplied = new ArrayList<>(shardRoutingEntries.size()); - - // mark all entries as processed - for (ShardRoutingEntry entry : shardRoutingEntries) { - entry.processed = true; - shardRoutingToBeApplied.add(entry.shardRouting); - } - - if (shardRoutingToBeApplied.isEmpty()) { - return currentState; - } - - RoutingAllocation.Result routingResult = allocationService.applyStartedShards(currentState, shardRoutingToBeApplied, true); - if (!routingResult.changed()) { - return currentState; - } - return ClusterState.builder(currentState).routingResult(routingResult).build(); - } - - @Override - public void onFailure(String source, Throwable t) { - logger.error("unexpected failure during [{}]", t, source); - } - }); + @Override + public void onFailure(String source, Throwable t) { + logger.error("unexpected failure during [{}]", t, source); + } } private class ShardFailedTransportHandler implements TransportRequestHandler 
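Both handlers above follow the same shape: the old per-action blocking queues and the volatile processed flag are gone, and batching now lives in the cluster service, which hands every pending task for a given executor to a single execute(currentState, tasks) call. A minimal, non-authoritative sketch of that executor/listener pattern, using a hypothetical no-op task type that is not part of this change:

---------------------------------------------------------------------------
import java.util.List;

import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateTaskExecutor;
import org.elasticsearch.cluster.ClusterStateTaskListener;

// Hypothetical no-op task type, purely for illustration.
class NoopTask {}

class NoopExecutor implements ClusterStateTaskExecutor<NoopTask>, ClusterStateTaskListener {
    @Override
    public BatchResult<NoopTask> execute(ClusterState currentState, List<NoopTask> tasks) throws Exception {
        // every task queued since the last run arrives here in one list and shares one resulting state
        return BatchResult.<NoopTask>builder().successes(tasks).build(currentState);
    }

    @Override
    public void onFailure(String source, Throwable t) {
        // invoked per task if the batch could not be committed
    }
}
---------------------------------------------------------------------------

Submission then pairs the task with a config, the executor, and a listener, e.g. clusterService.submitStateUpdateTask(source, task, ClusterStateTaskConfig.build(Priority.HIGH), executor, listener), exactly as the shard-failed and shard-started handlers do.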
{ @@ -266,8 +248,6 @@ public class ShardStateAction extends AbstractComponent { String message; Throwable failure; - volatile boolean processed; // state field, no need to serialize - public ShardRoutingEntry() { } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index 5c03ba8b163..a1cdf26faab 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.LongArrayList; +import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.Version; @@ -32,6 +33,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeFilters; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.compress.CompressedXContent; @@ -60,8 +62,8 @@ import static org.elasticsearch.common.settings.Settings.*; public class IndexMetaData implements Diffable, FromXContentBuilder, ToXContent { public static final IndexMetaData PROTO = IndexMetaData.builder("") - .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) - .numberOfShards(1).numberOfReplicas(0).build(); + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1).numberOfReplicas(0).build(); public interface Custom extends Diffable, ToXContent { @@ -168,6 +170,14 @@ public class IndexMetaData implements Diffable, FromXContentBuild public static final String SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE = "index.shared_filesystem.recover_on_any_node"; public static final String INDEX_UUID_NA_VALUE = "_na_"; + public static final String KEY_ACTIVE_ALLOCATIONS = "active_allocations"; + static final String KEY_VERSION = "version"; + static final String KEY_SETTINGS = "settings"; + static final String KEY_STATE = "state"; + static final String KEY_MAPPINGS = "mappings"; + static final String KEY_ALIASES = "aliases"; + static final String KEY_PRIMARY_TERMS = "primary_terms"; + private final int numberOfShards; private final int numberOfReplicas; @@ -185,6 +195,8 @@ public class IndexMetaData implements Diffable, FromXContentBuild private final ImmutableOpenMap customs; + private final ImmutableOpenIntMap> activeAllocationIds; + private transient final int totalNumberOfShards; private final DiscoveryNodeFilters requireFilters; @@ -195,67 +207,31 @@ public class IndexMetaData implements Diffable, FromXContentBuild private final Version indexUpgradedVersion; private final org.apache.lucene.util.Version minimumCompatibleLuceneVersion; - private IndexMetaData(String index, long version, long[] primaryTerms, State state, Settings settings, ImmutableOpenMap mappings, ImmutableOpenMap aliases, ImmutableOpenMap customs) { - Integer maybeNumberOfShards = settings.getAsInt(SETTING_NUMBER_OF_SHARDS, null); - if (maybeNumberOfShards == null) { - throw new IllegalArgumentException("must specify numberOfShards for index [" + index + "]"); - } - int numberOfShards = maybeNumberOfShards; - 
if (numberOfShards <= 0) { - throw new IllegalArgumentException("must specify positive number of shards for index [" + index + "]"); - } + private IndexMetaData(String index, long version, long[] primaryTerms, State state, int numberOfShards, int numberOfReplicas, Settings settings, + ImmutableOpenMap mappings, ImmutableOpenMap aliases, + ImmutableOpenMap customs, ImmutableOpenIntMap> activeAllocationIds, + DiscoveryNodeFilters requireFilters, DiscoveryNodeFilters includeFilters, DiscoveryNodeFilters excludeFilters, + Version indexCreatedVersion, Version indexUpgradedVersion, org.apache.lucene.util.Version minimumCompatibleLuceneVersion) { - Integer maybeNumberOfReplicas = settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, null); - if (maybeNumberOfReplicas == null) { - throw new IllegalArgumentException("must specify numberOfReplicas for index [" + index + "]"); - } - int numberOfReplicas = maybeNumberOfReplicas; - if (numberOfReplicas < 0) { - throw new IllegalArgumentException("must specify non-negative number of shards for index [" + index + "]"); - } - this.settings = settings; this.index = index; this.version = version; this.primaryTerms = primaryTerms; assert primaryTerms.length == numberOfShards; this.state = state; - this.mappings = mappings; - this.customs = customs; this.numberOfShards = numberOfShards; this.numberOfReplicas = numberOfReplicas; this.totalNumberOfShards = numberOfShards * (numberOfReplicas + 1); + this.settings = settings; + this.mappings = mappings; + this.customs = customs; this.aliases = aliases; - - Map requireMap = settings.getByPrefix("index.routing.allocation.require.").getAsMap(); - if (requireMap.isEmpty()) { - requireFilters = null; - } else { - requireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap); - } - Map includeMap = settings.getByPrefix("index.routing.allocation.include.").getAsMap(); - if (includeMap.isEmpty()) { - includeFilters = null; - } else { - includeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap); - } - Map excludeMap = settings.getByPrefix("index.routing.allocation.exclude.").getAsMap(); - if (excludeMap.isEmpty()) { - excludeFilters = null; - } else { - excludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, excludeMap); - } - indexCreatedVersion = Version.indexCreated(settings); - indexUpgradedVersion = settings.getAsVersion(IndexMetaData.SETTING_VERSION_UPGRADED, indexCreatedVersion); - String stringLuceneVersion = settings.get(SETTING_VERSION_MINIMUM_COMPATIBLE); - if (stringLuceneVersion != null) { - try { - this.minimumCompatibleLuceneVersion = org.apache.lucene.util.Version.parse(stringLuceneVersion); - } catch (ParseException ex) { - throw new IllegalStateException("Cannot parse lucene version [" + stringLuceneVersion + "] in the [" + SETTING_VERSION_MINIMUM_COMPATIBLE + "] setting", ex); - } - } else { - this.minimumCompatibleLuceneVersion = null; - } + this.activeAllocationIds = activeAllocationIds; + this.requireFilters = requireFilters; + this.includeFilters = includeFilters; + this.excludeFilters = excludeFilters; + this.indexCreatedVersion = indexCreatedVersion; + this.indexUpgradedVersion = indexUpgradedVersion; + this.minimumCompatibleLuceneVersion = minimumCompatibleLuceneVersion; } public String getIndex() { @@ -377,6 +353,15 @@ public class IndexMetaData implements Diffable, FromXContentBuild return (T) customs.get(type); } + public ImmutableOpenIntMap> getActiveAllocationIds() { + return activeAllocationIds; + } + + public Set activeAllocationIds(int shardId) { + assert shardId >= 0 
&& shardId < numberOfShards; + return activeAllocationIds.get(shardId); + } + @Nullable public DiscoveryNodeFilters requireFilters() { return requireFilters; @@ -429,6 +414,9 @@ public class IndexMetaData implements Diffable, FromXContentBuild if (Arrays.equals(primaryTerms, that.primaryTerms) == false) { return false; } + if (!activeAllocationIds.equals(that.activeAllocationIds)) { + return false; + } return true; } @@ -442,6 +430,7 @@ public class IndexMetaData implements Diffable, FromXContentBuild result = 31 * result + mappings.hashCode(); result = 31 * result + customs.hashCode(); result = 31 * result + Arrays.hashCode(primaryTerms); + result = 31 * result + activeAllocationIds.hashCode(); return result; } @@ -476,7 +465,8 @@ public class IndexMetaData implements Diffable, FromXContentBuild private final Settings settings; private final Diff> mappings; private final Diff> aliases; - private Diff> customs; + private final Diff> customs; + private final Diff>> activeAllocationIds; public IndexMetaDataDiff(IndexMetaData before, IndexMetaData after) { index = after.index; @@ -484,9 +474,11 @@ public class IndexMetaData implements Diffable, FromXContentBuild state = after.state; settings = after.settings; primaryTerms = after.primaryTerms; - mappings = DiffableUtils.diff(before.mappings, after.mappings); - aliases = DiffableUtils.diff(before.aliases, after.aliases); - customs = DiffableUtils.diff(before.customs, after.customs); + mappings = DiffableUtils.diff(before.mappings, after.mappings, DiffableUtils.getStringKeySerializer()); + aliases = DiffableUtils.diff(before.aliases, after.aliases, DiffableUtils.getStringKeySerializer()); + customs = DiffableUtils.diff(before.customs, after.customs, DiffableUtils.getStringKeySerializer()); + activeAllocationIds = DiffableUtils.diff(before.activeAllocationIds, after.activeAllocationIds, + DiffableUtils.getVIntKeySerializer(), DiffableUtils.StringSetValueSerializer.getInstance()); } public IndexMetaDataDiff(StreamInput in) throws IOException { @@ -495,19 +487,22 @@ public class IndexMetaData implements Diffable, FromXContentBuild state = State.fromId(in.readByte()); settings = Settings.readSettingsFromStream(in); primaryTerms = in.readVLongArray(); - mappings = DiffableUtils.readImmutableOpenMapDiff(in, MappingMetaData.PROTO); - aliases = DiffableUtils.readImmutableOpenMapDiff(in, AliasMetaData.PROTO); - customs = DiffableUtils.readImmutableOpenMapDiff(in, new DiffableUtils.KeyedReader() { - @Override - public Custom readFrom(StreamInput in, String key) throws IOException { - return lookupPrototypeSafe(key).readFrom(in); - } + mappings = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), MappingMetaData.PROTO); + aliases = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), AliasMetaData.PROTO); + customs = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), + new DiffableUtils.DiffableValueSerializer() { + @Override + public Custom read(StreamInput in, String key) throws IOException { + return lookupPrototypeSafe(key).readFrom(in); + } - @Override - public Diff readDiffFrom(StreamInput in, String key) throws IOException { - return lookupPrototypeSafe(key).readDiffFrom(in); - } - }); + @Override + public Diff readDiff(StreamInput in, String key) throws IOException { + return lookupPrototypeSafe(key).readDiffFrom(in); + } + }); + activeAllocationIds = DiffableUtils.readImmutableOpenIntMapDiff(in, DiffableUtils.getVIntKeySerializer(), + 
DiffableUtils.StringSetValueSerializer.getInstance()); } @Override @@ -520,6 +515,7 @@ public class IndexMetaData implements Diffable, FromXContentBuild mappings.writeTo(out); aliases.writeTo(out); customs.writeTo(out); + activeAllocationIds.writeTo(out); } @Override @@ -532,6 +528,7 @@ public class IndexMetaData implements Diffable, FromXContentBuild builder.mappings.putAll(mappings.apply(part.mappings)); builder.aliases.putAll(aliases.apply(part.aliases)); builder.customs.putAll(customs.apply(part.customs)); + builder.activeAllocationIds.putAll(activeAllocationIds.apply(part.activeAllocationIds)); return builder.build(); } } @@ -559,6 +556,12 @@ public class IndexMetaData implements Diffable, FromXContentBuild Custom customIndexMetaData = lookupPrototypeSafe(type).readFrom(in); builder.putCustom(type, customIndexMetaData); } + int activeAllocationIdsSize = in.readVInt(); + for (int i = 0; i < activeAllocationIdsSize; i++) { + int key = in.readVInt(); + Set allocationIds = DiffableUtils.StringSetValueSerializer.getInstance().read(in, key); + builder.putActiveAllocationIds(key, allocationIds); + } return builder.build(); } @@ -582,6 +585,11 @@ public class IndexMetaData implements Diffable, FromXContentBuild out.writeString(cursor.key); cursor.value.writeTo(out); } + out.writeVInt(activeAllocationIds.size()); + for (IntObjectCursor> cursor : activeAllocationIds) { + out.writeVInt(cursor.key); + DiffableUtils.StringSetValueSerializer.getInstance().write(cursor.value, out); + } } public static Builder builder(String index) { @@ -602,12 +610,14 @@ public class IndexMetaData implements Diffable, FromXContentBuild private final ImmutableOpenMap.Builder mappings; private final ImmutableOpenMap.Builder aliases; private final ImmutableOpenMap.Builder customs; + private final ImmutableOpenIntMap.Builder> activeAllocationIds; public Builder(String index) { this.index = index; this.mappings = ImmutableOpenMap.builder(); this.aliases = ImmutableOpenMap.builder(); this.customs = ImmutableOpenMap.builder(); + this.activeAllocationIds = ImmutableOpenIntMap.builder(); } public Builder(IndexMetaData indexMetaData) { @@ -619,6 +629,7 @@ public class IndexMetaData implements Diffable, FromXContentBuild this.mappings = ImmutableOpenMap.builder(indexMetaData.mappings); this.aliases = ImmutableOpenMap.builder(indexMetaData.aliases); this.customs = ImmutableOpenMap.builder(indexMetaData.customs); + this.activeAllocationIds = ImmutableOpenIntMap.builder(indexMetaData.activeAllocationIds); } public String index() { @@ -726,6 +737,15 @@ public class IndexMetaData implements Diffable, FromXContentBuild return this.customs.get(type); } + public Builder putActiveAllocationIds(int shardId, Set allocationIds) { + activeAllocationIds.put(shardId, new HashSet(allocationIds)); + return this; + } + + public Set getActiveAllocationIds(int shardId) { + return activeAllocationIds.get(shardId); + } + public long version() { return this.version; } @@ -783,40 +803,96 @@ public class IndexMetaData implements Diffable, FromXContentBuild } } - if (primaryTerms == null) { - initializePrimaryTerms(); - } else if (primaryTerms.length != numberOfShards()) { - throw new IllegalStateException("primaryTerms length is [" + primaryTerms.length - + "] but should be equal to number of shards [" + numberOfShards() + "]"); + Integer maybeNumberOfShards = settings.getAsInt(SETTING_NUMBER_OF_SHARDS, null); + if (maybeNumberOfShards == null) { + throw new IllegalArgumentException("must specify numberOfShards for index [" + index + "]"); + } + int 
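The IndexMetaData builder now carries the per-shard active allocation ids alongside mappings and aliases, and build() (further down) fills any shard without an entry with an immutable empty set. A small usage sketch, grounded in the methods introduced here but with an invented index name and allocation id:

---------------------------------------------------------------------------
// Sketch: attaching active allocation ids to index metadata via the builder.
IndexMetaData indexMetaData = IndexMetaData.builder("test-index")
        .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
        .numberOfShards(2)
        .numberOfReplicas(0)
        .putActiveAllocationIds(0, Collections.singleton("some-allocation-id"))
        .build();

assert indexMetaData.activeAllocationIds(0).contains("some-allocation-id");
assert indexMetaData.activeAllocationIds(1).isEmpty();   // unset shards default to an empty set
---------------------------------------------------------------------------

In the index metadata x-content this surfaces under the new active_allocations key, with the shard id as the field name and the allocation ids as a string array; fromXContent reads the same structure back.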
numberOfShards = maybeNumberOfShards; + if (numberOfShards <= 0) { + throw new IllegalArgumentException("must specify positive number of shards for index [" + index + "]"); } - return new IndexMetaData(index, version, primaryTerms, state, tmpSettings, mappings.build(), tmpAliases.build(), customs.build()); - } + Integer maybeNumberOfReplicas = settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, null); + if (maybeNumberOfReplicas == null) { + throw new IllegalArgumentException("must specify numberOfReplicas for index [" + index + "]"); + } + int numberOfReplicas = maybeNumberOfReplicas; + if (numberOfReplicas < 0) { + throw new IllegalArgumentException("must specify non-negative number of shards for index [" + index + "]"); + } - static final class Fields { - static final XContentBuilderString VERSION = new XContentBuilderString("version"); - static final XContentBuilderString SETTINGS = new XContentBuilderString("settings"); - static final XContentBuilderString STATE = new XContentBuilderString("state"); - static final XContentBuilderString MAPPINGS = new XContentBuilderString("mappings"); - static final XContentBuilderString ALIASES = new XContentBuilderString("aliases"); - static final XContentBuilderString PRIMARY_TERMS = new XContentBuilderString("primary_terms"); + // fill missing slots in activeAllocationIds with empty set if needed and make all entries immutable + ImmutableOpenIntMap.Builder> filledActiveAllocationIds = ImmutableOpenIntMap.builder(); + for (int i = 0; i < numberOfShards; i++) { + if (activeAllocationIds.containsKey(i)) { + filledActiveAllocationIds.put(i, Collections.unmodifiableSet(new HashSet<>(activeAllocationIds.get(i)))); + } else { + filledActiveAllocationIds.put(i, Collections.emptySet()); + } + } + + Map requireMap = settings.getByPrefix("index.routing.allocation.require.").getAsMap(); + final DiscoveryNodeFilters requireFilters; + if (requireMap.isEmpty()) { + requireFilters = null; + } else { + requireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap); + } + Map includeMap = settings.getByPrefix("index.routing.allocation.include.").getAsMap(); + final DiscoveryNodeFilters includeFilters; + if (includeMap.isEmpty()) { + includeFilters = null; + } else { + includeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap); + } + Map excludeMap = settings.getByPrefix("index.routing.allocation.exclude.").getAsMap(); + final DiscoveryNodeFilters excludeFilters; + if (excludeMap.isEmpty()) { + excludeFilters = null; + } else { + excludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, excludeMap); + } + Version indexCreatedVersion = Version.indexCreated(settings); + Version indexUpgradedVersion = settings.getAsVersion(IndexMetaData.SETTING_VERSION_UPGRADED, indexCreatedVersion); + String stringLuceneVersion = settings.get(SETTING_VERSION_MINIMUM_COMPATIBLE); + final org.apache.lucene.util.Version minimumCompatibleLuceneVersion; + if (stringLuceneVersion != null) { + try { + minimumCompatibleLuceneVersion = org.apache.lucene.util.Version.parse(stringLuceneVersion); + } catch (ParseException ex) { + throw new IllegalStateException("Cannot parse lucene version [" + stringLuceneVersion + "] in the [" + SETTING_VERSION_MINIMUM_COMPATIBLE + "] setting", ex); + } + } else { + minimumCompatibleLuceneVersion = null; + } + + if (primaryTerms == null) { + initializePrimaryTerms(); + } else if (primaryTerms.length != numberOfShards) { + throw new IllegalStateException("primaryTerms length is [" + primaryTerms.length + + "] but should be equal to number of 
shards [" + numberOfShards() + "]"); + } + + return new IndexMetaData(index, version, primaryTerms, state, numberOfShards, numberOfReplicas, tmpSettings, mappings.build(), + tmpAliases.build(), customs.build(), filledActiveAllocationIds.build(), requireFilters, includeFilters, excludeFilters, + indexCreatedVersion, indexUpgradedVersion, minimumCompatibleLuceneVersion); } public static void toXContent(IndexMetaData indexMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startObject(indexMetaData.getIndex(), XContentBuilder.FieldCaseConversion.NONE); - builder.field(Fields.VERSION, indexMetaData.getVersion()); - builder.field(Fields.STATE, indexMetaData.getState().toString().toLowerCase(Locale.ENGLISH)); + builder.field(KEY_VERSION, indexMetaData.getVersion()); + builder.field(KEY_STATE, indexMetaData.getState().toString().toLowerCase(Locale.ENGLISH)); boolean binary = params.paramAsBoolean("binary", false); - builder.startObject(Fields.SETTINGS); + builder.startObject(KEY_SETTINGS); for (Map.Entry entry : indexMetaData.getSettings().getAsMap().entrySet()) { builder.field(entry.getKey(), entry.getValue()); } builder.endObject(); - builder.startArray(Fields.MAPPINGS); + builder.startArray(KEY_MAPPINGS); for (ObjectObjectCursor cursor : indexMetaData.getMappings()) { if (binary) { builder.value(cursor.value.source().compressed()); @@ -836,24 +912,29 @@ public class IndexMetaData implements Diffable, FromXContentBuild builder.endObject(); } - builder.startObject(Fields.ALIASES); + builder.startObject(KEY_ALIASES); for (ObjectCursor cursor : indexMetaData.getAliases().values()) { AliasMetaData.Builder.toXContent(cursor.value, builder, params); } builder.endObject(); - builder.startArray(Fields.PRIMARY_TERMS); + builder.startArray(KEY_PRIMARY_TERMS); for (int i = 0; i < indexMetaData.getNumberOfShards(); i++) { builder.value(indexMetaData.primaryTerm(i)); } builder.endArray(); + builder.startObject(KEY_ACTIVE_ALLOCATIONS); + for (IntObjectCursor> cursor : indexMetaData.activeAllocationIds) { + builder.startArray(String.valueOf(cursor.key)); + for (String allocationId : cursor.value) { + builder.value(allocationId); + } + builder.endArray(); + } builder.endObject(); - } - // TODO move it somewhere where it will be useful for other code? 
- private static boolean fieldEquals(XContentBuilderString field, String currentFieldName) { - return field.underscore().getValue().equals(currentFieldName); + builder.endObject(); } public static IndexMetaData fromXContent(XContentParser parser) throws IOException { @@ -871,9 +952,9 @@ public class IndexMetaData implements Diffable, FromXContentBuild if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { - if (fieldEquals(Fields.SETTINGS, currentFieldName)) { + if (KEY_SETTINGS.equals(currentFieldName)) { builder.settings(Settings.settingsBuilder().put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered()))); - } else if (fieldEquals(Fields.MAPPINGS, currentFieldName)) { + } else if (KEY_MAPPINGS.equals(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -883,10 +964,25 @@ public class IndexMetaData implements Diffable, FromXContentBuild builder.putMapping(new MappingMetaData(mappingType, mappingSource)); } } - } else if (fieldEquals(Fields.ALIASES, currentFieldName)) { + } else if (KEY_ALIASES.equals(currentFieldName)) { while (parser.nextToken() != XContentParser.Token.END_OBJECT) { builder.putAlias(AliasMetaData.Builder.fromXContent(parser)); } + } else if (KEY_ACTIVE_ALLOCATIONS.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token == XContentParser.Token.START_ARRAY) { + String shardId = currentFieldName; + Set allocationIds = new HashSet<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token == XContentParser.Token.VALUE_STRING) { + allocationIds.add(parser.text()); + } + } + builder.putActiveAllocationIds(Integer.valueOf(shardId), allocationIds); + } + } } else { // check if its a custom index metadata Custom proto = lookupPrototype(currentFieldName); @@ -899,7 +995,7 @@ public class IndexMetaData implements Diffable, FromXContentBuild } } } else if (token == XContentParser.Token.START_ARRAY) { - if (fieldEquals(Fields.MAPPINGS, currentFieldName)) { + if (KEY_MAPPINGS.equals(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) { builder.putMapping(new MappingMetaData(new CompressedXContent(parser.binaryValue()))); @@ -911,21 +1007,21 @@ public class IndexMetaData implements Diffable, FromXContentBuild } } } - } else if (fieldEquals(Fields.PRIMARY_TERMS, currentFieldName)) { + } else if (KEY_PRIMARY_TERMS.equals(currentFieldName)) { LongArrayList list = new LongArrayList(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.VALUE_NUMBER) { list.add(parser.longValue()); } else { - throw new IllegalStateException("found a non-numeric value under [" + Fields.PRIMARY_TERMS.underscore() + "]"); + throw new IllegalStateException("found a non-numeric value under [" + KEY_PRIMARY_TERMS + "]"); } } builder.primaryTerms(list.toArray()); } } else if (token.isValue()) { - if (fieldEquals(Fields.STATE, currentFieldName)) { + if (KEY_STATE.equals(currentFieldName)) { builder.state(State.fromString(parser.text())); - } else if (fieldEquals(Fields.VERSION, currentFieldName)) { + } else if 
(KEY_VERSION.equals(currentFieldName)) { builder.version(parser.longValue()); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 9945de1f8d3..657259747dd 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -27,7 +27,6 @@ import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.Diffable; import org.elasticsearch.cluster.DiffableUtils; -import org.elasticsearch.cluster.DiffableUtils.KeyedReader; import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -41,6 +40,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.support.LoggerMessageFormat; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; @@ -54,7 +54,6 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.indices.recovery.RecoverySettings; -import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.warmer.IndexWarmersMetaData; @@ -640,9 +639,9 @@ public class MetaData implements Iterable, Diffable, Fr version = after.version; transientSettings = after.transientSettings; persistentSettings = after.persistentSettings; - indices = DiffableUtils.diff(before.indices, after.indices); - templates = DiffableUtils.diff(before.templates, after.templates); - customs = DiffableUtils.diff(before.customs, after.customs); + indices = DiffableUtils.diff(before.indices, after.indices, DiffableUtils.getStringKeySerializer()); + templates = DiffableUtils.diff(before.templates, after.templates, DiffableUtils.getStringKeySerializer()); + customs = DiffableUtils.diff(before.customs, after.customs, DiffableUtils.getStringKeySerializer()); } public MetaDataDiff(StreamInput in) throws IOException { @@ -650,16 +649,17 @@ public class MetaData implements Iterable, Diffable, Fr version = in.readLong(); transientSettings = Settings.readSettingsFromStream(in); persistentSettings = Settings.readSettingsFromStream(in); - indices = DiffableUtils.readImmutableOpenMapDiff(in, IndexMetaData.PROTO); - templates = DiffableUtils.readImmutableOpenMapDiff(in, IndexTemplateMetaData.PROTO); - customs = DiffableUtils.readImmutableOpenMapDiff(in, new KeyedReader() { + indices = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), IndexMetaData.PROTO); + templates = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), IndexTemplateMetaData.PROTO); + customs = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), + new DiffableUtils.DiffableValueSerializer() { @Override - public Custom readFrom(StreamInput in, String key) throws IOException { + public Custom read(StreamInput in, String key) throws IOException { return 
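Both IndexMetaDataDiff and MetaDataDiff now pass an explicit key serializer (and, for custom values, a DiffableValueSerializer) into DiffableUtils instead of the removed KeyedReader. A condensed, non-authoritative sketch of the resulting call shape, wrapped in hypothetical helper methods:

---------------------------------------------------------------------------
import java.io.IOException;

import org.elasticsearch.cluster.Diff;
import org.elasticsearch.cluster.DiffableUtils;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.StreamInput;

class IndicesDiffSketch {
    // Sending side: diff a String-keyed map of Diffable values.
    static Diff<ImmutableOpenMap<String, IndexMetaData>> diffIndices(ImmutableOpenMap<String, IndexMetaData> before,
                                                                     ImmutableOpenMap<String, IndexMetaData> after) {
        return DiffableUtils.diff(before, after, DiffableUtils.getStringKeySerializer());
    }

    // Receiving side: read with the same key serializer plus a prototype for the values, then apply.
    static ImmutableOpenMap<String, IndexMetaData> applyIndicesDiff(StreamInput in,
                                                                    ImmutableOpenMap<String, IndexMetaData> before) throws IOException {
        Diff<ImmutableOpenMap<String, IndexMetaData>> diff =
                DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), IndexMetaData.PROTO);
        return diff.apply(before);
    }
}
---------------------------------------------------------------------------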
lookupPrototypeSafe(key).readFrom(in); } @Override - public Diff readDiffFrom(StreamInput in, String key) throws IOException { + public Diff readDiff(StreamInput in, String key) throws IOException { return lookupPrototypeSafe(key).readDiffFrom(in); } }); @@ -1029,12 +1029,18 @@ public class MetaData implements Iterable, Diffable, Fr for (ObjectObjectCursor aliasCursor : indexMetaData.getAliases()) { AliasMetaData aliasMetaData = aliasCursor.value; - AliasOrIndex.Alias aliasOrIndex = (AliasOrIndex.Alias) aliasAndIndexLookup.get(aliasMetaData.getAlias()); + AliasOrIndex aliasOrIndex = aliasAndIndexLookup.get(aliasMetaData.getAlias()); if (aliasOrIndex == null) { aliasOrIndex = new AliasOrIndex.Alias(aliasMetaData, indexMetaData); aliasAndIndexLookup.put(aliasMetaData.getAlias(), aliasOrIndex); + } else if (aliasOrIndex instanceof AliasOrIndex.Alias) { + AliasOrIndex.Alias alias = (AliasOrIndex.Alias) aliasOrIndex; + alias.addIndex(indexMetaData); + } else if (aliasOrIndex instanceof AliasOrIndex.Index) { + AliasOrIndex.Index index = (AliasOrIndex.Index) aliasOrIndex; + throw new IllegalStateException("index and alias names need to be unique, but alias [" + aliasMetaData.getAlias() + "] and index [" + index.getIndex().getIndex() + "] have the same name"); } else { - aliasOrIndex.addIndex(indexMetaData); + throw new IllegalStateException("unexpected alias [" + aliasMetaData.getAlias() + "][" + aliasOrIndex + "]"); } } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 5b83870cc3c..96d378af042 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -170,12 +170,12 @@ public class MetaDataCreateIndexService extends AbstractComponent { updatedSettingsBuilder.put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX); request.settings(updatedSettingsBuilder.build()); - clusterService.submitStateUpdateTask("create-index [" + request.index() + "], cause [" + request.cause() + "]", Priority.URGENT, new AckedClusterStateUpdateTask(request, listener) { - - @Override - protected ClusterStateUpdateResponse newResponse(boolean acknowledged) { - return new ClusterStateUpdateResponse(acknowledged); - } + clusterService.submitStateUpdateTask("create-index [" + request.index() + "], cause [" + request.cause() + "]", + new AckedClusterStateUpdateTask(Priority.URGENT, request, listener) { + @Override + protected ClusterStateUpdateResponse newResponse(boolean acknowledged) { + return new ClusterStateUpdateResponse(acknowledged); + } @Override public ClusterState execute(ClusterState currentState) throws Exception { @@ -299,7 +299,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { // Set up everything, now locally create the index to see that things are ok, and apply final IndexMetaData tmpImd = IndexMetaData.builder(request.index()).settings(actualIndexSettings).build(); // create the index here (on the master) to validate it can be created, as well as adding the mapping - indicesService.createIndex(nodeServicesProvider, tmpImd, Collections.EMPTY_LIST); + indicesService.createIndex(nodeServicesProvider, tmpImd, Collections.emptyList()); indexCreated = true; // now add the mappings IndexService indexService = indicesService.indexServiceSafe(request.index()); diff --git 
a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java index fc87bae6507..54c014fb4ed 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java @@ -39,7 +39,6 @@ import org.elasticsearch.threadpool.ThreadPool; import java.util.Arrays; import java.util.Collection; -import java.util.Locale; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -71,7 +70,7 @@ public class MetaDataDeleteIndexService extends AbstractComponent { Collection indices = Arrays.asList(request.indices); final DeleteIndexListener listener = new DeleteIndexListener(userListener); - clusterService.submitStateUpdateTask("delete-index " + indices, Priority.URGENT, new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("delete-index " + indices, new ClusterStateUpdateTask(Priority.URGENT) { @Override public TimeValue timeout() { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java index 80ff68e6cf9..71ef9c22c33 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java @@ -62,7 +62,7 @@ public class MetaDataIndexAliasesService extends AbstractComponent { } public void indicesAliases(final IndicesAliasesClusterStateUpdateRequest request, final ActionListener listener) { - clusterService.submitStateUpdateTask("index-aliases", Priority.URGENT, new AckedClusterStateUpdateTask(request, listener) { + clusterService.submitStateUpdateTask("index-aliases", new AckedClusterStateUpdateTask(Priority.URGENT, request, listener) { @Override protected ClusterStateUpdateResponse newResponse(boolean acknowledged) { return new ClusterStateUpdateResponse(acknowledged); @@ -99,7 +99,7 @@ public class MetaDataIndexAliasesService extends AbstractComponent { if (indexService == null) { // temporarily create the index and add mappings so we can parse the filter try { - indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.EMPTY_LIST); + indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.emptyList()); if (indexMetaData.getMappings().containsKey(MapperService.DEFAULT_MAPPING)) { indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, indexMetaData.getMappings().get(MapperService.DEFAULT_MAPPING).source(), false, false); } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java index b1c9f0749b0..1fa1b702f66 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java @@ -76,7 +76,7 @@ public class MetaDataIndexStateService extends AbstractComponent { } final String indicesAsString = Arrays.toString(request.indices()); - clusterService.submitStateUpdateTask("close-indices " + indicesAsString, Priority.URGENT, new AckedClusterStateUpdateTask(request, listener) { + 
clusterService.submitStateUpdateTask("close-indices " + indicesAsString, new AckedClusterStateUpdateTask(Priority.URGENT, request, listener) { @Override protected ClusterStateUpdateResponse newResponse(boolean acknowledged) { return new ClusterStateUpdateResponse(acknowledged); @@ -140,7 +140,7 @@ public class MetaDataIndexStateService extends AbstractComponent { } final String indicesAsString = Arrays.toString(request.indices()); - clusterService.submitStateUpdateTask("open-indices " + indicesAsString, Priority.URGENT, new AckedClusterStateUpdateTask(request, listener) { + clusterService.submitStateUpdateTask("open-indices " + indicesAsString, new AckedClusterStateUpdateTask(Priority.URGENT, request, listener) { @Override protected ClusterStateUpdateResponse newResponse(boolean acknowledged) { return new ClusterStateUpdateResponse(acknowledged); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java index 13823e8ebdd..790cb99c64b 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java @@ -56,7 +56,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent { } public void removeTemplates(final RemoveRequest request, final RemoveListener listener) { - clusterService.submitStateUpdateTask("remove-index-template [" + request.name + "]", Priority.URGENT, new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("remove-index-template [" + request.name + "]", new ClusterStateUpdateTask(Priority.URGENT) { @Override public TimeValue timeout() { @@ -143,7 +143,8 @@ public class MetaDataIndexTemplateService extends AbstractComponent { } final IndexTemplateMetaData template = templateBuilder.build(); - clusterService.submitStateUpdateTask("create-index-template [" + request.name + "], cause [" + request.cause + "]", Priority.URGENT, new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("create-index-template [" + request.name + "], cause [" + request.cause + "]", + new ClusterStateUpdateTask(Priority.URGENT) { @Override public TimeValue timeout() { @@ -216,6 +217,9 @@ public class MetaDataIndexTemplateService extends AbstractComponent { for (Alias alias : request.aliases) { //we validate the alias only partially, as we don't know yet to which index it'll get applied to aliasValidator.validateAliasStandalone(alias); + if (request.template.equals(alias.name())) { + throw new IllegalArgumentException("Alias [" + alias.name() + "] cannot be the same as the template pattern [" + request.template + "]"); + } } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java index c19558a8808..00904af8915 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java @@ -218,8 +218,8 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { try { // We cannot instantiate real analysis server at this point because the node might not have // been started yet. 
However, we don't really need real analyzers at this stage - so we can fake it - IndexSettings indexSettings = new IndexSettings(indexMetaData, this.settings, Collections.EMPTY_LIST); - SimilarityService similarityService = new SimilarityService(indexSettings, Collections.EMPTY_MAP); + IndexSettings indexSettings = new IndexSettings(indexMetaData, this.settings, Collections.emptyList()); + SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); try (AnalysisService analysisService = new FakeAnalysisService(indexSettings)) { try (MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry)) { @@ -231,7 +231,7 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { } } catch (Exception ex) { // Wrap the inner exception so we have the index name in the exception message - throw new IllegalStateException("unable to upgrade the mappings for the index [" + indexMetaData.getIndex() + "], reason: [" + ex.getMessage() + "]", ex); + throw new IllegalStateException("unable to upgrade the mappings for the index [" + indexMetaData.getIndex() + "]", ex); } } @@ -256,7 +256,7 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { }; public FakeAnalysisService(IndexSettings indexSettings) { - super(indexSettings, Collections.EMPTY_MAP, Collections.EMPTY_MAP, Collections.EMPTY_MAP, Collections.EMPTY_MAP); + super(indexSettings, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); } @Override diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 725f06d9fae..957125703b6 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -22,28 +22,27 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest; -import org.elasticsearch.cluster.AckedClusterStateUpdateTask; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.NodeServicesProvider; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.InvalidTypeNameException; import org.elasticsearch.percolator.PercolatorService; +import 
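Alongside the functional changes, several call sites in these classes swap the raw Collections.EMPTY_LIST / EMPTY_MAP constants for the generic Collections.emptyList() / emptyMap() factories, which infer the element type and avoid unchecked-conversion warnings. A plain-Java illustration of the difference (not taken from the change itself):

---------------------------------------------------------------------------
import java.util.Collections;
import java.util.List;

class EmptyCollectionsExample {
    @SuppressWarnings("unchecked")
    static List<String> raw() {
        return Collections.EMPTY_LIST;   // raw List constant: unchecked-conversion warning (suppressed here)
    }

    static List<String> typed() {
        return Collections.emptyList();  // <String> is inferred; no warning, same shared immutable instance
    }
}
---------------------------------------------------------------------------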
java.io.IOException; import java.util.*; /** * Service responsible for submitting mapping changes @@ -53,13 +52,11 @@ public class MetaDataMappingService extends AbstractComponent { private final ClusterService clusterService; private final IndicesService indicesService; - // the mutex protect all the refreshOrUpdate variables! - private final Object refreshOrUpdateMutex = new Object(); - private final List refreshOrUpdateQueue = new ArrayList<>(); - private long refreshOrUpdateInsertOrder; - private long refreshOrUpdateProcessedInsertOrder; + final ClusterStateTaskExecutor refreshExecutor = new RefreshTaskExecutor(); + final ClusterStateTaskExecutor putMappingExecutor = new PutMappingExecutor(); private final NodeServicesProvider nodeServicesProvider; + @Inject public MetaDataMappingService(Settings settings, ClusterService clusterService, IndicesService indicesService, NodeServicesProvider nodeServicesProvider) { super(settings); @@ -68,89 +65,44 @@ public class MetaDataMappingService extends AbstractComponent { this.nodeServicesProvider = nodeServicesProvider; } - static class MappingTask { + static class RefreshTask { final String index; final String indexUUID; - MappingTask(String index, final String indexUUID) { + RefreshTask(String index, final String indexUUID) { this.index = index; this.indexUUID = indexUUID; } } - static class RefreshTask extends MappingTask { - final String[] types; - - RefreshTask(String index, final String indexUUID, String[] types) { - super(index, indexUUID); - this.types = types; - } - } - - static class UpdateTask extends MappingTask { - final String type; - final CompressedXContent mappingSource; - final String nodeId; // null fr unknown - final ActionListener listener; - - UpdateTask(String index, String indexUUID, String type, CompressedXContent mappingSource, String nodeId, ActionListener listener) { - super(index, indexUUID); - this.type = type; - this.mappingSource = mappingSource; - this.nodeId = nodeId; - this.listener = listener; + class RefreshTaskExecutor implements ClusterStateTaskExecutor { + @Override + public BatchResult execute(ClusterState currentState, List tasks) throws Exception { + ClusterState newClusterState = executeRefresh(currentState, tasks); + return BatchResult.builder().successes(tasks).build(newClusterState); } } /** - * Batch method to apply all the queued refresh or update operations. The idea is to try and batch as much + * Batch method to apply all the queued refresh operations. The idea is to try and batch as much * as possible so we won't create the same index all the time for example for the updates on the same mapping * and generate a single cluster change event out of all of those. 
*/ - Tuple> executeRefreshOrUpdate(final ClusterState currentState, final long insertionOrder) throws Exception { - final List allTasks = new ArrayList<>(); - - synchronized (refreshOrUpdateMutex) { - if (refreshOrUpdateQueue.isEmpty()) { - return Tuple.tuple(currentState, allTasks); - } - - // we already processed this task in a bulk manner in a previous cluster event, simply ignore - // it so we will let other tasks get in and processed ones, we will handle the queued ones - // later on in a subsequent cluster state event - if (insertionOrder < refreshOrUpdateProcessedInsertOrder) { - return Tuple.tuple(currentState, allTasks); - } - - allTasks.addAll(refreshOrUpdateQueue); - refreshOrUpdateQueue.clear(); - - refreshOrUpdateProcessedInsertOrder = refreshOrUpdateInsertOrder; - } - - if (allTasks.isEmpty()) { - return Tuple.tuple(currentState, allTasks); - } - + ClusterState executeRefresh(final ClusterState currentState, final List allTasks) throws Exception { // break down to tasks per index, so we can optimize the on demand index service creation // to only happen for the duration of a single index processing of its respective events - Map> tasksPerIndex = new HashMap<>(); - for (MappingTask task : allTasks) { + Map> tasksPerIndex = new HashMap<>(); + for (RefreshTask task : allTasks) { if (task.index == null) { logger.debug("ignoring a mapping task of type [{}] with a null index.", task); } - List indexTasks = tasksPerIndex.get(task.index); - if (indexTasks == null) { - indexTasks = new ArrayList<>(); - tasksPerIndex.put(task.index, indexTasks); - } - indexTasks.add(task); + tasksPerIndex.computeIfAbsent(task.index, k -> new ArrayList<>()).add(task); } boolean dirty = false; MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData()); - for (Map.Entry> entry : tasksPerIndex.entrySet()) { + for (Map.Entry> entry : tasksPerIndex.entrySet()) { String index = entry.getKey(); IndexMetaData indexMetaData = mdBuilder.get(index); if (indexMetaData == null) { @@ -160,14 +112,17 @@ public class MetaDataMappingService extends AbstractComponent { } // the tasks lists to iterate over, filled with the list of mapping tasks, trying to keep // the latest (based on order) update mapping one per node - List allIndexTasks = entry.getValue(); - List tasks = new ArrayList<>(); - for (MappingTask task : allIndexTasks) { - if (!indexMetaData.isSameUUID(task.indexUUID)) { + List allIndexTasks = entry.getValue(); + boolean hasTaskWithRightUUID = false; + for (RefreshTask task : allIndexTasks) { + if (indexMetaData.isSameUUID(task.indexUUID)) { + hasTaskWithRightUUID = true; + } else { logger.debug("[{}] ignoring task [{}] - index meta data doesn't match task uuid", index, task); - continue; } - tasks.add(task); + } + if (hasTaskWithRightUUID == false) { + continue; } // construct the actual index if needed, and make sure the relevant mappings are there @@ -175,28 +130,17 @@ public class MetaDataMappingService extends AbstractComponent { IndexService indexService = indicesService.indexService(index); if (indexService == null) { // we need to create the index here, and add the current mapping to it, so we can merge - indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.EMPTY_LIST); + indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.emptyList()); removeIndex = true; - Set typesToIntroduce = new HashSet<>(); - for (MappingTask task : tasks) { - if (task instanceof UpdateTask) { - typesToIntroduce.add(((UpdateTask) 
task).type); - } else if (task instanceof RefreshTask) { - Collections.addAll(typesToIntroduce, ((RefreshTask) task).types); - } - } - for (String type : typesToIntroduce) { - // only add the current relevant mapping (if exists) - if (indexMetaData.getMappings().containsKey(type)) { - // don't apply the default mapping, it has been applied when the mapping was created - indexService.mapperService().merge(type, indexMetaData.getMappings().get(type).source(), false, true); - } + for (ObjectCursor metaData : indexMetaData.getMappings().values()) { + // don't apply the default mapping, it has been applied when the mapping was created + indexService.mapperService().merge(metaData.value.type(), metaData.value.source(), false, true); } } IndexMetaData.Builder builder = IndexMetaData.builder(indexMetaData); try { - boolean indexDirty = processIndexMappingTasks(tasks, indexService, builder); + boolean indexDirty = refreshIndexMapping(indexService, builder); if (indexDirty) { mdBuilder.put(builder); dirty = true; @@ -209,81 +153,33 @@ public class MetaDataMappingService extends AbstractComponent { } if (!dirty) { - return Tuple.tuple(currentState, allTasks); + return currentState; } - return Tuple.tuple(ClusterState.builder(currentState).metaData(mdBuilder).build(), allTasks); + return ClusterState.builder(currentState).metaData(mdBuilder).build(); } - private boolean processIndexMappingTasks(List tasks, IndexService indexService, IndexMetaData.Builder builder) { + private boolean refreshIndexMapping(IndexService indexService, IndexMetaData.Builder builder) { boolean dirty = false; String index = indexService.index().name(); - // keep track of what we already refreshed, no need to refresh it again... - Set processedRefreshes = new HashSet<>(); - for (MappingTask task : tasks) { - if (task instanceof RefreshTask) { - RefreshTask refreshTask = (RefreshTask) task; - try { - List updatedTypes = new ArrayList<>(); - for (String type : refreshTask.types) { - if (processedRefreshes.contains(type)) { - continue; - } - DocumentMapper mapper = indexService.mapperService().documentMapper(type); - if (mapper == null) { - continue; - } - if (!mapper.mappingSource().equals(builder.mapping(type).source())) { - updatedTypes.add(type); - builder.putMapping(new MappingMetaData(mapper)); - } - processedRefreshes.add(type); - } - - if (updatedTypes.isEmpty()) { - continue; - } - - logger.warn("[{}] re-syncing mappings with cluster state for types [{}]", index, updatedTypes); - dirty = true; - } catch (Throwable t) { - logger.warn("[{}] failed to refresh-mapping in cluster state, types [{}]", index, refreshTask.types); + try { + List updatedTypes = new ArrayList<>(); + for (DocumentMapper mapper : indexService.mapperService().docMappers(true)) { + final String type = mapper.type(); + if (!mapper.mappingSource().equals(builder.mapping(type).source())) { + updatedTypes.add(type); } - } else if (task instanceof UpdateTask) { - UpdateTask updateTask = (UpdateTask) task; - try { - String type = updateTask.type; - CompressedXContent mappingSource = updateTask.mappingSource; - - MappingMetaData mappingMetaData = builder.mapping(type); - if (mappingMetaData != null && mappingMetaData.source().equals(mappingSource)) { - logger.debug("[{}] update_mapping [{}] ignoring mapping update task as its source is equal to ours", index, updateTask.type); - continue; - } - - DocumentMapper updatedMapper = indexService.mapperService().merge(type, mappingSource, false, true); - processedRefreshes.add(type); - - // if we end up with the same 
mapping as the original once, ignore - if (mappingMetaData != null && mappingMetaData.source().equals(updatedMapper.mappingSource())) { - logger.debug("[{}] update_mapping [{}] ignoring mapping update task as it results in the same source as what we have", index, updateTask.type); - continue; - } - - // build the updated mapping source - if (logger.isDebugEnabled()) { - logger.debug("[{}] update_mapping [{}] (dynamic) with source [{}]", index, type, updatedMapper.mappingSource()); - } else if (logger.isInfoEnabled()) { - logger.info("[{}] update_mapping [{}] (dynamic)", index, type); - } - - builder.putMapping(new MappingMetaData(updatedMapper)); - dirty = true; - } catch (Throwable t) { - logger.warn("[{}] failed to update-mapping in cluster state, type [{}]", index, updateTask.type); - } - } else { - logger.warn("illegal state, got wrong mapping task type [{}]", task); } + + // if a single type is not up-to-date, re-send everything + if (updatedTypes.isEmpty() == false) { + logger.warn("[{}] re-syncing mappings with cluster state because of types [{}]", index, updatedTypes); + dirty = true; + for (DocumentMapper mapper : indexService.mapperService().docMappers(true)) { + builder.putMapping(new MappingMetaData(mapper)); + } + } + } catch (Throwable t) { + logger.warn("[{}] failed to refresh-mapping in cluster state", t, index); } return dirty; } @@ -291,198 +187,198 @@ public class MetaDataMappingService extends AbstractComponent { /** * Refreshes mappings if they are not the same between original and parsed version */ - public void refreshMapping(final String index, final String indexUUID, final String... types) { - final long insertOrder; - synchronized (refreshOrUpdateMutex) { - insertOrder = ++refreshOrUpdateInsertOrder; - refreshOrUpdateQueue.add(new RefreshTask(index, indexUUID, types)); - } - clusterService.submitStateUpdateTask("refresh-mapping [" + index + "][" + Arrays.toString(types) + "]", Priority.HIGH, new ClusterStateUpdateTask() { - private volatile List allTasks; + public void refreshMapping(final String index, final String indexUUID) { + final RefreshTask refreshTask = new RefreshTask(index, indexUUID); + clusterService.submitStateUpdateTask("refresh-mapping [" + index + "]", + refreshTask, + ClusterStateTaskConfig.build(Priority.HIGH), + refreshExecutor, + (source, t) -> logger.warn("failure during [{}]", t, source) + ); + } - @Override - public void onFailure(String source, Throwable t) { - logger.warn("failure during [{}]", t, source); - } - - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - Tuple> tuple = executeRefreshOrUpdate(currentState, insertOrder); - this.allTasks = tuple.v2(); - return tuple.v1(); - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - if (allTasks == null) { - return; + class PutMappingExecutor implements ClusterStateTaskExecutor { + @Override + public BatchResult execute(ClusterState currentState, List tasks) throws Exception { + Set indicesToClose = new HashSet<>(); + BatchResult.Builder builder = BatchResult.builder(); + try { + // precreate incoming indices; + for (PutMappingClusterStateUpdateRequest request : tasks) { + // failures here mean something is broken with our cluster state - fail all tasks by letting exceptions bubble up + for (String index : request.indices()) { + final IndexMetaData indexMetaData = currentState.metaData().index(index); + if (indexMetaData != null && indicesService.hasIndex(index) == false) { + // if we 
don't have the index, we will throw exceptions later; + indicesToClose.add(index); + IndexService indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.emptyList()); + // add mappings for all types, we need them for cross-type validation + for (ObjectCursor mapping : indexMetaData.getMappings().values()) { + indexService.mapperService().merge(mapping.value.type(), mapping.value.source(), false, request.updateAllTypes()); + } + } + } } - for (Object task : allTasks) { - if (task instanceof UpdateTask) { - UpdateTask uTask = (UpdateTask) task; - ClusterStateUpdateResponse response = new ClusterStateUpdateResponse(true); - uTask.listener.onResponse(response); + for (PutMappingClusterStateUpdateRequest request : tasks) { + try { + currentState = applyRequest(currentState, request); + builder.success(request); + } catch (Throwable t) { + builder.failure(request, t); + } + } + + return builder.build(currentState); + } finally { + for (String index : indicesToClose) { + indicesService.removeIndex(index, "created for mapping processing"); + } + } + } + + private ClusterState applyRequest(ClusterState currentState, PutMappingClusterStateUpdateRequest request) throws IOException { + Map newMappers = new HashMap<>(); + Map existingMappers = new HashMap<>(); + for (String index : request.indices()) { + IndexService indexService = indicesService.indexServiceSafe(index); + // try and parse it (no need to add it here) so we can bail early in case of parsing exception + DocumentMapper newMapper; + DocumentMapper existingMapper = indexService.mapperService().documentMapper(request.type()); + if (MapperService.DEFAULT_MAPPING.equals(request.type())) { + // _default_ types do not go through merging, but we do test the new settings. Also don't apply the old default + newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), false); + } else { + newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), existingMapper == null); + if (existingMapper != null) { + // first, simulate + MergeResult mergeResult = existingMapper.merge(newMapper.mapping(), true, request.updateAllTypes()); + // if we have conflicts, throw an exception + if (mergeResult.hasConflicts()) { + throw new IllegalArgumentException("Merge failed with failures {" + Arrays.toString(mergeResult.buildConflicts()) + "}"); + } + } else { + // TODO: can we find a better place for this validation? + // The reason this validation is here is that the mapper service doesn't learn about + // new types all at once , which can create a false error. + + // For example in MapperService we can't distinguish between a create index api call + // and a put mapping api call, so we don't which type did exist before. + // Also the order of the mappings may be backwards. 
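                    // Illustrative note (the type names "blog" and "comment" are made up, not from
                    // this patch): suppose the index metadata already contains a type "blog" and a
                    // put-mapping request adds a new type "comment" whose source declares
                    // "_parent": {"type": "blog"}. Then newMapper.parentFieldMapper().active() is
                    // true and its type() is "blog", so the loop below finds the existing "blog"
                    // mapping and rejects the request with the IllegalArgumentException.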
+ if (newMapper.parentFieldMapper().active()) { + IndexMetaData indexMetaData = currentState.metaData().index(index); + for (ObjectCursor mapping : indexMetaData.getMappings().values()) { + if (newMapper.parentFieldMapper().type().equals(mapping.value.type())) { + throw new IllegalArgumentException("can't add a _parent field that points to an already existing type"); + } + } + } + } + } + newMappers.put(index, newMapper); + if (existingMapper != null) { + existingMappers.put(index, existingMapper); + } + } + + String mappingType = request.type(); + if (mappingType == null) { + mappingType = newMappers.values().iterator().next().type(); + } else if (!mappingType.equals(newMappers.values().iterator().next().type())) { + throw new InvalidTypeNameException("Type name provided does not match type name within mapping definition"); + } + if (!MapperService.DEFAULT_MAPPING.equals(mappingType) && !PercolatorService.TYPE_NAME.equals(mappingType) && mappingType.charAt(0) == '_') { + throw new InvalidTypeNameException("Document mapping type name can't start with '_'"); + } + final Map mappings = new HashMap<>(); + for (Map.Entry entry : newMappers.entrySet()) { + String index = entry.getKey(); + // do the actual merge here on the master, and update the mapping source + DocumentMapper newMapper = entry.getValue(); + IndexService indexService = indicesService.indexService(index); + if (indexService == null) { + continue; + } + + CompressedXContent existingSource = null; + if (existingMappers.containsKey(entry.getKey())) { + existingSource = existingMappers.get(entry.getKey()).mappingSource(); + } + DocumentMapper mergedMapper = indexService.mapperService().merge(newMapper.type(), newMapper.mappingSource(), false, request.updateAllTypes()); + CompressedXContent updatedSource = mergedMapper.mappingSource(); + + if (existingSource != null) { + if (existingSource.equals(updatedSource)) { + // same source, no changes, ignore it + } else { + // use the merged mapping source + mappings.put(index, new MappingMetaData(mergedMapper)); + if (logger.isDebugEnabled()) { + logger.debug("[{}] update_mapping [{}] with source [{}]", index, mergedMapper.type(), updatedSource); + } else if (logger.isInfoEnabled()) { + logger.info("[{}] update_mapping [{}]", index, mergedMapper.type()); + } + + } + } else { + mappings.put(index, new MappingMetaData(mergedMapper)); + if (logger.isDebugEnabled()) { + logger.debug("[{}] create_mapping [{}] with source [{}]", index, newMapper.type(), updatedSource); + } else if (logger.isInfoEnabled()) { + logger.info("[{}] create_mapping [{}]", index, newMapper.type()); } } } - }); + if (mappings.isEmpty()) { + // no changes, return + return currentState; + } + MetaData.Builder builder = MetaData.builder(currentState.metaData()); + for (String indexName : request.indices()) { + IndexMetaData indexMetaData = currentState.metaData().index(indexName); + if (indexMetaData == null) { + throw new IndexNotFoundException(indexName); + } + MappingMetaData mappingMd = mappings.get(indexName); + if (mappingMd != null) { + builder.put(IndexMetaData.builder(indexMetaData).putMapping(mappingMd)); + } + } + + return ClusterState.builder(currentState).metaData(builder).build(); + } } public void putMapping(final PutMappingClusterStateUpdateRequest request, final ActionListener listener) { + clusterService.submitStateUpdateTask("put-mapping [" + request.type() + "]", + request, + ClusterStateTaskConfig.build(Priority.HIGH, request.masterNodeTimeout()), + putMappingExecutor, + new 
AckedClusterStateTaskListener() { - clusterService.submitStateUpdateTask("put-mapping [" + request.type() + "]", Priority.HIGH, new AckedClusterStateUpdateTask(request, listener) { - - @Override - protected ClusterStateUpdateResponse newResponse(boolean acknowledged) { - return new ClusterStateUpdateResponse(acknowledged); - } - - @Override - public ClusterState execute(final ClusterState currentState) throws Exception { - List indicesToClose = new ArrayList<>(); - try { - for (String index : request.indices()) { - if (!currentState.metaData().hasIndex(index)) { - throw new IndexNotFoundException(index); - } + @Override + public void onFailure(String source, Throwable t) { + listener.onFailure(t); } - // pre create indices here and add mappings to them so we can merge the mappings here if needed - for (String index : request.indices()) { - if (indicesService.hasIndex(index)) { - continue; - } - final IndexMetaData indexMetaData = currentState.metaData().index(index); - IndexService indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.EMPTY_LIST); - indicesToClose.add(indexMetaData.getIndex()); - // make sure to add custom default mapping if exists - if (indexMetaData.getMappings().containsKey(MapperService.DEFAULT_MAPPING)) { - indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, indexMetaData.getMappings().get(MapperService.DEFAULT_MAPPING).source(), false, request.updateAllTypes()); - } - // only add the current relevant mapping (if exists) - if (indexMetaData.getMappings().containsKey(request.type())) { - indexService.mapperService().merge(request.type(), indexMetaData.getMappings().get(request.type()).source(), false, request.updateAllTypes()); - } + @Override + public boolean mustAck(DiscoveryNode discoveryNode) { + return true; } - Map newMappers = new HashMap<>(); - Map existingMappers = new HashMap<>(); - for (String index : request.indices()) { - IndexService indexService = indicesService.indexServiceSafe(index); - // try and parse it (no need to add it here) so we can bail early in case of parsing exception - DocumentMapper newMapper; - DocumentMapper existingMapper = indexService.mapperService().documentMapper(request.type()); - if (MapperService.DEFAULT_MAPPING.equals(request.type())) { - // _default_ types do not go through merging, but we do test the new settings. Also don't apply the old default - newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), false); - } else { - newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), existingMapper == null); - if (existingMapper != null) { - // first, simulate - MergeResult mergeResult = existingMapper.merge(newMapper.mapping(), true, request.updateAllTypes()); - // if we have conflicts, throw an exception - if (mergeResult.hasConflicts()) { - throw new MergeMappingException(mergeResult.buildConflicts()); - } - } else { - // TODO: can we find a better place for this validation? - // The reason this validation is here is that the mapper service doesn't learn about - // new types all at once , which can create a false error. - - // For example in MapperService we can't distinguish between a create index api call - // and a put mapping api call, so we don't which type did exist before. - // Also the order of the mappings may be backwards. 
- if (newMapper.parentFieldMapper().active()) { - IndexMetaData indexMetaData = currentState.metaData().index(index); - for (ObjectCursor mapping : indexMetaData.getMappings().values()) { - if (newMapper.parentFieldMapper().type().equals(mapping.value.type())) { - throw new IllegalArgumentException("can't add a _parent field that points to an already existing type"); - } - } - } - } - } - - - newMappers.put(index, newMapper); - if (existingMapper != null) { - existingMappers.put(index, existingMapper); - } + @Override + public void onAllNodesAcked(@Nullable Throwable t) { + listener.onResponse(new ClusterStateUpdateResponse(true)); } - String mappingType = request.type(); - if (mappingType == null) { - mappingType = newMappers.values().iterator().next().type(); - } else if (!mappingType.equals(newMappers.values().iterator().next().type())) { - throw new InvalidTypeNameException("Type name provided does not match type name within mapping definition"); - } - if (!MapperService.DEFAULT_MAPPING.equals(mappingType) && !PercolatorService.TYPE_NAME.equals(mappingType) && mappingType.charAt(0) == '_') { - throw new InvalidTypeNameException("Document mapping type name can't start with '_'"); + @Override + public void onAckTimeout() { + listener.onResponse(new ClusterStateUpdateResponse(false)); } - final Map mappings = new HashMap<>(); - for (Map.Entry entry : newMappers.entrySet()) { - String index = entry.getKey(); - // do the actual merge here on the master, and update the mapping source - DocumentMapper newMapper = entry.getValue(); - IndexService indexService = indicesService.indexService(index); - if (indexService == null) { - continue; - } - - CompressedXContent existingSource = null; - if (existingMappers.containsKey(entry.getKey())) { - existingSource = existingMappers.get(entry.getKey()).mappingSource(); - } - DocumentMapper mergedMapper = indexService.mapperService().merge(newMapper.type(), newMapper.mappingSource(), false, request.updateAllTypes()); - CompressedXContent updatedSource = mergedMapper.mappingSource(); - - if (existingSource != null) { - if (existingSource.equals(updatedSource)) { - // same source, no changes, ignore it - } else { - // use the merged mapping source - mappings.put(index, new MappingMetaData(mergedMapper)); - if (logger.isDebugEnabled()) { - logger.debug("[{}] update_mapping [{}] with source [{}]", index, mergedMapper.type(), updatedSource); - } else if (logger.isInfoEnabled()) { - logger.info("[{}] update_mapping [{}]", index, mergedMapper.type()); - } - } - } else { - mappings.put(index, new MappingMetaData(mergedMapper)); - if (logger.isDebugEnabled()) { - logger.debug("[{}] create_mapping [{}] with source [{}]", index, newMapper.type(), updatedSource); - } else if (logger.isInfoEnabled()) { - logger.info("[{}] create_mapping [{}]", index, newMapper.type()); - } - } + @Override + public TimeValue ackTimeout() { + return request.ackTimeout(); } - - if (mappings.isEmpty()) { - // no changes, return - return currentState; - } - - MetaData.Builder builder = MetaData.builder(currentState.metaData()); - for (String indexName : request.indices()) { - IndexMetaData indexMetaData = currentState.metaData().index(indexName); - if (indexMetaData == null) { - throw new IndexNotFoundException(indexName); - } - MappingMetaData mappingMd = mappings.get(indexName); - if (mappingMd != null) { - builder.put(IndexMetaData.builder(indexMetaData).putMapping(mappingMd)); - } - } - - return ClusterState.builder(currentState).metaData(builder).build(); - } finally { - for (String 
index : indicesToClose) { - indicesService.removeIndex(index, "created for mapping processing"); - } - } - } - }); + }); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index 1a928dd41ea..eaa1eefd25e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -24,11 +24,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeSettingsClusterStateUpdateRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.AckedClusterStateUpdateTask; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.routing.RoutingTable; @@ -44,13 +40,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.settings.IndexDynamicSettings; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Set; +import java.util.*; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -219,7 +209,8 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements } final Settings openSettings = updatedSettingsBuilder.build(); - clusterService.submitStateUpdateTask("update-settings", Priority.URGENT, new AckedClusterStateUpdateTask(request, listener) { + clusterService.submitStateUpdateTask("update-settings", + new AckedClusterStateUpdateTask(Priority.URGENT, request, listener) { @Override protected ClusterStateUpdateResponse newResponse(boolean acknowledged) { @@ -334,7 +325,7 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements public void upgradeIndexSettings(final UpgradeSettingsClusterStateUpdateRequest request, final ActionListener listener) { - clusterService.submitStateUpdateTask("update-index-compatibility-versions", Priority.URGENT, new AckedClusterStateUpdateTask(request, listener) { + clusterService.submitStateUpdateTask("update-index-compatibility-versions", new AckedClusterStateUpdateTask(Priority.URGENT, request, listener) { @Override protected ClusterStateUpdateResponse newResponse(boolean acknowledged) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java index 6f43e880e3f..5cd4366bea4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java @@ -147,7 +147,7 @@ public class RoutingService extends AbstractLifecycleComponent i return; } logger.trace("rerouting {}", reason); - clusterService.submitStateUpdateTask(CLUSTER_UPDATE_TASK_SOURCE + "(" + reason + ")", Priority.HIGH, new ClusterStateUpdateTask() { + 
clusterService.submitStateUpdateTask(CLUSTER_UPDATE_TASK_SOURCE + "(" + reason + ")", new ClusterStateUpdateTask(Priority.HIGH) { @Override public ClusterState execute(ClusterState currentState) { rerouting.set(false); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java index 80c43cd20be..0e657491104 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java @@ -308,12 +308,12 @@ public class RoutingTable implements Iterable, Diffable s.shardId().toString()); logClusterHealthStateChange( - new ClusterStateHealth(clusterState), - new ClusterStateHealth(result.metaData(), result.routingTable()), - "shards started [" + startedShardsAsString + "] ..." + new ClusterStateHealth(clusterState), + new ClusterStateHealth(clusterState.metaData(), result.routingTable()), + "shards started [" + startedShardsAsString + "] ..." ); return result; } - protected RoutingAllocation.Result buildChangedResult(MetaData metaData, RoutingNodes routingNodes) { return buildChangedResult(metaData, routingNodes, new RoutingExplanations()); @@ -115,10 +111,10 @@ public class AllocationService extends AbstractComponent { * * @param currentMetaData {@link MetaData} object from before the routing table was changed. * @param newRoutingTable new {@link RoutingTable} created by the allocation change - * @return adpated {@link MetaData}, potentially the original one if no change was needed. + * @return adapted {@link MetaData}, potentially the original one if no change was needed. */ static MetaData updateMetaDataWithRoutingTable(MetaData currentMetaData, RoutingTable newRoutingTable) { - // make sure index meta data and routing tables are in sync w.r.t primaryTerm + // make sure index meta data and routing tables are in sync w.r.t active allocation ids MetaData.Builder metaDataBuilder = null; for (IndexRoutingTable indexRoutingTable : newRoutingTable) { final IndexMetaData indexMetaData = currentMetaData.index(indexRoutingTable.getIndex()); @@ -127,6 +123,26 @@ public class AllocationService extends AbstractComponent { } IndexMetaData.Builder indexMetaDataBuilder = null; for (IndexShardRoutingTable shardRoutings : indexRoutingTable) { + + // update activeAllocationIds + Set activeAllocationIds = shardRoutings.activeShards().stream() + .map(ShardRouting::allocationId) + .filter(Objects::nonNull) + .map(AllocationId::getId) + .collect(Collectors.toSet()); + // only update active allocation ids if there is an active shard + if (activeAllocationIds.isEmpty() == false) { + // get currently stored allocation ids + Set storedAllocationIds = indexMetaData.activeAllocationIds(shardRoutings.shardId().id()); + if (activeAllocationIds.equals(storedAllocationIds) == false) { + if (indexMetaDataBuilder == null) { + indexMetaDataBuilder = IndexMetaData.builder(indexMetaData); + } + indexMetaDataBuilder.putActiveAllocationIds(shardRoutings.shardId().id(), activeAllocationIds); + } + } + + // update primary terms final ShardRouting primary = shardRoutings.primaryShard(); if (primary == null) { throw new IllegalStateException("missing primary shard for " + shardRoutings.shardId()); @@ -134,8 +150,8 @@ public class AllocationService extends AbstractComponent { final int shardId = primary.shardId().id(); if (primary.primaryTerm() != indexMetaData.primaryTerm(shardId)) { assert primary.primaryTerm() > indexMetaData.primaryTerm(shardId) : - 
"primary term should only increase. Index primary term [" - + indexMetaData.primaryTerm(shardId) + "] but primary routing is " + primary; + "primary term should only increase. Index primary term [" + + indexMetaData.primaryTerm(shardId) + "] but primary routing is " + primary; if (indexMetaDataBuilder == null) { indexMetaDataBuilder = IndexMetaData.builder(indexMetaData); } @@ -173,20 +189,19 @@ public class AllocationService extends AbstractComponent { boolean changed = false; for (FailedRerouteAllocation.FailedShard failedShard : failedShards) { changed |= applyFailedShard(allocation, failedShard.shard, true, new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, failedShard.message, failedShard.failure, - System.nanoTime(), System.currentTimeMillis())); + System.nanoTime(), System.currentTimeMillis())); } if (!changed) { return new RoutingAllocation.Result(false, clusterState.routingTable(), clusterState.metaData()); } shardsAllocators.applyFailedShards(allocation); reroute(allocation); - RoutingAllocation.Result result = buildChangedResult(clusterState.metaData(), routingNodes); - + final RoutingAllocation.Result result = buildChangedResult(clusterState.metaData(), routingNodes); String failedShardsAsString = firstListElementsToCommaDelimitedString(failedShards, s -> s.shard.shardId().toString()); logClusterHealthStateChange( - new ClusterStateHealth(clusterState), - new ClusterStateHealth(clusterState.getMetaData(), clusterState.routingTable()), - "shards failed [" + failedShardsAsString + "] ..." + new ClusterStateHealth(clusterState), + new ClusterStateHealth(clusterState.getMetaData(), result.routingTable()), + "shards failed [" + failedShardsAsString + "] ..." ); return result; } @@ -202,10 +217,10 @@ public class AllocationService extends AbstractComponent { private String firstListElementsToCommaDelimitedString(List elements, Function formatter) { final int maxNumberOfElements = 10; return elements - .stream() - .limit(maxNumberOfElements) - .map(formatter) - .collect(Collectors.joining(", ")); + .stream() + .limit(maxNumberOfElements) + .map(formatter) + .collect(Collectors.joining(", ")); } public RoutingAllocation.Result reroute(ClusterState clusterState, AllocationCommands commands) { @@ -230,9 +245,9 @@ public class AllocationService extends AbstractComponent { reroute(allocation); RoutingAllocation.Result result = buildChangedResult(clusterState.metaData(), routingNodes, explanations); logClusterHealthStateChange( - new ClusterStateHealth(clusterState), - new ClusterStateHealth(clusterState.getMetaData(), result.routingTable()), - "reroute commands" + new ClusterStateHealth(clusterState), + new ClusterStateHealth(clusterState.getMetaData(), result.routingTable()), + "reroute commands" ); return result; } @@ -263,9 +278,9 @@ public class AllocationService extends AbstractComponent { } RoutingAllocation.Result result = buildChangedResult(clusterState.metaData(), routingNodes); logClusterHealthStateChange( - new ClusterStateHealth(clusterState), - new ClusterStateHealth(clusterState.getMetaData(), result.routingTable()), - reason + new ClusterStateHealth(clusterState), + new ClusterStateHealth(clusterState.getMetaData(), result.routingTable()), + reason ); return result; } @@ -377,8 +392,8 @@ public class AllocationService extends AbstractComponent { } for (ShardRouting shardToFail : shardsToFail) { changed |= applyFailedShard(allocation, shardToFail, false, - new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, "primary failed while replica initializing", - null, 
allocation.getCurrentNanoTime(), System.currentTimeMillis())); + new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, "primary failed while replica initializing", + null, allocation.getCurrentNanoTime(), System.currentTimeMillis())); } // now, go over and elect a new primary if possible, not, from this code block on, if one is elected, @@ -440,7 +455,7 @@ public class AllocationService extends AbstractComponent { // now, go over all the shards routing on the node, and fail them for (ShardRouting shardRouting : node.copyShards()) { UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, "node_left[" + node.nodeId() + "]", null, - allocation.getCurrentNanoTime(), System.currentTimeMillis()); + allocation.getCurrentNanoTime(), System.currentTimeMillis()); applyFailedShard(allocation, shardRouting, false, unassignedInfo); } // its a dead node, remove it, note, its important to remove it *after* we apply failed shard diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java index 8d436bb8f69..fa9841173d2 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java @@ -39,12 +39,12 @@ import static java.util.Collections.unmodifiableSet; /** * The {@link RoutingAllocation} keep the state of the current allocation * of shards and holds the {@link AllocationDeciders} which are responsible - * for the current routing state. + * for the current routing state. */ public class RoutingAllocation { /** - * this class is used to describe results of a {@link RoutingAllocation} + * this class is used to describe results of a {@link RoutingAllocation} */ public static class Result { @@ -58,9 +58,10 @@ public class RoutingAllocation { /** * Creates a new {@link RoutingAllocation.Result} - * @param changed a flag to determine whether the actual {@link RoutingTable} has been changed + * + * @param changed a flag to determine whether the actual {@link RoutingTable} has been changed * @param routingTable the {@link RoutingTable} this Result references - * @param metaData the {@link MetaData} this result refrences + * @param metaData the {@link MetaData} this Result references */ public Result(boolean changed, RoutingTable routingTable, MetaData metaData) { this.changed = changed; @@ -70,9 +71,10 @@ public class RoutingAllocation { /** * Creates a new {@link RoutingAllocation.Result} - * @param changed a flag to determine whether the actual {@link RoutingTable} has been changed + * + * @param changed a flag to determine whether the actual {@link RoutingTable} has been changed * @param routingTable the {@link RoutingTable} this Result references - * @param metaData the {@link MetaData} this Result references + * @param metaData the {@link MetaData} this Result references * @param explanations Explanation for the reroute actions */ public Result(boolean changed, RoutingTable routingTable, MetaData metaData, RoutingExplanations explanations) { @@ -82,7 +84,9 @@ public class RoutingAllocation { this.explanations = explanations; } - /** determine whether the actual {@link RoutingTable} has been changed + /** + * determine whether the actual {@link RoutingTable} has been changed + * * @return true if the {@link RoutingTable} has been changed by allocation. 
Otherwise false */ public boolean changed() { @@ -91,6 +95,7 @@ public class RoutingAllocation { /** * Get the {@link MetaData} referenced by this result + * * @return referenced {@link MetaData} */ public MetaData metaData() { @@ -99,6 +104,7 @@ public class RoutingAllocation { /** * Get the {@link RoutingTable} referenced by this result + * * @return referenced {@link RoutingTable} */ public RoutingTable routingTable() { @@ -107,6 +113,7 @@ public class RoutingAllocation { /** * Get the explanation of this result + * * @return explanation */ public RoutingExplanations explanations() { @@ -137,9 +144,10 @@ public class RoutingAllocation { /** * Creates a new {@link RoutingAllocation} - * @param deciders {@link AllocationDeciders} to used to make decisions for routing allocations - * @param routingNodes Routing nodes in the current cluster - * @param nodes TODO: Documentation + * + * @param deciders {@link AllocationDeciders} to used to make decisions for routing allocations + * @param routingNodes Routing nodes in the current cluster + * @param nodes TODO: Documentation * @param currentNanoTime the nano time to use for all delay allocation calculation (typically {@link System#nanoTime()}) */ public RoutingAllocation(AllocationDeciders deciders, RoutingNodes routingNodes, DiscoveryNodes nodes, ClusterInfo clusterInfo, long currentNanoTime) { @@ -157,6 +165,7 @@ public class RoutingAllocation { /** * Get {@link AllocationDeciders} used for allocation + * * @return {@link AllocationDeciders} used for allocation */ public AllocationDeciders deciders() { @@ -165,6 +174,7 @@ public class RoutingAllocation { /** * Get routing table of current nodes + * * @return current routing table */ public RoutingTable routingTable() { @@ -173,6 +183,7 @@ public class RoutingAllocation { /** * Get current routing nodes + * * @return routing nodes */ public RoutingNodes routingNodes() { @@ -181,6 +192,7 @@ public class RoutingAllocation { /** * Get metadata of routing nodes + * * @return Metadata of routing nodes */ public MetaData metaData() { @@ -189,6 +201,7 @@ public class RoutingAllocation { /** * Get discovery nodes in current routing + * * @return discovery nodes */ public DiscoveryNodes nodes() { @@ -201,6 +214,7 @@ public class RoutingAllocation { /** * Get explanations of current routing + * * @return explanation of routing */ public AllocationExplanation explanation() { @@ -257,10 +271,11 @@ public class RoutingAllocation { /** * Create a routing decision, including the reason if the debug flag is * turned on - * @param decision decision whether to allow/deny allocation + * + * @param decision decision whether to allow/deny allocation * @param deciderLabel a human readable label for the AllocationDecider - * @param reason a format string explanation of the decision - * @param params format string parameters + * @param reason a format string explanation of the decision + * @param params format string parameters */ public Decision decision(Decision decision, String deciderLabel, String reason, Object... 
params) { if (debugDecision()) { diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index 8fb8dc185b8..d8393d07f54 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -33,6 +33,7 @@ import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; @@ -51,6 +52,8 @@ import org.elasticsearch.transport.TransportService; import java.util.*; import java.util.concurrent.*; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.stream.Collectors; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; @@ -87,6 +90,7 @@ public class InternalClusterService extends AbstractLifecycleComponent priorityClusterStateListeners = new CopyOnWriteArrayList<>(); private final Collection clusterStateListeners = new CopyOnWriteArrayList<>(); private final Collection lastClusterStateListeners = new CopyOnWriteArrayList<>(); + private final Map> updateTasksPerExecutor = new HashMap<>(); // TODO this is rather frequently changing I guess a Synced Set would be better here and a dedicated remove API private final Collection postAppliedListeners = new CopyOnWriteArrayList<>(); private final Iterable preAppliedListeners = Iterables.concat(priorityClusterStateListeners, clusterStateListeners, lastClusterStateListeners); @@ -265,30 +269,34 @@ public class InternalClusterService extends AbstractLifecycleComponent void submitStateUpdateTask(final String source, final T task, + final ClusterStateTaskConfig config, + final ClusterStateTaskExecutor executor, + final ClusterStateTaskListener listener + ) { if (!lifecycle.started()) { return; } try { - final UpdateTask task = new UpdateTask(source, priority, updateTask); - if (updateTask.timeout() != null) { - updateTasksExecutor.execute(task, threadPool.scheduler(), updateTask.timeout(), new Runnable() { - @Override - public void run() { - threadPool.generic().execute(new Runnable() { - @Override - public void run() { - updateTask.onFailure(task.source(), new ProcessClusterEventTimeoutException(updateTask.timeout(), task.source())); - } - }); + final UpdateTask updateTask = new UpdateTask<>(source, task, config, executor, listener); + + synchronized (updateTasksPerExecutor) { + updateTasksPerExecutor.computeIfAbsent(executor, k -> new ArrayList<>()).add(updateTask); + } + + if (config.timeout() != null) { + updateTasksExecutor.execute(updateTask, threadPool.scheduler(), config.timeout(), () -> threadPool.generic().execute(() -> { + if (updateTask.processed.getAndSet(true) == false) { + listener.onFailure(source, new ProcessClusterEventTimeoutException(config.timeout(), source)); } - }); + })); } else { - updateTasksExecutor.execute(task); + updateTasksExecutor.execute(updateTask); } } catch (EsRejectedExecutionException e) { // ignore cases where we are shutting down..., there is really nothing interesting @@ -355,190 +363,240 @@ public class InternalClusterService extends AbstractLifecycleComponent void 
runTasksForExecutor(ClusterStateTaskExecutor executor) { + final ArrayList> toExecute = new ArrayList<>(); + final ArrayList sources = new ArrayList<>(); + synchronized (updateTasksPerExecutor) { + List pending = updateTasksPerExecutor.remove(executor); + if (pending != null) { + for (UpdateTask task : pending) { + if (task.processed.getAndSet(true) == false) { + logger.trace("will process [{}]", task.source); + toExecute.add(task); + sources.add(task.source); + } else { + logger.trace("skipping [{}], already processed", task.source); + } + } + } + } + if (toExecute.isEmpty()) { + return; + } + final String source = Strings.collectionToCommaDelimitedString(sources); + if (!lifecycle.started()) { + logger.debug("processing [{}]: ignoring, cluster_service not started", source); + return; + } + logger.debug("processing [{}]: execute", source); + ClusterState previousClusterState = clusterState; + if (!previousClusterState.nodes().localNodeMaster() && executor.runOnlyOnMaster()) { + logger.debug("failing [{}]: local node is no longer master", source); + toExecute.stream().forEach(task -> task.listener.onNoLongerMaster(task.source)); + return; + } + ClusterStateTaskExecutor.BatchResult batchResult; + long startTimeNS = System.nanoTime(); + try { + List inputs = toExecute.stream().map(tUpdateTask -> tUpdateTask.task).collect(Collectors.toList()); + batchResult = executor.execute(previousClusterState, inputs); + } catch (Throwable e) { + TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startTimeNS))); + if (logger.isTraceEnabled()) { + StringBuilder sb = new StringBuilder("failed to execute cluster state update in ").append(executionTime).append(", state:\nversion [").append(previousClusterState.version()).append("], source [").append(source).append("]\n"); + sb.append(previousClusterState.nodes().prettyPrint()); + sb.append(previousClusterState.routingTable().prettyPrint()); + sb.append(previousClusterState.getRoutingNodes().prettyPrint()); + logger.trace(sb.toString(), e); + } + warnAboutSlowTaskIfNeeded(executionTime, source); + batchResult = ClusterStateTaskExecutor.BatchResult.builder().failures(toExecute.stream().map(updateTask -> updateTask.task)::iterator, e).build(previousClusterState); } - @Override - public void run() { - if (!lifecycle.started()) { - logger.debug("processing [{}]: ignoring, cluster_service not started", source); - return; - } - logger.debug("processing [{}]: execute", source); - ClusterState previousClusterState = clusterState; - if (!previousClusterState.nodes().localNodeMaster() && updateTask.runOnlyOnMaster()) { - logger.debug("failing [{}]: local node is no longer master", source); - updateTask.onNoLongerMaster(source); - return; - } - ClusterState newClusterState; - long startTimeNS = System.nanoTime(); - try { - newClusterState = updateTask.execute(previousClusterState); - } catch (Throwable e) { - TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startTimeNS))); - if (logger.isTraceEnabled()) { - StringBuilder sb = new StringBuilder("failed to execute cluster state update in ").append(executionTime).append(", state:\nversion [").append(previousClusterState.version()).append("], source [").append(source).append("]\n"); - sb.append(previousClusterState.nodes().prettyPrint()); - sb.append(previousClusterState.routingTable().prettyPrint()); - sb.append(previousClusterState.getRoutingNodes().prettyPrint()); - logger.trace(sb.toString(), e); - } - 
warnAboutSlowTaskIfNeeded(executionTime, source); - updateTask.onFailure(source, e); - return; - } + assert batchResult.executionResults != null; - if (previousClusterState == newClusterState) { - if (updateTask instanceof AckedClusterStateUpdateTask) { + ClusterState newClusterState = batchResult.resultingState; + final ArrayList> proccessedListeners = new ArrayList<>(); + // fail all tasks that have failed and extract those that are waiting for results + for (UpdateTask updateTask : toExecute) { + assert batchResult.executionResults.containsKey(updateTask.task) : "missing " + updateTask.task.toString(); + final ClusterStateTaskExecutor.TaskResult executionResult = + batchResult.executionResults.get(updateTask.task); + executionResult.handle(() -> proccessedListeners.add(updateTask), ex -> updateTask.listener.onFailure(updateTask.source, ex)); + } + + if (previousClusterState == newClusterState) { + for (UpdateTask task : proccessedListeners) { + if (task.listener instanceof AckedClusterStateTaskListener) { //no need to wait for ack if nothing changed, the update can be counted as acknowledged - ((AckedClusterStateUpdateTask) updateTask).onAllNodesAcked(null); + ((AckedClusterStateTaskListener) task.listener).onAllNodesAcked(null); } - updateTask.clusterStateProcessed(source, previousClusterState, newClusterState); - TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startTimeNS))); - logger.debug("processing [{}]: took {} no change in cluster_state", source, executionTime); - warnAboutSlowTaskIfNeeded(executionTime, source); - return; + task.listener.clusterStateProcessed(task.source, previousClusterState, newClusterState); } + TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startTimeNS))); + logger.debug("processing [{}]: took {} no change in cluster_state", source, executionTime); + warnAboutSlowTaskIfNeeded(executionTime, source); + return; + } - try { - Discovery.AckListener ackListener = new NoOpAckListener(); - if (newClusterState.nodes().localNodeMaster()) { - // only the master controls the version numbers - Builder builder = ClusterState.builder(newClusterState).incrementVersion(); - if (previousClusterState.routingTable() != newClusterState.routingTable()) { - builder.routingTable(RoutingTable.builder(newClusterState.routingTable()).version(newClusterState.routingTable().version() + 1).build()); - } - if (previousClusterState.metaData() != newClusterState.metaData()) { - builder.metaData(MetaData.builder(newClusterState.metaData()).version(newClusterState.metaData().version() + 1)); - } - newClusterState = builder.build(); - - if (updateTask instanceof AckedClusterStateUpdateTask) { - final AckedClusterStateUpdateTask ackedUpdateTask = (AckedClusterStateUpdateTask) updateTask; - if (ackedUpdateTask.ackTimeout() == null || ackedUpdateTask.ackTimeout().millis() == 0) { - ackedUpdateTask.onAckTimeout(); + try { + ArrayList ackListeners = new ArrayList<>(); + if (newClusterState.nodes().localNodeMaster()) { + // only the master controls the version numbers + Builder builder = ClusterState.builder(newClusterState).incrementVersion(); + if (previousClusterState.routingTable() != newClusterState.routingTable()) { + builder.routingTable(RoutingTable.builder(newClusterState.routingTable()).version(newClusterState.routingTable().version() + 1).build()); + } + if (previousClusterState.metaData() != newClusterState.metaData()) { + 
builder.metaData(MetaData.builder(newClusterState.metaData()).version(newClusterState.metaData().version() + 1)); + } + newClusterState = builder.build(); + for (UpdateTask task : proccessedListeners) { + if (task.listener instanceof AckedClusterStateTaskListener) { + final AckedClusterStateTaskListener ackedListener = (AckedClusterStateTaskListener) task.listener; + if (ackedListener.ackTimeout() == null || ackedListener.ackTimeout().millis() == 0) { + ackedListener.onAckTimeout(); } else { try { - ackListener = new AckCountDownListener(ackedUpdateTask, newClusterState.version(), newClusterState.nodes(), threadPool); + ackListeners.add(new AckCountDownListener(ackedListener, newClusterState.version(), newClusterState.nodes(), threadPool)); } catch (EsRejectedExecutionException ex) { if (logger.isDebugEnabled()) { logger.debug("Couldn't schedule timeout thread - node might be shutting down", ex); } //timeout straightaway, otherwise we could wait forever as the timeout thread has not started - ackedUpdateTask.onAckTimeout(); + ackedListener.onAckTimeout(); } } } } - - newClusterState.status(ClusterState.ClusterStateStatus.BEING_APPLIED); - - if (logger.isTraceEnabled()) { - StringBuilder sb = new StringBuilder("cluster state updated, source [").append(source).append("]\n"); - sb.append(newClusterState.prettyPrint()); - logger.trace(sb.toString()); - } else if (logger.isDebugEnabled()) { - logger.debug("cluster state updated, version [{}], source [{}]", newClusterState.version(), source); - } - - ClusterChangedEvent clusterChangedEvent = new ClusterChangedEvent(source, newClusterState, previousClusterState); - // new cluster state, notify all listeners - final DiscoveryNodes.Delta nodesDelta = clusterChangedEvent.nodesDelta(); - if (nodesDelta.hasChanges() && logger.isInfoEnabled()) { - String summary = nodesDelta.shortSummary(); - if (summary.length() > 0) { - logger.info("{}, reason: {}", summary, source); - } - } - - // TODO, do this in parallel (and wait) - for (DiscoveryNode node : nodesDelta.addedNodes()) { - if (!nodeRequiresConnection(node)) { - continue; - } - try { - transportService.connectToNode(node); - } catch (Throwable e) { - // the fault detection will detect it as failed as well - logger.warn("failed to connect to node [" + node + "]", e); - } - } - - // if we are the master, publish the new state to all nodes - // we publish here before we send a notification to all the listeners, since if it fails - // we don't want to notify - if (newClusterState.nodes().localNodeMaster()) { - logger.debug("publishing cluster state version [{}]", newClusterState.version()); - try { - discoveryService.publish(clusterChangedEvent, ackListener); - } catch (Discovery.FailedToCommitClusterStateException t) { - logger.warn("failing [{}]: failed to commit cluster state version [{}]", t, source, newClusterState.version()); - updateTask.onFailure(source, t); - return; - } - } - - // update the current cluster state - clusterState = newClusterState; - logger.debug("set local cluster state to version {}", newClusterState.version()); - for (ClusterStateListener listener : preAppliedListeners) { - try { - logger.trace("calling [{}] with change to version [{}]", listener, newClusterState.version()); - listener.clusterChanged(clusterChangedEvent); - } catch (Exception ex) { - logger.warn("failed to notify ClusterStateListener", ex); - } - } - - for (DiscoveryNode node : nodesDelta.removedNodes()) { - try { - transportService.disconnectFromNode(node); - } catch (Throwable e) { - logger.warn("failed to 
disconnect to node [" + node + "]", e); - } - } - - newClusterState.status(ClusterState.ClusterStateStatus.APPLIED); - - for (ClusterStateListener listener : postAppliedListeners) { - try { - logger.trace("calling [{}] with change to version [{}]", listener, newClusterState.version()); - listener.clusterChanged(clusterChangedEvent); - } catch (Exception ex) { - logger.warn("failed to notify ClusterStateListener", ex); - } - } - - //manual ack only from the master at the end of the publish - if (newClusterState.nodes().localNodeMaster()) { - try { - ackListener.onNodeAck(newClusterState.nodes().localNode(), null); - } catch (Throwable t) { - logger.debug("error while processing ack for master node [{}]", t, newClusterState.nodes().localNode()); - } - } - - updateTask.clusterStateProcessed(source, previousClusterState, newClusterState); - - TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startTimeNS))); - logger.debug("processing [{}]: took {} done applying updated cluster_state (version: {}, uuid: {})", source, executionTime, newClusterState.version(), newClusterState.stateUUID()); - warnAboutSlowTaskIfNeeded(executionTime, source); - } catch (Throwable t) { - TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startTimeNS))); - StringBuilder sb = new StringBuilder("failed to apply updated cluster state in ").append(executionTime).append(":\nversion [").append(newClusterState.version()).append("], uuid [").append(newClusterState.stateUUID()).append("], source [").append(source).append("]\n"); - sb.append(newClusterState.nodes().prettyPrint()); - sb.append(newClusterState.routingTable().prettyPrint()); - sb.append(newClusterState.getRoutingNodes().prettyPrint()); - logger.warn(sb.toString(), t); - // TODO: do we want to call updateTask.onFailure here? 
} + final Discovery.AckListener ackListener = new DelegetingAckListener(ackListeners); + + newClusterState.status(ClusterState.ClusterStateStatus.BEING_APPLIED); + + if (logger.isTraceEnabled()) { + StringBuilder sb = new StringBuilder("cluster state updated, source [").append(source).append("]\n"); + sb.append(newClusterState.prettyPrint()); + logger.trace(sb.toString()); + } else if (logger.isDebugEnabled()) { + logger.debug("cluster state updated, version [{}], source [{}]", newClusterState.version(), source); + } + + ClusterChangedEvent clusterChangedEvent = new ClusterChangedEvent(source, newClusterState, previousClusterState); + // new cluster state, notify all listeners + final DiscoveryNodes.Delta nodesDelta = clusterChangedEvent.nodesDelta(); + if (nodesDelta.hasChanges() && logger.isInfoEnabled()) { + String summary = nodesDelta.shortSummary(); + if (summary.length() > 0) { + logger.info("{}, reason: {}", summary, source); + } + } + + // TODO, do this in parallel (and wait) + for (DiscoveryNode node : nodesDelta.addedNodes()) { + if (!nodeRequiresConnection(node)) { + continue; + } + try { + transportService.connectToNode(node); + } catch (Throwable e) { + // the fault detection will detect it as failed as well + logger.warn("failed to connect to node [" + node + "]", e); + } + } + + // if we are the master, publish the new state to all nodes + // we publish here before we send a notification to all the listeners, since if it fails + // we don't want to notify + if (newClusterState.nodes().localNodeMaster()) { + logger.debug("publishing cluster state version [{}]", newClusterState.version()); + try { + discoveryService.publish(clusterChangedEvent, ackListener); + } catch (Discovery.FailedToCommitClusterStateException t) { + logger.warn("failing [{}]: failed to commit cluster state version [{}]", t, source, newClusterState.version()); + proccessedListeners.forEach(task -> task.listener.onFailure(task.source, t)); + return; + } + } + + // update the current cluster state + clusterState = newClusterState; + logger.debug("set local cluster state to version {}", newClusterState.version()); + for (ClusterStateListener listener : preAppliedListeners) { + try { + logger.trace("calling [{}] with change to version [{}]", listener, newClusterState.version()); + listener.clusterChanged(clusterChangedEvent); + } catch (Exception ex) { + logger.warn("failed to notify ClusterStateListener", ex); + } + } + + for (DiscoveryNode node : nodesDelta.removedNodes()) { + try { + transportService.disconnectFromNode(node); + } catch (Throwable e) { + logger.warn("failed to disconnect to node [" + node + "]", e); + } + } + + newClusterState.status(ClusterState.ClusterStateStatus.APPLIED); + + for (ClusterStateListener listener : postAppliedListeners) { + try { + logger.trace("calling [{}] with change to version [{}]", listener, newClusterState.version()); + listener.clusterChanged(clusterChangedEvent); + } catch (Exception ex) { + logger.warn("failed to notify ClusterStateListener", ex); + } + } + + //manual ack only from the master at the end of the publish + if (newClusterState.nodes().localNodeMaster()) { + try { + ackListener.onNodeAck(newClusterState.nodes().localNode(), null); + } catch (Throwable t) { + logger.debug("error while processing ack for master node [{}]", t, newClusterState.nodes().localNode()); + } + } + + for (UpdateTask task : proccessedListeners) { + task.listener.clusterStateProcessed(task.source, previousClusterState, newClusterState); + } + + TimeValue executionTime = 
TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startTimeNS))); + logger.debug("processing [{}]: took {} done applying updated cluster_state (version: {}, uuid: {})", source, executionTime, newClusterState.version(), newClusterState.stateUUID()); + warnAboutSlowTaskIfNeeded(executionTime, source); + } catch (Throwable t) { + TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startTimeNS))); + StringBuilder sb = new StringBuilder("failed to apply updated cluster state in ").append(executionTime).append(":\nversion [").append(newClusterState.version()).append("], uuid [").append(newClusterState.stateUUID()).append("], source [").append(source).append("]\n"); + sb.append(newClusterState.nodes().prettyPrint()); + sb.append(newClusterState.routingTable().prettyPrint()); + sb.append(newClusterState.getRoutingNodes().prettyPrint()); + logger.warn(sb.toString(), t); + // TODO: do we want to call updateTask.onFailure here? + } + + } + + class UpdateTask extends SourcePrioritizedRunnable { + + public final T task; + public final ClusterStateTaskConfig config; + public final ClusterStateTaskExecutor executor; + public final ClusterStateTaskListener listener; + public final AtomicBoolean processed = new AtomicBoolean(); + + UpdateTask(String source, T task, ClusterStateTaskConfig config, ClusterStateTaskExecutor executor, ClusterStateTaskListener listener) { + super(config.priority(), source); + this.task = task; + this.config = config; + this.executor = executor; + this.listener = listener; + } + + @Override + public void run() { + runTasksForExecutor(executor); } } @@ -707,13 +765,24 @@ public class InternalClusterService extends AbstractLifecycleComponent listeners; + + private DelegetingAckListener(List listeners) { + this.listeners = listeners; + } + @Override public void onNodeAck(DiscoveryNode node, @Nullable Throwable t) { + for (Discovery.AckListener listener : listeners) { + listener.onNodeAck(node, t); + } } @Override public void onTimeout() { + throw new UnsupportedOperationException("no timeout delegation"); } } @@ -721,20 +790,20 @@ public class InternalClusterService extends AbstractLifecycleComponent ackTimeoutCallback; private Throwable lastFailure; - AckCountDownListener(AckedClusterStateUpdateTask ackedUpdateTask, long clusterStateVersion, DiscoveryNodes nodes, ThreadPool threadPool) { - this.ackedUpdateTask = ackedUpdateTask; + AckCountDownListener(AckedClusterStateTaskListener ackedTaskListener, long clusterStateVersion, DiscoveryNodes nodes, ThreadPool threadPool) { + this.ackedTaskListener = ackedTaskListener; this.clusterStateVersion = clusterStateVersion; this.nodes = nodes; int countDown = 0; for (DiscoveryNode node : nodes) { - if (ackedUpdateTask.mustAck(node)) { + if (ackedTaskListener.mustAck(node)) { countDown++; } } @@ -742,7 +811,7 @@ public class InternalClusterService extends AbstractLifecycleComponent= 3 && + bytes.get(0) == 'Z' && + bytes.get(1) == 'V' && + (bytes.get(2) == 0 || bytes.get(2) == 1); + } + public static Compressor compressor(ChannelBuffer buffer) { for (Compressor compressor : compressors) { if (compressor.isCompressed(buffer)) { diff --git a/core/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedIndexInput.java b/core/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedIndexInput.java deleted file mode 100644 index 93bd583662b..00000000000 --- a/core/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedIndexInput.java +++ 
/dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.compress.lzf; - -import com.ning.compress.lzf.ChunkDecoder; -import com.ning.compress.lzf.LZFChunk; -import org.apache.lucene.store.BufferedIndexInput; -import org.apache.lucene.store.IndexInput; -import org.elasticsearch.common.compress.CompressedIndexInput; -import org.elasticsearch.common.lucene.store.InputStreamIndexInput; - -import java.io.IOException; -import java.util.Arrays; - -/** - */ -@Deprecated -public class LZFCompressedIndexInput extends CompressedIndexInput { - - private final ChunkDecoder decoder; - // scratch area buffer - private byte[] inputBuffer; - - public LZFCompressedIndexInput(IndexInput in, ChunkDecoder decoder) throws IOException { - super(in); - - this.decoder = decoder; - this.uncompressed = new byte[LZFChunk.MAX_CHUNK_LEN]; - this.uncompressedLength = LZFChunk.MAX_CHUNK_LEN; - this.inputBuffer = new byte[LZFChunk.MAX_CHUNK_LEN]; - } - - @Override - protected void readHeader(IndexInput in) throws IOException { - byte[] header = new byte[LZFCompressor.LUCENE_HEADER.length]; - in.readBytes(header, 0, header.length, false); - if (!Arrays.equals(header, LZFCompressor.LUCENE_HEADER)) { - throw new IOException("wrong lzf compressed header [" + Arrays.toString(header) + "]"); - } - } - - @Override - protected int uncompress(IndexInput in, byte[] out) throws IOException { - return decoder.decodeChunk(new InputStreamIndexInput(in, Long.MAX_VALUE), inputBuffer, out); - } - - @Override - protected void doClose() throws IOException { - // nothing to do here... - } - - @Override - public IndexInput clone() { - LZFCompressedIndexInput cloned = (LZFCompressedIndexInput) super.clone(); - cloned.inputBuffer = new byte[LZFChunk.MAX_CHUNK_LEN]; - return cloned; - } - - @Override - public IndexInput slice(String description, long offset, long length) throws IOException { - return BufferedIndexInput.wrap(description, this, offset, length); - } -} diff --git a/core/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamInput.java b/core/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamInput.java deleted file mode 100644 index baefcaa8928..00000000000 --- a/core/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamInput.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.compress.lzf; - -import com.ning.compress.BufferRecycler; -import com.ning.compress.lzf.ChunkDecoder; -import com.ning.compress.lzf.LZFChunk; -import org.elasticsearch.common.compress.CompressedStreamInput; -import org.elasticsearch.common.io.stream.StreamInput; - -import java.io.IOException; - -/** - */ -public class LZFCompressedStreamInput extends CompressedStreamInput { - - private final BufferRecycler recycler; - - private final ChunkDecoder decoder; - - // scratch area buffer - private byte[] inputBuffer; - - public LZFCompressedStreamInput(StreamInput in, ChunkDecoder decoder) throws IOException { - super(in); - this.recycler = BufferRecycler.instance(); - this.decoder = decoder; - - this.uncompressed = recycler.allocDecodeBuffer(LZFChunk.MAX_CHUNK_LEN); - this.inputBuffer = recycler.allocInputBuffer(LZFChunk.MAX_CHUNK_LEN); - } - - @Override - public void readHeader(StreamInput in) throws IOException { - // nothing to do here, each chunk has a header - } - - @Override - public int uncompress(StreamInput in, byte[] out) throws IOException { - return decoder.decodeChunk(in, inputBuffer, out); - } - - @Override - protected void doClose() throws IOException { - byte[] buf = inputBuffer; - if (buf != null) { - inputBuffer = null; - recycler.releaseInputBuffer(buf); - } - buf = uncompressed; - if (buf != null) { - uncompressed = null; - recycler.releaseDecodeBuffer(uncompressed); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java b/core/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java deleted file mode 100644 index bb7a642c987..00000000000 --- a/core/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.compress.lzf; - -import com.ning.compress.lzf.ChunkDecoder; -import com.ning.compress.lzf.LZFChunk; -import com.ning.compress.lzf.util.ChunkDecoderFactory; -import org.apache.lucene.store.IndexInput; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedIndexInput; -import org.elasticsearch.common.compress.Compressor; -import org.elasticsearch.common.compress.deflate.DeflateCompressor; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.Loggers; -import org.jboss.netty.buffer.ChannelBuffer; - -import java.io.IOException; - -/** - * @deprecated Use {@link DeflateCompressor} instead - */ -@Deprecated -public class LZFCompressor implements Compressor { - - static final byte[] LUCENE_HEADER = {'L', 'Z', 'F', 0}; - - private ChunkDecoder decoder; - - public LZFCompressor() { - this.decoder = ChunkDecoderFactory.safeInstance(); - Loggers.getLogger(LZFCompressor.class).debug("using decoder[{}] ", this.decoder.getClass().getSimpleName()); - } - - @Override - public boolean isCompressed(BytesReference bytes) { - return bytes.length() >= 3 && - bytes.get(0) == LZFChunk.BYTE_Z && - bytes.get(1) == LZFChunk.BYTE_V && - (bytes.get(2) == LZFChunk.BLOCK_TYPE_COMPRESSED || bytes.get(2) == LZFChunk.BLOCK_TYPE_NON_COMPRESSED); - } - - @Override - public boolean isCompressed(ChannelBuffer buffer) { - int offset = buffer.readerIndex(); - return buffer.readableBytes() >= 3 && - buffer.getByte(offset) == LZFChunk.BYTE_Z && - buffer.getByte(offset + 1) == LZFChunk.BYTE_V && - (buffer.getByte(offset + 2) == LZFChunk.BLOCK_TYPE_COMPRESSED || buffer.getByte(offset + 2) == LZFChunk.BLOCK_TYPE_NON_COMPRESSED); - } - - @Override - public boolean isCompressed(IndexInput in) throws IOException { - long currentPointer = in.getFilePointer(); - // since we have some metdata before the first compressed header, we check on our specific header - if (in.length() - currentPointer < (LUCENE_HEADER.length)) { - return false; - } - for (int i = 0; i < LUCENE_HEADER.length; i++) { - if (in.readByte() != LUCENE_HEADER[i]) { - in.seek(currentPointer); - return false; - } - } - in.seek(currentPointer); - return true; - } - - @Override - public StreamInput streamInput(StreamInput in) throws IOException { - return new LZFCompressedStreamInput(in, decoder); - } - - @Override - public StreamOutput streamOutput(StreamOutput out) throws IOException { - throw new UnsupportedOperationException("LZF is only here for back compat, no write support"); - } - - @Override - public CompressedIndexInput indexInput(IndexInput in) throws IOException { - return new LZFCompressedIndexInput(in, decoder); - } -} diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java index f1054e18663..5f11d12a4bf 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java @@ -21,24 +21,30 @@ package org.elasticsearch.common.geo.builders; import com.spatial4j.core.shape.Circle; import com.vividsolutions.jts.geom.Coordinate; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.unit.DistanceUnit.Distance; import 
org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Objects; public class CircleBuilder extends ShapeBuilder { public static final String FIELD_RADIUS = "radius"; public static final GeoShapeType TYPE = GeoShapeType.CIRCLE; + public static final CircleBuilder PROTOTYPE = new CircleBuilder(); + private DistanceUnit unit; private double radius; private Coordinate center; - + /** * Set the center of the circle - * + * * @param center coordinate of the circles center * @return this */ @@ -57,6 +63,13 @@ public class CircleBuilder extends ShapeBuilder { return center(new Coordinate(lon, lat)); } + /** + * Get the center of the circle + */ + public Coordinate center() { + return center; + } + /** * Set the radius of the circle. The String value will be parsed by {@link DistanceUnit} * @param radius Value and unit of the circle combined in a string @@ -97,10 +110,24 @@ public class CircleBuilder extends ShapeBuilder { return this; } + /** + * Get the radius of the circle without unit + */ + public double radius() { + return this.radius; + } + + /** + * Get the radius unit of the circle + */ + public DistanceUnit unit() { + return this.unit; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); builder.field(FIELD_RADIUS, unit.toString(radius)); builder.field(FIELD_COORDINATES); toXContent(builder, center); @@ -116,4 +143,37 @@ public class CircleBuilder extends ShapeBuilder { public GeoShapeType type() { return TYPE; } + + @Override + public int hashCode() { + return Objects.hash(center, radius, unit.ordinal()); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + CircleBuilder other = (CircleBuilder) obj; + return Objects.equals(center, other.center) && + Objects.equals(radius, other.radius) && + Objects.equals(unit.ordinal(), other.unit.ordinal()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + writeCoordinateTo(center, out); + out.writeDouble(radius); + DistanceUnit.writeDistanceUnit(out, unit); + } + + @Override + public CircleBuilder readFrom(StreamInput in) throws IOException { + return new CircleBuilder() + .center(readCoordinateFrom(in)) + .radius(in.readDouble(), DistanceUnit.readDistanceUnit(in)); + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java index a296b3406ef..62f29d2bad7 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java @@ -21,13 +21,19 @@ package org.elasticsearch.common.geo.builders; import com.spatial4j.core.shape.Rectangle; import com.vividsolutions.jts.geom.Coordinate; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Locale; +import java.util.Objects; public class EnvelopeBuilder extends ShapeBuilder { - public static final GeoShapeType TYPE = GeoShapeType.ENVELOPE; + public static final GeoShapeType TYPE = GeoShapeType.ENVELOPE; + public static final EnvelopeBuilder PROTOTYPE = new 
EnvelopeBuilder(); protected Coordinate topLeft; protected Coordinate bottomRight; @@ -61,7 +67,8 @@ public class EnvelopeBuilder extends ShapeBuilder { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); + builder.field(FIELD_ORIENTATION, orientation.name().toLowerCase(Locale.ROOT)); builder.startArray(FIELD_COORDINATES); toXContent(builder, topLeft); toXContent(builder, bottomRight); @@ -78,4 +85,38 @@ public class EnvelopeBuilder extends ShapeBuilder { public GeoShapeType type() { return TYPE; } + + @Override + public int hashCode() { + return Objects.hash(orientation, topLeft, bottomRight); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + EnvelopeBuilder other = (EnvelopeBuilder) obj; + return Objects.equals(orientation, other.orientation) && + Objects.equals(topLeft, other.topLeft) && + Objects.equals(bottomRight, other.bottomRight); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(orientation == Orientation.RIGHT); + writeCoordinateTo(topLeft, out); + writeCoordinateTo(bottomRight, out); + } + + @Override + public EnvelopeBuilder readFrom(StreamInput in) throws IOException { + Orientation orientation = in.readBoolean() ? Orientation.RIGHT : Orientation.LEFT; + return new EnvelopeBuilder(orientation) + .topLeft(readCoordinateFrom(in)) + .bottomRight(readCoordinateFrom(in)); + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java index 57f3fc67b64..45397ed962f 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java @@ -102,7 +102,7 @@ public class GeometryCollectionBuilder extends ShapeBuilder { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); builder.startArray(FIELD_GEOMETRIES); for (ShapeBuilder shape : shapes) { shape.toXContent(builder, params); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java index 265efe11621..c7ba9b72f55 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java @@ -34,12 +34,10 @@ public class LineStringBuilder extends PointCollection { public static final GeoShapeType TYPE = GeoShapeType.LINESTRING; - protected boolean translated = false; - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); builder.field(FIELD_COORDINATES); coordinatesToXcontent(builder, false); builder.endObject(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java index 
10ad25c89e1..a004b90a2dc 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java @@ -57,7 +57,7 @@ public class MultiLineStringBuilder extends ShapeBuilder { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); builder.field(FIELD_COORDINATES); builder.startArray(); for(LineStringBuilder line : lines) { diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java index d12baad70d9..8d5cfabdabb 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java @@ -37,7 +37,7 @@ public class MultiPointBuilder extends PointCollection { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); builder.field(FIELD_COORDINATES); super.coordinatesToXcontent(builder, false); builder.endObject(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java index 0998cd2944b..e7762e51b61 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java @@ -51,7 +51,7 @@ public class MultiPolygonBuilder extends ShapeBuilder { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); builder.startArray(FIELD_COORDINATES); for(PolygonBuilder polygon : polygons) { builder.startArray(); @@ -89,6 +89,4 @@ public class MultiPolygonBuilder extends ShapeBuilder { return new XShapeCollection<>(shapes, SPATIAL_CONTEXT); //note: ShapeCollection is probably faster than a Multi* geom. 
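As a hedged illustration of the serialization support the shape builders gain in this change (the snippet itself is not part of the diff), a CircleBuilder can be round-tripped through the new writeTo/readFrom methods roughly as follows; BytesStreamOutput and StreamInput.wrap are assumed to behave as in core.

    import com.vividsolutions.jts.geom.Coordinate;
    import org.elasticsearch.common.geo.builders.CircleBuilder;
    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.common.unit.DistanceUnit;

    public class CircleBuilderRoundTripSketch {
        public static void main(String[] args) throws Exception {
            // Build a circle, write it to a stream, then read it back via the PROTOTYPE instance.
            CircleBuilder original = new CircleBuilder()
                    .center(new Coordinate(10.0, 20.0))
                    .radius(100, DistanceUnit.METERS);

            BytesStreamOutput out = new BytesStreamOutput();
            original.writeTo(out);                      // writes center, radius and unit

            StreamInput in = StreamInput.wrap(out.bytes());
            CircleBuilder copy = CircleBuilder.PROTOTYPE.readFrom(in);

            // equals()/hashCode() are added in this change, so the copy compares equal.
            assert copy.equals(original);
        }
    }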
} - - } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java index 53c67387e91..d6d62c28b8c 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java @@ -20,7 +20,10 @@ package org.elasticsearch.common.geo.builders; import java.io.IOException; +import java.util.Objects; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import com.spatial4j.core.shape.Point; @@ -30,6 +33,8 @@ public class PointBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.POINT; + public static final PointBuilder PROTOTYPE = new PointBuilder(); + private Coordinate coordinate; public PointBuilder coordinate(Coordinate coordinate) { @@ -48,10 +53,10 @@ public class PointBuilder extends ShapeBuilder { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); builder.field(FIELD_COORDINATES); toXContent(builder, coordinate); - return builder.endObject(); + return builder.endObject(); } @Override @@ -63,4 +68,31 @@ public class PointBuilder extends ShapeBuilder { public GeoShapeType type() { return TYPE; } + + @Override + public int hashCode() { + return Objects.hash(coordinate); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + PointBuilder other = (PointBuilder) obj; + return Objects.equals(coordinate, other.coordinate); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + writeCoordinateTo(coordinate, out); + } + + @Override + public PointBuilder readFrom(StreamInput in) throws IOException { + return new PointBuilder().coordinate(readCoordinateFrom(in)); + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index 4a406eb22b8..04540df27e9 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -38,6 +38,7 @@ import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; +import java.util.concurrent.atomic.AtomicBoolean; /** * The {@link PolygonBuilder} implements the groundwork to create polygons. 
This contains @@ -141,9 +142,10 @@ public class PolygonBuilder extends ShapeBuilder { Edge[] edges = new Edge[numEdges]; Edge[] holeComponents = new Edge[holes.size()]; - int offset = createEdges(0, orientation, shell, null, edges, 0); + final AtomicBoolean translated = new AtomicBoolean(false); + int offset = createEdges(0, orientation, shell, null, edges, 0, translated); for (int i = 0; i < holes.size(); i++) { - int length = createEdges(i+1, orientation, shell, this.holes.get(i), edges, offset); + int length = createEdges(i+1, orientation, shell, this.holes.get(i), edges, offset, translated); holeComponents[i] = edges[offset]; offset += length; } @@ -172,7 +174,7 @@ public class PolygonBuilder extends ShapeBuilder { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapename); + builder.field(FIELD_TYPE, TYPE.shapeName()); builder.startArray(FIELD_COORDINATES); coordinatesArray(builder, params); builder.endArray(); @@ -508,14 +510,157 @@ public class PolygonBuilder extends ShapeBuilder { } private static int createEdges(int component, Orientation orientation, LineStringBuilder shell, - LineStringBuilder hole, - Edge[] edges, int offset) { + LineStringBuilder hole, Edge[] edges, int offset, final AtomicBoolean translated) { // inner rings (holes) have an opposite direction than the outer rings // XOR will invert the orientation for outer ring cases (Truth Table:, T/T = F, T/F = T, F/T = T, F/F = F) boolean direction = (component == 0 ^ orientation == Orientation.RIGHT); // set the points array accordingly (shell or hole) Coordinate[] points = (hole != null) ? hole.coordinates(false) : shell.coordinates(false); - Edge.ring(component, direction, orientation == Orientation.LEFT, shell, points, 0, edges, offset, points.length-1); + ring(component, direction, orientation == Orientation.LEFT, shell, points, 0, edges, offset, points.length-1, translated); return points.length-1; } + + /** + * Create a connected list of a list of coordinates + * + * @param points + * array of point + * @param offset + * index of the first point + * @param length + * number of points + * @return Array of edges + */ + private static Edge[] ring(int component, boolean direction, boolean handedness, LineStringBuilder shell, + Coordinate[] points, int offset, Edge[] edges, int toffset, int length, final AtomicBoolean translated) { + // calculate the direction of the points: + // find the point a the top of the set and check its + // neighbors orientation. So direction is equivalent + // to clockwise/counterclockwise + final int top = top(points, offset, length); + final int prev = (offset + ((top + length - 1) % length)); + final int next = (offset + ((top + 1) % length)); + boolean orientation = points[offset + prev].x > points[offset + next].x; + + // OGC requires shell as ccw (Right-Handedness) and holes as cw (Left-Handedness) + // since GeoJSON doesn't specify (and doesn't need to) GEO core will assume OGC standards + // thus if orientation is computed as cw, the logic will translate points across dateline + // and convert to a right handed system + + // compute the bounding box and calculate range + double[] range = range(points, offset, length); + final double rng = range[1] - range[0]; + // translate the points if the following is true + // 1. 
shell orientation is cw and range is greater than a hemisphere (180 degrees) but not spanning 2 hemispheres + // (translation would result in a collapsed poly) + // 2. the shell of the candidate hole has been translated (to preserve the coordinate system) + boolean incorrectOrientation = component == 0 && handedness != orientation; + if ( (incorrectOrientation && (rng > DATELINE && rng != 2*DATELINE)) || (translated.get() && component != 0)) { + translate(points); + // flip the translation bit if the shell is being translated + if (component == 0) { + translated.set(true); + } + // correct the orientation post translation (ccw for shell, cw for holes) + if (component == 0 || (component != 0 && handedness == orientation)) { + orientation = !orientation; + } + } + return concat(component, direction ^ orientation, points, offset, edges, toffset, length); + } + + private static final int top(Coordinate[] points, int offset, int length) { + int top = 0; // we start at 1 here since top points to 0 + for (int i = 1; i < length; i++) { + if (points[offset + i].y < points[offset + top].y) { + top = i; + } else if (points[offset + i].y == points[offset + top].y) { + if (points[offset + i].x < points[offset + top].x) { + top = i; + } + } + } + return top; + } + + private static final double[] range(Coordinate[] points, int offset, int length) { + double minX = points[0].x; + double maxX = points[0].x; + double minY = points[0].y; + double maxY = points[0].y; + // compute the bounding coordinates (@todo: cleanup brute force) + for (int i = 1; i < length; ++i) { + if (points[offset + i].x < minX) { + minX = points[offset + i].x; + } + if (points[offset + i].x > maxX) { + maxX = points[offset + i].x; + } + if (points[offset + i].y < minY) { + minY = points[offset + i].y; + } + if (points[offset + i].y > maxY) { + maxY = points[offset + i].y; + } + } + return new double[] {minX, maxX, minY, maxY}; + } + + /** + * Concatenate a set of points to a polygon + * + * @param component + * component id of the polygon + * @param direction + * direction of the ring + * @param points + * list of points to concatenate + * @param pointOffset + * index of the first point + * @param edges + * Array of edges to write the result to + * @param edgeOffset + * index of the first edge in the result + * @param length + * number of points to use + * @return the edges creates + */ + private static Edge[] concat(int component, boolean direction, Coordinate[] points, final int pointOffset, Edge[] edges, final int edgeOffset, + int length) { + assert edges.length >= length+edgeOffset; + assert points.length >= length+pointOffset; + edges[edgeOffset] = new Edge(points[pointOffset], null); + for (int i = 1; i < length; i++) { + if (direction) { + edges[edgeOffset + i] = new Edge(points[pointOffset + i], edges[edgeOffset + i - 1]); + edges[edgeOffset + i].component = component; + } else if(!edges[edgeOffset + i - 1].coordinate.equals(points[pointOffset + i])) { + edges[edgeOffset + i - 1].next = edges[edgeOffset + i] = new Edge(points[pointOffset + i], null); + edges[edgeOffset + i - 1].component = component; + } else { + throw new InvalidShapeException("Provided shape has duplicate consecutive coordinates at: " + points[pointOffset + i]); + } + } + + if (direction) { + edges[edgeOffset].setNext(edges[edgeOffset + length - 1]); + edges[edgeOffset].component = component; + } else { + edges[edgeOffset + length - 1].setNext(edges[edgeOffset]); + edges[edgeOffset + length - 1].component = component; + } + + return edges; + } + + /** + * 
Transforms coordinates in the eastern hemisphere (-180:0) to a (180:360) range + */ + private static void translate(Coordinate[] points) { + for (Coordinate c : points) { + if (c.x < 0) { + c.x += 2*DATELINE; + } + } + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java index 13237727173..d8689ee737f 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java @@ -26,8 +26,12 @@ import com.spatial4j.core.shape.jts.JtsGeometry; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.GeometryFactory; + import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.support.ToXContentToBytes; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.unit.DistanceUnit.Distance; @@ -43,7 +47,7 @@ import java.util.*; /** * Basic class for building GeoJSON shapes like Polygons, Linestrings, etc */ -public abstract class ShapeBuilder extends ToXContentToBytes { +public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWriteable { protected static final ESLogger LOGGER = ESLoggerFactory.getLogger(ShapeBuilder.class.getName()); @@ -173,6 +177,15 @@ public abstract class ShapeBuilder extends ToXContentToBytes { return builder.startArray().value(coordinate.x).value(coordinate.y).endArray(); } + protected static void writeCoordinateTo(Coordinate coordinate, StreamOutput out) throws IOException { + out.writeDouble(coordinate.x); + out.writeDouble(coordinate.y); + } + + protected Coordinate readCoordinateFrom(StreamInput in) throws IOException { + return new Coordinate(in.readDouble(), in.readDouble()); + } + public static Orientation orientationFromString(String orientation) { orientation = orientation.toLowerCase(Locale.ROOT); switch (orientation) { @@ -349,150 +362,6 @@ public abstract class ShapeBuilder extends ToXContentToBytes { } } - private static final int top(Coordinate[] points, int offset, int length) { - int top = 0; // we start at 1 here since top points to 0 - for (int i = 1; i < length; i++) { - if (points[offset + i].y < points[offset + top].y) { - top = i; - } else if (points[offset + i].y == points[offset + top].y) { - if (points[offset + i].x < points[offset + top].x) { - top = i; - } - } - } - return top; - } - - private static final double[] range(Coordinate[] points, int offset, int length) { - double minX = points[0].x; - double maxX = points[0].x; - double minY = points[0].y; - double maxY = points[0].y; - // compute the bounding coordinates (@todo: cleanup brute force) - for (int i = 1; i < length; ++i) { - if (points[offset + i].x < minX) { - minX = points[offset + i].x; - } - if (points[offset + i].x > maxX) { - maxX = points[offset + i].x; - } - if (points[offset + i].y < minY) { - minY = points[offset + i].y; - } - if (points[offset + i].y > maxY) { - maxY = points[offset + i].y; - } - } - return new double[] {minX, maxX, minY, maxY}; - } - - /** - * Concatenate a set of points to a polygon - * - * @param component - * component id of the polygon - * @param direction - * direction of the ring - 
* @param points - * list of points to concatenate - * @param pointOffset - * index of the first point - * @param edges - * Array of edges to write the result to - * @param edgeOffset - * index of the first edge in the result - * @param length - * number of points to use - * @return the edges creates - */ - private static Edge[] concat(int component, boolean direction, Coordinate[] points, final int pointOffset, Edge[] edges, final int edgeOffset, - int length) { - assert edges.length >= length+edgeOffset; - assert points.length >= length+pointOffset; - edges[edgeOffset] = new Edge(points[pointOffset], null); - for (int i = 1; i < length; i++) { - if (direction) { - edges[edgeOffset + i] = new Edge(points[pointOffset + i], edges[edgeOffset + i - 1]); - edges[edgeOffset + i].component = component; - } else if(!edges[edgeOffset + i - 1].coordinate.equals(points[pointOffset + i])) { - edges[edgeOffset + i - 1].next = edges[edgeOffset + i] = new Edge(points[pointOffset + i], null); - edges[edgeOffset + i - 1].component = component; - } else { - throw new InvalidShapeException("Provided shape has duplicate consecutive coordinates at: " + points[pointOffset + i]); - } - } - - if (direction) { - edges[edgeOffset].setNext(edges[edgeOffset + length - 1]); - edges[edgeOffset].component = component; - } else { - edges[edgeOffset + length - 1].setNext(edges[edgeOffset]); - edges[edgeOffset + length - 1].component = component; - } - - return edges; - } - - /** - * Create a connected list of a list of coordinates - * - * @param points - * array of point - * @param offset - * index of the first point - * @param length - * number of points - * @return Array of edges - */ - protected static Edge[] ring(int component, boolean direction, boolean handedness, LineStringBuilder shell, - Coordinate[] points, int offset, Edge[] edges, int toffset, int length) { - // calculate the direction of the points: - // find the point a the top of the set and check its - // neighbors orientation. So direction is equivalent - // to clockwise/counterclockwise - final int top = top(points, offset, length); - final int prev = (offset + ((top + length - 1) % length)); - final int next = (offset + ((top + 1) % length)); - boolean orientation = points[offset + prev].x > points[offset + next].x; - - // OGC requires shell as ccw (Right-Handedness) and holes as cw (Left-Handedness) - // since GeoJSON doesn't specify (and doesn't need to) GEO core will assume OGC standards - // thus if orientation is computed as cw, the logic will translate points across dateline - // and convert to a right handed system - - // compute the bounding box and calculate range - double[] range = range(points, offset, length); - final double rng = range[1] - range[0]; - // translate the points if the following is true - // 1. shell orientation is cw and range is greater than a hemisphere (180 degrees) but not spanning 2 hemispheres - // (translation would result in a collapsed poly) - // 2. 
the shell of the candidate hole has been translated (to preserve the coordinate system) - boolean incorrectOrientation = component == 0 && handedness != orientation; - if ( (incorrectOrientation && (rng > DATELINE && rng != 2*DATELINE)) || (shell.translated && component != 0)) { - translate(points); - // flip the translation bit if the shell is being translated - if (component == 0) { - shell.translated = true; - } - // correct the orientation post translation (ccw for shell, cw for holes) - if (component == 0 || (component != 0 && handedness == orientation)) { - orientation = !orientation; - } - } - return concat(component, direction ^ orientation, points, offset, edges, toffset, length); - } - - /** - * Transforms coordinates in the eastern hemisphere (-180:0) to a (180:360) range - */ - protected static void translate(Coordinate[] points) { - for (Coordinate c : points) { - if (c.x < 0) { - c.x += 2*DATELINE; - } - } - } - /** * Set the intersection of this line segment to the given position * @@ -504,7 +373,7 @@ public abstract class ShapeBuilder extends ToXContentToBytes { return intersect = position(coordinate, next.coordinate, position); } - public static Coordinate position(Coordinate p1, Coordinate p2, double position) { + protected static Coordinate position(Coordinate p1, Coordinate p2, double position) { if (position == 0) { return p1; } else if (position == 1) { @@ -529,7 +398,6 @@ public abstract class ShapeBuilder extends ToXContentToBytes { public int compare(Edge o1, Edge o2) { return Double.compare(o1.intersect.y, o2.intersect.y); } - } public static enum Orientation { @@ -565,12 +433,16 @@ public abstract class ShapeBuilder extends ToXContentToBytes { ENVELOPE("envelope"), CIRCLE("circle"); - protected final String shapename; + private final String shapename; private GeoShapeType(String shapename) { this.shapename = shapename; } + protected String shapeName() { + return shapename; + } + public static GeoShapeType forName(String geoshapename) { String typename = geoshapename.toLowerCase(Locale.ROOT); for (GeoShapeType type : values()) { @@ -823,4 +695,20 @@ public abstract class ShapeBuilder extends ToXContentToBytes { return geometryCollection; } } + + @Override + public String getWriteableName() { + return type().shapeName(); + } + + // NORELEASE this should be deleted as soon as all shape builders implement writable + @Override + public void writeTo(StreamOutput out) throws IOException { + } + + // NORELEASE this should be deleted as soon as all shape builders implement writable + @Override + public ShapeBuilder readFrom(StreamInput in) throws IOException { + return null; + } } diff --git a/core/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java b/core/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java index 56f0ec0f055..5bc1595be5f 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java +++ b/core/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java @@ -331,6 +331,6 @@ public abstract class Multibinder { NullPointerException npe = new NullPointerException(name); throw new ConfigurationException(singleton( - new Message(emptyList(), npe.toString(), npe))); + new Message(emptyList(), npe))); } } diff --git a/core/src/main/java/org/elasticsearch/common/inject/spi/Message.java b/core/src/main/java/org/elasticsearch/common/inject/spi/Message.java index e5488d07417..5a39b9edf13 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/spi/Message.java +++ 
b/core/src/main/java/org/elasticsearch/common/inject/spi/Message.java @@ -58,6 +58,10 @@ public final class Message implements Serializable, Element { this(Collections.singletonList(source), message, null); } + public Message(Object source, Throwable cause) { + this(Collections.singletonList(source), null, cause); + } + public Message(String message) { this(Collections.emptyList(), message, null); } diff --git a/core/src/main/java/org/elasticsearch/common/lease/Releasables.java b/core/src/main/java/org/elasticsearch/common/lease/Releasables.java index c91494a235d..e91bc5c0f71 100644 --- a/core/src/main/java/org/elasticsearch/common/lease/Releasables.java +++ b/core/src/main/java/org/elasticsearch/common/lease/Releasables.java @@ -19,8 +19,6 @@ package org.elasticsearch.common.lease; -import org.elasticsearch.ElasticsearchException; - import java.util.Arrays; /** Utility methods to work with {@link Releasable}s. */ diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index 6f5d728a6dd..c1f282ac234 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -26,8 +26,14 @@ import org.elasticsearch.common.inject.AbstractModule; */ public class NetworkModule extends AbstractModule { + private final NetworkService networkService; + + public NetworkModule(NetworkService networkService) { + this.networkService = networkService; + } + @Override protected void configure() { - bind(NetworkService.class).asEagerSingleton(); + bind(NetworkService.class).toInstance(networkService); } } diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java index 6a280519c55..05eaac15f42 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.network; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -80,7 +79,6 @@ public class NetworkService extends AbstractComponent { private final List customNameResolvers = new CopyOnWriteArrayList<>(); - @Inject public NetworkService(Settings settings) { super(settings); IfConfig.logIfNecessary(); diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java index 421e0081998..11fbe65cf01 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.settings; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.rest.RestRequest; @@ -35,7 +34,7 @@ import java.util.concurrent.CopyOnWriteArrayList; /** * */ -public class SettingsFilter extends AbstractComponent { +public final class SettingsFilter extends AbstractComponent { /** * Can be used to 
specify settings filter that will be used to filter out matching settings in toXContent method */ @@ -43,7 +42,6 @@ public class SettingsFilter extends AbstractComponent { private final CopyOnWriteArrayList patterns = new CopyOnWriteArrayList<>(); - @Inject public SettingsFilter(Settings settings) { super(settings); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index 20e65760245..2ae4799d9f3 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -29,14 +29,16 @@ import org.elasticsearch.common.inject.AbstractModule; public class SettingsModule extends AbstractModule { private final Settings settings; + private final SettingsFilter settingsFilter; - public SettingsModule(Settings settings) { + public SettingsModule(Settings settings, SettingsFilter settingsFilter) { this.settings = settings; + this.settingsFilter = settingsFilter; } @Override protected void configure() { bind(Settings.class).toInstance(settings); - bind(SettingsFilter.class).asEagerSingleton(); + bind(SettingsFilter.class).toInstance(settingsFilter); } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/common/util/ExtensionPoint.java b/core/src/main/java/org/elasticsearch/common/util/ExtensionPoint.java index 2ce4190a00e..d25113a54bb 100644 --- a/core/src/main/java/org/elasticsearch/common/util/ExtensionPoint.java +++ b/core/src/main/java/org/elasticsearch/common/util/ExtensionPoint.java @@ -123,7 +123,7 @@ public abstract class ExtensionPoint { public static final class SelectedType extends ClassMap { public SelectedType(String name, Class extensionClass) { - super(name, extensionClass, Collections.EMPTY_SET); + super(name, extensionClass, Collections.emptySet()); } /** diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java b/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java index 78df4e21a03..9cec672ad43 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java @@ -133,7 +133,7 @@ public class NodeJoinController extends AbstractComponent { /** utility method to fail the given election context under the cluster state thread */ private void failContext(final ElectionContext context, final String reason, final Throwable throwable) { - clusterService.submitStateUpdateTask("zen-disco-join(failure [" + reason + "])", Priority.IMMEDIATE, new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("zen-disco-join(failure [" + reason + "])", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public boolean runOnlyOnMaster() { @@ -231,7 +231,7 @@ public class NodeJoinController extends AbstractComponent { } final String source = "zen-disco-join(elected_as_master, [" + pendingMasterJoins + "] joins received)"; - clusterService.submitStateUpdateTask(source, Priority.IMMEDIATE, new ProcessJoinsTask() { + clusterService.submitStateUpdateTask(source, new ProcessJoinsTask(Priority.IMMEDIATE) { @Override public ClusterState execute(ClusterState currentState) { // Take into account the previous known nodes, if they happen not to be available @@ -280,7 +280,7 @@ public class NodeJoinController extends AbstractComponent { /** process all pending joins */ private void processJoins(String reason) { - 
clusterService.submitStateUpdateTask("zen-disco-join(" + reason + ")", Priority.URGENT, new ProcessJoinsTask()); + clusterService.submitStateUpdateTask("zen-disco-join(" + reason + ")", new ProcessJoinsTask(Priority.URGENT)); } @@ -356,6 +356,10 @@ public class NodeJoinController extends AbstractComponent { private final List joinCallbacksToRespondTo = new ArrayList<>(); private boolean nodeAdded = false; + public ProcessJoinsTask(Priority priority) { + super(priority); + } + @Override public ClusterState execute(ClusterState currentState) { DiscoveryNodes.Builder nodesBuilder; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index c2b16046b0c..03111d141ef 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -320,7 +320,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen } catch (FailedToCommitClusterStateException t) { // cluster service logs a WARN message logger.debug("failed to publish cluster state version [{}] (not enough nodes acknowledged, min master nodes [{}])", clusterChangedEvent.state().version(), electMaster.minimumMasterNodes()); - clusterService.submitStateUpdateTask("zen-disco-failed-to-publish", Priority.IMMEDIATE, new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("zen-disco-failed-to-publish", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public ClusterState execute(ClusterState currentState) { return rejoin(currentState, "failed to publish to min_master_nodes"); @@ -498,7 +498,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen return; } if (localNodeMaster()) { - clusterService.submitStateUpdateTask("zen-disco-node_left(" + node + ")", Priority.IMMEDIATE, new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("zen-disco-node_left(" + node + ")", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public ClusterState execute(ClusterState currentState) { DiscoveryNodes.Builder builder = DiscoveryNodes.builder(currentState.nodes()).remove(node.id()); @@ -525,7 +525,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen } }); } else if (node.equals(nodes().masterNode())) { - handleMasterGone(node, "shut_down"); + handleMasterGone(node, null, "shut_down"); } } @@ -538,7 +538,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen // nothing to do here... return; } - clusterService.submitStateUpdateTask("zen-disco-node_failed(" + node + "), reason " + reason, Priority.IMMEDIATE, new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("zen-disco-node_failed(" + node + "), reason " + reason, new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public ClusterState execute(ClusterState currentState) { if (currentState.nodes().get(node.id()) == null) { @@ -587,7 +587,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen // We only set the new value. If the master doesn't see enough nodes it will revoke it's mastership. 
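These hunks all converge on the same call shape, a minimal sketch of which (not from the diff) is shown below: the priority now travels on the ClusterStateUpdateTask itself instead of being passed to submitStateUpdateTask as a separate argument. ClusterService, ClusterStateUpdateTask, Priority and ESLogger are the existing core types.

    // Sketch only: submit a no-op update at URGENT priority using the new constructor.
    void submitNoOpUpdate(ClusterService clusterService, ESLogger logger) {
        clusterService.submitStateUpdateTask("example-source", new ClusterStateUpdateTask(Priority.URGENT) {
            @Override
            public ClusterState execute(ClusterState currentState) throws Exception {
                return currentState; // returning the same state means "no change"
            }

            @Override
            public void onFailure(String source, Throwable t) {
                logger.warn("failed to execute cluster state update [{}]", t, source);
            }
        });
    }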
return; } - clusterService.submitStateUpdateTask("zen-disco-minimum_master_nodes_changed", Priority.IMMEDIATE, new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("zen-disco-minimum_master_nodes_changed", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public ClusterState execute(ClusterState currentState) { // check if we have enough master nodes, if not, we need to move into joining the cluster again @@ -615,7 +615,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen }); } - private void handleMasterGone(final DiscoveryNode masterNode, final String reason) { + private void handleMasterGone(final DiscoveryNode masterNode, final Throwable cause, final String reason) { if (lifecycleState() != Lifecycle.State.STARTED) { // not started, ignore a master failure return; @@ -625,9 +625,9 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen return; } - logger.info("master_left [{}], reason [{}]", masterNode, reason); + logger.info("master_left [{}], reason [{}]", cause, masterNode, reason); - clusterService.submitStateUpdateTask("zen-disco-master_failed (" + masterNode + ")", Priority.IMMEDIATE, new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("zen-disco-master_failed (" + masterNode + ")", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public boolean runOnlyOnMaster() { @@ -694,7 +694,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen } void processNextPendingClusterState(String reason) { - clusterService.submitStateUpdateTask("zen-disco-receive(from master [" + reason + "])", Priority.URGENT, new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("zen-disco-receive(from master [" + reason + "])", new ClusterStateUpdateTask(Priority.URGENT) { @Override public boolean runOnlyOnMaster() { return false; @@ -1059,7 +1059,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen return; } logger.debug("got a ping from another master {}. resolving who should rejoin. 
current ping count: [{}]", pingRequest.masterNode(), pingsWhileMaster.get()); - clusterService.submitStateUpdateTask("ping from another master", Priority.IMMEDIATE, new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("ping from another master", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public ClusterState execute(ClusterState currentState) throws Exception { @@ -1078,8 +1078,8 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen private class MasterNodeFailureListener implements MasterFaultDetection.Listener { @Override - public void onMasterFailure(DiscoveryNode masterNode, String reason) { - handleMasterGone(masterNode, reason); + public void onMasterFailure(DiscoveryNode masterNode, Throwable cause, String reason) { + handleMasterGone(masterNode, cause, reason); } } @@ -1114,7 +1114,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen class RejoinClusterRequestHandler implements TransportRequestHandler { @Override public void messageReceived(final RejoinClusterRequest request, final TransportChannel channel) throws Exception { - clusterService.submitStateUpdateTask("received a request to rejoin the cluster from [" + request.fromNodeId + "]", Priority.IMMEDIATE, new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("received a request to rejoin the cluster from [" + request.fromNodeId + "]", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public boolean runOnlyOnMaster() { diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java index 8333b967c2f..8842bafb116 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java @@ -49,7 +49,7 @@ public class MasterFaultDetection extends FaultDetection { public static interface Listener { /** called when pinging the master failed, like a timeout, transport disconnects etc */ - void onMasterFailure(DiscoveryNode masterNode, String reason); + void onMasterFailure(DiscoveryNode masterNode, Throwable cause, String reason); } @@ -117,7 +117,7 @@ public class MasterFaultDetection extends FaultDetection { transportService.connectToNode(masterNode); } catch (final Exception e) { // notify master failure (which stops also) and bail.. 
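Since the fault-detection listener callback now carries the underlying Throwable, an implementation can attach the full stack trace rather than flattening it into the reason string. The following is only an illustrative sketch, with logger assumed to be an ESLogger supplied by the enclosing component.

    // Sketch only: a listener written against the widened onMasterFailure callback.
    MasterFaultDetection.Listener listener = new MasterFaultDetection.Listener() {
        @Override
        public void onMasterFailure(DiscoveryNode masterNode, Throwable cause, String reason) {
            logger.warn("master [{}] marked as failed, reason [{}]", cause, masterNode, reason);
        }
    };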
- notifyMasterFailure(masterNode, "failed to perform initial connect [" + e.getMessage() + "]"); + notifyMasterFailure(masterNode, e, "failed to perform initial connect "); return; } if (masterPinger != null) { @@ -176,22 +176,22 @@ public class MasterFaultDetection extends FaultDetection { threadPool.schedule(TimeValue.timeValueMillis(0), ThreadPool.Names.SAME, masterPinger); } catch (Exception e) { logger.trace("[master] [{}] transport disconnected (with verified connect)", masterNode); - notifyMasterFailure(masterNode, "transport disconnected (with verified connect)"); + notifyMasterFailure(masterNode, null, "transport disconnected (with verified connect)"); } } else { logger.trace("[master] [{}] transport disconnected", node); - notifyMasterFailure(node, "transport disconnected"); + notifyMasterFailure(node, null, "transport disconnected"); } } } - private void notifyMasterFailure(final DiscoveryNode masterNode, final String reason) { + private void notifyMasterFailure(final DiscoveryNode masterNode, final Throwable cause, final String reason) { if (notifiedMasterFailure.compareAndSet(false, true)) { threadPool.generic().execute(new Runnable() { @Override public void run() { for (Listener listener : listeners) { - listener.onMasterFailure(masterNode, reason); + listener.onMasterFailure(masterNode, cause, reason); } } }); @@ -255,15 +255,15 @@ public class MasterFaultDetection extends FaultDetection { return; } else if (exp.getCause() instanceof NotMasterException) { logger.debug("[master] pinging a master {} that is no longer a master", masterNode); - notifyMasterFailure(masterToPing, "no longer master"); + notifyMasterFailure(masterToPing, exp, "no longer master"); return; } else if (exp.getCause() instanceof ThisIsNotTheMasterYouAreLookingForException) { logger.debug("[master] pinging a master {} that is not the master", masterNode); - notifyMasterFailure(masterToPing, "not master"); + notifyMasterFailure(masterToPing, exp,"not master"); return; } else if (exp.getCause() instanceof NodeDoesNotExistOnMasterException) { logger.debug("[master] pinging a master {} but we do not exists on it, act as if its master failure", masterNode); - notifyMasterFailure(masterToPing, "do not exists on master, act as master failure"); + notifyMasterFailure(masterToPing, exp,"do not exists on master, act as master failure"); return; } @@ -272,7 +272,7 @@ public class MasterFaultDetection extends FaultDetection { if (retryCount >= pingRetryCount) { logger.debug("[master] failed to ping [{}], tried [{}] times, each with maximum [{}] timeout", masterNode, pingRetryCount, pingRetryTimeout); // not good, failure - notifyMasterFailure(masterToPing, "failed to ping, tried [" + pingRetryCount + "] times, each with maximum [" + pingRetryTimeout + "] timeout"); + notifyMasterFailure(masterToPing, null, "failed to ping, tried [" + pingRetryCount + "] times, each with maximum [" + pingRetryTimeout + "] timeout"); } else { // resend the request, not reschedule, rely on send timeout transportService.sendRequest(masterToPing, MASTER_PING_ACTION_NAME, request, options, this); diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java index 53081f55d21..2abe730b1e8 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java @@ -26,6 +26,7 @@ import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.*; @@ -41,7 +42,7 @@ import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.new public class NodesFaultDetection extends FaultDetection { public static final String PING_ACTION_NAME = "internal:discovery/zen/fd/ping"; - + public abstract static class Listener { public void onNodeFailure(DiscoveryNode node, String reason) {} @@ -145,14 +146,18 @@ public class NodesFaultDetection extends FaultDetection { } private void notifyNodeFailure(final DiscoveryNode node, final String reason) { - threadPool.generic().execute(new Runnable() { - @Override - public void run() { - for (Listener listener : listeners) { - listener.onNodeFailure(node, reason); + try { + threadPool.generic().execute(new Runnable() { + @Override + public void run() { + for (Listener listener : listeners) { + listener.onNodeFailure(node, reason); + } } - } - }); + }); + } catch (EsRejectedExecutionException ex) { + logger.trace("[node ] [{}] ignoring node failure (reason [{}]). Local node is shutting down", ex, node, reason); + } } private void notifyPingReceived(final PingRequest pingRequest) { diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java index 93d457d7382..91fd622023f 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java @@ -140,9 +140,9 @@ public class PublishClusterStateAction extends AbstractComponent { throw t; } catch (Throwable t) { // try to fail committing, in cause it's still on going - if (sendingController.markAsFailed("unexpected error [" + t.getMessage() + "]")) { + if (sendingController.markAsFailed("unexpected error", t)) { // signal the change should be rejected - throw new Discovery.FailedToCommitClusterStateException("unexpected error [{}]", t, t.getMessage()); + throw new Discovery.FailedToCommitClusterStateException("unexpected error", t); } else { throw t; } @@ -583,6 +583,21 @@ public class PublishClusterStateAction extends AbstractComponent { return true; } + /** + * tries marking the publishing as failed, if a decision wasn't made yet + * + * @return true if the publishing was failed and the cluster state is *not* committed + **/ + synchronized private boolean markAsFailed(String details, Throwable reason) { + if (committedOrFailed()) { + return committed == false; + } + logger.trace("failed to commit version [{}]. 
{}", reason, clusterState.version(), details); + committed = false; + committedOrFailedLatch.countDown(); + return true; + } + /** * tries marking the publishing as failed, if a decision wasn't made yet * diff --git a/core/src/main/java/org/elasticsearch/env/Environment.java b/core/src/main/java/org/elasticsearch/env/Environment.java index c5c769a73c2..7982c2f35ea 100644 --- a/core/src/main/java/org/elasticsearch/env/Environment.java +++ b/core/src/main/java/org/elasticsearch/env/Environment.java @@ -58,6 +58,8 @@ public class Environment { private final Path pluginsFile; + private final Path modulesFile; + private final Path sharedDataFile; /** location of bin/, used by plugin manager */ @@ -157,6 +159,7 @@ public class Environment { binFile = homeFile.resolve("bin"); libFile = homeFile.resolve("lib"); + modulesFile = homeFile.resolve("modules"); } /** @@ -275,6 +278,10 @@ public class Environment { return libFile; } + public Path modulesFile() { + return modulesFile; + } + public Path logsFile() { return logsFile; } diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index bfc078a6679..d91b4bd8cdd 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -131,7 +131,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction if (metaData != null) { ShardPath shardPath = null; try { - IndexSettings indexSettings = new IndexSettings(metaData, settings, Collections.EMPTY_LIST); + IndexSettings indexSettings = new IndexSettings(metaData, settings, Collections.emptyList()); shardPath = ShardPath.loadShardPath(logger, nodeEnv, shardId, indexSettings); if (shardPath == null) { throw new IllegalStateException(shardId + " no shard path found"); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java index 27cd9fd8c4c..a2c65c6441d 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java @@ -235,7 +235,7 @@ public class Analysis { try (BufferedReader reader = FileSystemUtils.newBufferedReader(wordListFile.toUri().toURL(), StandardCharsets.UTF_8)) { return loadWordList(reader, "#"); } catch (IOException ioe) { - String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, ioe.getMessage()); + String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix); throw new IllegalArgumentException(message, ioe); } } @@ -282,7 +282,7 @@ public class Analysis { try { return FileSystemUtils.newBufferedReader(path.toUri().toURL(), StandardCharsets.UTF_8); } catch (IOException ioe) { - String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, ioe.getMessage()); + String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix); throw new IllegalArgumentException(message, ioe); } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index 4e7b66dec8f..86c06dbe54f 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ 
b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -55,7 +55,7 @@ public final class AnalysisRegistry implements Closeable { private final Environment environemnt; public AnalysisRegistry(HunspellService hunspellService, Environment environment) { - this(hunspellService, environment, Collections.EMPTY_MAP, Collections.EMPTY_MAP, Collections.EMPTY_MAP, Collections.EMPTY_MAP); + this(hunspellService, environment, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); } public AnalysisRegistry(HunspellService hunspellService, Environment environment, diff --git a/core/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java b/core/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java index 922a27c70ab..bd1fd69eb74 100644 --- a/core/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java +++ b/core/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java @@ -19,22 +19,32 @@ package org.elasticsearch.index.fieldvisitor; import org.apache.lucene.index.FieldInfo; +import org.elasticsearch.common.regex.Regex; import java.io.IOException; +import java.util.Collections; +import java.util.List; import java.util.Set; /** - * A field visitor that allows to load a selection of the stored fields. + * A field visitor that allows to load a selection of the stored fields by exact name or by pattern. + * Supported pattern styles: "xxx*", "*xxx", "*xxx*" and "xxx*yyy". * The Uid field is always loaded. * The class is optimized for source loading as it is a common use case. */ public class CustomFieldsVisitor extends FieldsVisitor { private final Set fields; + private final List patterns; - public CustomFieldsVisitor(Set fields, boolean loadSource) { + public CustomFieldsVisitor(Set fields, List patterns, boolean loadSource) { super(loadSource); this.fields = fields; + this.patterns = patterns; + } + + public CustomFieldsVisitor(Set fields, boolean loadSource) { + this(fields, Collections.emptyList(), loadSource); } @Override @@ -42,7 +52,14 @@ public class CustomFieldsVisitor extends FieldsVisitor { if (super.needsField(fieldInfo) == Status.YES) { return Status.YES; } - - return fields.contains(fieldInfo.name) ? 
Status.YES : Status.NO; + if (fields.contains(fieldInfo.name)) { + return Status.YES; + } + for (String pattern : patterns) { + if (Regex.simpleMatch(pattern, fieldInfo.name)) { + return Status.YES; + } + } + return Status.NO; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 5f266cbd48f..c4fec8cf095 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -336,8 +336,6 @@ public class DocumentMapper implements ToXContent { private void addMappers(Collection objectMappers, Collection fieldMappers, boolean updateAllTypes) { assert mappingLock.isWriteLockedByCurrentThread(); - // first ensure we don't have any incompatible new fields - mapperService.checkNewMappersCompatibility(objectMappers, fieldMappers, updateAllTypes); // update mappers for this document type Map builder = new HashMap<>(this.objectMappers); @@ -351,11 +349,12 @@ public class DocumentMapper implements ToXContent { this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers); // finally update for the entire index - mapperService.addMappers(objectMappers, fieldMappers); + mapperService.addMappers(type, objectMappers, fieldMappers); } public MergeResult merge(Mapping mapping, boolean simulate, boolean updateAllTypes) { try (ReleasableLock lock = mappingWriteLock.acquire()) { + mapperService.checkMappersCompatibility(type, mapping, updateAllTypes); final MergeResult mergeResult = new MergeResult(simulate, updateAllTypes); this.mapping.merge(mapping, mergeResult); if (simulate == false) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 87dfce192b6..8128904c8d1 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -76,6 +76,10 @@ class DocumentParser implements Closeable { } private ParsedDocument innerParseDocument(SourceToParse source) throws MapperParsingException { + if (docMapper.type().equals(MapperService.DEFAULT_MAPPING)) { + throw new IllegalArgumentException("It is forbidden to index into the default mapping [" + MapperService.DEFAULT_MAPPING + "]"); + } + ParseContext.InternalParseContext context = cache.get(); final Mapping mapping = docMapper.mapping(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 45bef68ee00..c277cdc4728 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -307,7 +307,6 @@ public abstract class FieldMapper extends Mapper { if (ref.get().equals(fieldType()) == false) { throw new IllegalStateException("Cannot overwrite field type reference to unequal reference"); } - ref.incrementAssociatedMappers(); this.fieldTypeRef = ref; } @@ -360,7 +359,7 @@ public abstract class FieldMapper extends Mapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { if (!this.getClass().equals(mergeWith.getClass())) { String mergedType = mergeWith.getClass().getSimpleName(); if (mergeWith instanceof FieldMapper) { @@ -380,11 +379,6 @@ public abstract 
class FieldMapper extends Mapper { return; } - boolean strict = this.fieldTypeRef.getNumAssociatedMappers() > 1 && mergeResult.updateAllTypes() == false; - fieldType().checkCompatibility(fieldMergeWith.fieldType(), subConflicts, strict); - for (String conflict : subConflicts) { - mergeResult.addConflict(conflict); - } multiFields.merge(mergeWith, mergeResult); if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) { @@ -614,7 +608,7 @@ public abstract class FieldMapper extends Mapper { } // No need for locking, because locking is taken care of in ObjectMapper#merge and DocumentMapper#merge - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { FieldMapper mergeWithMultiField = (FieldMapper) mergeWith; List newFieldMappers = null; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index 1b0e827ac35..eaaa47c3bd2 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java @@ -24,9 +24,11 @@ import org.elasticsearch.common.regex.Regex; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; +import java.util.Objects; import java.util.Set; /** @@ -37,18 +39,49 @@ class FieldTypeLookup implements Iterable { /** Full field name to field type */ private final CopyOnWriteHashMap fullNameToFieldType; + /** Full field name to types containing a mapping for this full name. */ + private final CopyOnWriteHashMap> fullNameToTypes; + /** Index field name to field type */ private final CopyOnWriteHashMap indexNameToFieldType; + /** Index field name to types containing a mapping for this index name. */ + private final CopyOnWriteHashMap> indexNameToTypes; + /** Create a new empty instance. */ public FieldTypeLookup() { fullNameToFieldType = new CopyOnWriteHashMap<>(); + fullNameToTypes = new CopyOnWriteHashMap<>(); indexNameToFieldType = new CopyOnWriteHashMap<>(); + indexNameToTypes = new CopyOnWriteHashMap<>(); } - private FieldTypeLookup(CopyOnWriteHashMap fullName, CopyOnWriteHashMap indexName) { - fullNameToFieldType = fullName; - indexNameToFieldType = indexName; + private FieldTypeLookup( + CopyOnWriteHashMap fullName, + CopyOnWriteHashMap> fullNameToTypes, + CopyOnWriteHashMap indexName, + CopyOnWriteHashMap> indexNameToTypes) { + this.fullNameToFieldType = fullName; + this.fullNameToTypes = fullNameToTypes; + this.indexNameToFieldType = indexName; + this.indexNameToTypes = indexNameToTypes; + } + + private static CopyOnWriteHashMap> addType(CopyOnWriteHashMap> map, String key, String type) { + Set types = map.get(key); + if (types == null) { + return map.copyAndPut(key, Collections.singleton(type)); + } else if (types.contains(type)) { + // noting to do + return map; + } else { + Set newTypes = new HashSet<>(types.size() + 1); + newTypes.addAll(types); + newTypes.add(type); + assert newTypes.size() == types.size() + 1; + newTypes = Collections.unmodifiableSet(newTypes); + return map.copyAndPut(key, newTypes); + } } /** @@ -56,9 +89,15 @@ class FieldTypeLookup implements Iterable { * from the provided fields. If a field already exists, the field type will be updated * to use the new mappers field type. 
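 * <p>
 * A minimal usage sketch (the type name "my_type", the field name "my_field" and the
 * {@code newFieldMappers} collection below are purely illustrative):
 * <pre>
 * FieldTypeLookup lookup = new FieldTypeLookup();
 * lookup = lookup.copyAndAddAll("my_type", newFieldMappers); // returns a new, updated instance
 * Set types = lookup.getTypes("my_field");                   // contains "my_type" if "my_field" was among the new mappers
 * </pre>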
*/ - public FieldTypeLookup copyAndAddAll(Collection newFieldMappers) { + public FieldTypeLookup copyAndAddAll(String type, Collection newFieldMappers) { + Objects.requireNonNull(type, "type must not be null"); + if (MapperService.DEFAULT_MAPPING.equals(type)) { + throw new IllegalArgumentException("Default mappings should not be added to the lookup"); + } CopyOnWriteHashMap fullName = this.fullNameToFieldType; + CopyOnWriteHashMap> fullNameToTypes = this.fullNameToTypes; CopyOnWriteHashMap indexName = this.indexNameToFieldType; + CopyOnWriteHashMap> indexNameToTypes = this.indexNameToTypes; for (FieldMapper fieldMapper : newFieldMappers) { MappedFieldType fieldType = fieldMapper.fieldType(); @@ -86,8 +125,23 @@ class FieldTypeLookup implements Iterable { // this new field bridges between two existing field names (a full and index name), which we cannot support throw new IllegalStateException("insane mappings found. field " + fieldType.names().fullName() + " maps across types to field " + fieldType.names().indexName()); } + + fullNameToTypes = addType(fullNameToTypes, fieldType.names().fullName(), type); + indexNameToTypes = addType(indexNameToTypes, fieldType.names().indexName(), type); + } + return new FieldTypeLookup(fullName, fullNameToTypes, indexName, indexNameToTypes); + } + + private static boolean beStrict(String type, Set types, boolean updateAllTypes) { + assert types.size() >= 1; + if (updateAllTypes) { + return false; + } else if (types.size() == 1 && types.contains(type)) { + // we are implicitly updating all types + return false; + } else { + return true; } - return new FieldTypeLookup(fullName, indexName); } /** @@ -95,14 +149,15 @@ class FieldTypeLookup implements Iterable { * If any are not compatible, an IllegalArgumentException is thrown. * If updateAllTypes is true, only basic compatibility is checked. */ - public void checkCompatibility(Collection newFieldMappers, boolean updateAllTypes) { - for (FieldMapper fieldMapper : newFieldMappers) { + public void checkCompatibility(String type, Collection fieldMappers, boolean updateAllTypes) { + for (FieldMapper fieldMapper : fieldMappers) { MappedFieldTypeReference ref = fullNameToFieldType.get(fieldMapper.fieldType().names().fullName()); if (ref != null) { List conflicts = new ArrayList<>(); ref.get().checkTypeName(fieldMapper.fieldType(), conflicts); if (conflicts.isEmpty()) { // only check compat if they are the same type - boolean strict = updateAllTypes == false; + final Set types = fullNameToTypes.get(fieldMapper.fieldType().names().fullName()); + boolean strict = beStrict(type, types, updateAllTypes); ref.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict); } if (conflicts.isEmpty() == false) { @@ -116,7 +171,8 @@ class FieldTypeLookup implements Iterable { List conflicts = new ArrayList<>(); indexNameRef.get().checkTypeName(fieldMapper.fieldType(), conflicts); if (conflicts.isEmpty()) { // only check compat if they are the same type - boolean strict = updateAllTypes == false; + final Set types = indexNameToTypes.get(fieldMapper.fieldType().names().indexName()); + boolean strict = beStrict(type, types, updateAllTypes); indexNameRef.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict); } if (conflicts.isEmpty() == false) { @@ -133,6 +189,15 @@ class FieldTypeLookup implements Iterable { return ref.get(); } + /** Get the set of types that have a mapping for the given field. 
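 * Returns an unmodifiable set; it is empty (never {@code null}) when no type has a mapping for the field.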
*/ + public Set getTypes(String field) { + Set types = fullNameToTypes.get(field); + if (types == null) { + types = Collections.emptySet(); + } + return types; + } + /** Returns the field type for the given index name */ public MappedFieldType getByIndexName(String field) { MappedFieldTypeReference ref = indexNameToFieldType.get(field); @@ -140,6 +205,15 @@ class FieldTypeLookup implements Iterable { return ref.get(); } + /** Get the set of types that have a mapping for the given field. */ + public Set getTypesByIndexName(String field) { + Set types = indexNameToTypes.get(field); + if (types == null) { + types = Collections.emptySet(); + } + return types; + } + /** * Returns a list of the index names of a simple match regex like pattern against full name and index name. */ diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldTypeReference.java b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldTypeReference.java index d3c6b83a6a3..1a9d0b70b37 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldTypeReference.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldTypeReference.java @@ -23,12 +23,10 @@ package org.elasticsearch.index.mapper; */ public class MappedFieldTypeReference { private MappedFieldType fieldType; // the current field type this reference points to - private int numAssociatedMappers; public MappedFieldTypeReference(MappedFieldType fieldType) { fieldType.freeze(); // ensure frozen this.fieldType = fieldType; - this.numAssociatedMappers = 1; } public MappedFieldType get() { @@ -40,11 +38,4 @@ public class MappedFieldTypeReference { this.fieldType = fieldType; } - public int getNumAssociatedMappers() { - return numAssociatedMappers; - } - - public void incrementAssociatedMappers() { - ++numAssociatedMappers; - } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java index 9ca34e1c573..a8d0c0a706d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java @@ -154,5 +154,5 @@ public abstract class Mapper implements ToXContent, Iterable { /** Returns the canonical name which uniquely identifies the mapper against other mappers in a type. 
*/ public abstract String name(); - public abstract void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException; + public abstract void merge(Mapper mergeWith, MergeResult mergeResult); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index f617dd5c6f0..938f610d6db 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -33,6 +33,7 @@ import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.regex.Regex; @@ -250,23 +251,21 @@ public class MapperService extends AbstractIndexComponent implements Closeable { DocumentMapper oldMapper = mappers.get(mapper.type()); if (oldMapper != null) { - MergeResult result = oldMapper.merge(mapper.mapping(), false, updateAllTypes); + // simulate first + MergeResult result = oldMapper.merge(mapper.mapping(), true, updateAllTypes); if (result.hasConflicts()) { - // TODO: What should we do??? - if (logger.isDebugEnabled()) { - logger.debug("merging mapping for type [{}] resulted in conflicts: [{}]", mapper.type(), Arrays.toString(result.buildConflicts())); - } + throw new IllegalArgumentException("Merge failed with failures {" + Arrays.toString(result.buildConflicts()) + "}"); } + // then apply for real + result = oldMapper.merge(mapper.mapping(), false, updateAllTypes); + assert result.hasConflicts() == false; // we already simulated return oldMapper; } else { - List newObjectMappers = new ArrayList<>(); - List newFieldMappers = new ArrayList<>(); - for (MetadataFieldMapper metadataMapper : mapper.mapping().metadataMappers) { - newFieldMappers.add(metadataMapper); - } - MapperUtils.collect(mapper.mapping().root, newObjectMappers, newFieldMappers); - checkNewMappersCompatibility(newObjectMappers, newFieldMappers, updateAllTypes); - addMappers(newObjectMappers, newFieldMappers); + Tuple, Collection> newMappers = checkMappersCompatibility( + mapper.type(), mapper.mapping(), updateAllTypes); + Collection newObjectMappers = newMappers.v1(); + Collection newFieldMappers = newMappers.v2(); + addMappers(mapper.type(), newObjectMappers, newFieldMappers); for (DocumentTypeListener typeListener : typeListeners) { typeListener.beforeCreate(mapper); @@ -301,9 +300,9 @@ public class MapperService extends AbstractIndexComponent implements Closeable { return true; } - protected void checkNewMappersCompatibility(Collection newObjectMappers, Collection newFieldMappers, boolean updateAllTypes) { + protected void checkMappersCompatibility(String type, Collection objectMappers, Collection fieldMappers, boolean updateAllTypes) { assert mappingLock.isWriteLockedByCurrentThread(); - for (ObjectMapper newObjectMapper : newObjectMappers) { + for (ObjectMapper newObjectMapper : objectMappers) { ObjectMapper existingObjectMapper = fullPathObjectMappers.get(newObjectMapper.fullPath()); if (existingObjectMapper != null) { MergeResult result = new MergeResult(true, updateAllTypes); @@ -314,10 +313,22 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } } } - fieldTypes.checkCompatibility(newFieldMappers, 
updateAllTypes); + fieldTypes.checkCompatibility(type, fieldMappers, updateAllTypes); } - protected void addMappers(Collection objectMappers, Collection fieldMappers) { + protected Tuple, Collection> checkMappersCompatibility( + String type, Mapping mapping, boolean updateAllTypes) { + List objectMappers = new ArrayList<>(); + List fieldMappers = new ArrayList<>(); + for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) { + fieldMappers.add(metadataMapper); + } + MapperUtils.collect(mapping.root, objectMappers, fieldMappers); + checkMappersCompatibility(type, objectMappers, fieldMappers, updateAllTypes); + return new Tuple<>(objectMappers, fieldMappers); + } + + protected void addMappers(String type, Collection objectMappers, Collection fieldMappers) { assert mappingLock.isWriteLockedByCurrentThread(); ImmutableOpenMap.Builder fullPathObjectMappers = ImmutableOpenMap.builder(this.fullPathObjectMappers); for (ObjectMapper objectMapper : objectMappers) { @@ -327,7 +338,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } } this.fullPathObjectMappers = fullPathObjectMappers.build(); - this.fieldTypes = this.fieldTypes.copyAndAddAll(fieldMappers); + this.fieldTypes = this.fieldTypes.copyAndAddAll(type, fieldMappers); } public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException { @@ -344,10 +355,21 @@ public class MapperService extends AbstractIndexComponent implements Closeable { return mappers.containsKey(mappingType); } + /** + * Return the set of concrete types that have a mapping. + * NOTE: this does not return the default mapping. + */ public Collection types() { - return mappers.keySet(); + final Set types = new HashSet<>(mappers.keySet()); + types.remove(DEFAULT_MAPPING); + return Collections.unmodifiableSet(types); } + /** + * Return the {@link DocumentMapper} for the given type. By using the special + * {@value #DEFAULT_MAPPING} type, you can get a {@link DocumentMapper} for + * the default mapping. + */ public DocumentMapper documentMapper(String type) { return mappers.get(type); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MergeMappingException.java b/core/src/main/java/org/elasticsearch/index/mapper/MergeMappingException.java deleted file mode 100644 index 4a5e75fcb56..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/MergeMappingException.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.rest.RestStatus; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Objects; - -/** - * - */ -public final class MergeMappingException extends MapperException { - - private final String[] failures; - - public MergeMappingException(String[] failures) { - super("Merge failed with failures {" + Arrays.toString(failures) + "}"); - Objects.requireNonNull(failures, "failures must be non-null"); - this.failures = failures; - } - - public MergeMappingException(StreamInput in) throws IOException { - super(in); - failures = in.readStringArray(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeStringArray(failures); - } - - public String[] failures() { - return failures; - } - - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java b/core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java index e9d62c8e88d..f65072d489e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; /** @@ -159,16 +160,22 @@ public class SourceToParse { return this.ttl; } + public SourceToParse ttl(TimeValue ttl) { + if (ttl == null) { + this.ttl = -1; + return this; + } + this.ttl = ttl.millis(); + return this; + } + public SourceToParse ttl(long ttl) { this.ttl = ttl; return this; } - public static enum Origin { - + public enum Origin { PRIMARY, REPLICA - } - } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java index d3cf998ef2c..5b4df635a34 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java @@ -605,7 +605,7 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { super.merge(mergeWith, mergeResult); CompletionFieldMapper fieldMergeWith = (CompletionFieldMapper) mergeWith; if (!mergeResult.simulate()) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index 3fba511fb52..87a63de99ec 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -135,6 +135,15 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM super(ref); } + @Override + public void checkCompatibility(MappedFieldType other, + List conflicts, boolean strict) { + super.checkCompatibility(other, conflicts, strict); + if (numericPrecisionStep() != other.numericPrecisionStep()) { + conflicts.add("mapper [" + names().fullName() + "] has different 
[precision_step] values"); + } + } + public abstract NumberFieldType clone(); @Override @@ -245,17 +254,12 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { super.merge(mergeWith, mergeResult); if (!this.getClass().equals(mergeWith.getClass())) { return; } NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith; - if (this.fieldTypeRef.getNumAssociatedMappers() > 1 && mergeResult.updateAllTypes() == false) { - if (fieldType().numericPrecisionStep() != nfmMergeWith.fieldType().numericPrecisionStep()) { - mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] is used by multiple types. Set update_all_types to true to update precision_step across all types."); - } - } if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) { this.includeInAll = nfmMergeWith.includeInAll; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index cb89cb4973b..a5c681d59a5 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -35,7 +35,6 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.internal.AllFieldMapper; @@ -360,7 +359,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { super.merge(mergeWith, mergeResult); if (!this.getClass().equals(mergeWith.getClass())) { return; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java index faa2b7e66a0..8348892e44a 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java @@ -33,7 +33,6 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.StringFieldMapper.ValueAndBoost; @@ -191,7 +190,7 @@ public class TokenCountFieldMapper extends IntegerFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { super.merge(mergeWith, mergeResult); if (!this.getClass().equals(mergeWith.getClass())) { return; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java 
b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index 3f142cc2f9c..0bb0b213f64 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -319,6 +319,9 @@ public class TypeParsers { for (Map.Entry multiFieldEntry : multiFieldsPropNodes.entrySet()) { String multiFieldName = multiFieldEntry.getKey(); + if (multiFieldName.contains(".")) { + throw new MapperParsingException("Field name [" + multiFieldName + "] which is a multi field of [" + name + "] cannot contain '.'"); + } if (!(multiFieldEntry.getValue() instanceof Map)) { throw new MapperParsingException("illegal field [" + multiFieldName + "], only fields can be specified inside fields"); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index 1c33b66fbc8..0b57d866ddd 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -38,7 +38,6 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.DoubleFieldMapper; @@ -389,7 +388,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { super.merge(mergeWith, mergeResult); if (!this.getClass().equals(mergeWith.getClass())) { return; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java index 350c8f5d668..84e6bde07ac 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java @@ -39,7 +39,6 @@ import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.DoubleFieldMapper; @@ -298,7 +297,7 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { super.merge(mergeWith, mergeResult); if (!this.getClass().equals(mergeWith.getClass())) { return; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java index 6d385875b18..7e784324f36 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java +++ 
b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java @@ -45,7 +45,6 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; @@ -472,7 +471,7 @@ public class GeoShapeFieldMapper extends FieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { super.merge(mergeWith, mergeResult); if (!this.getClass().equals(mergeWith.getClass())) { return; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java index 9de14810305..3166a683397 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java @@ -36,7 +36,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; @@ -310,7 +309,7 @@ public class AllFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { if (((AllFieldMapper)mergeWith).enabled() != this.enabled() && ((AllFieldMapper)mergeWith).enabledState != Defaults.ENABLED) { mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled()); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java index 195d736d323..16b6c4c56da 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java @@ -44,7 +44,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; @@ -332,7 +331,7 @@ public class IdFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { // do nothing here, no merging, but also no exception } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java index 
b82639e75b6..962332b5c4b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java @@ -34,7 +34,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; @@ -280,7 +279,7 @@ public class IndexFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { IndexFieldMapper indexFieldMapperMergeWith = (IndexFieldMapper) mergeWith; if (!mergeResult.simulate()) { if (indexFieldMapperMergeWith.enabledState != enabledState && !indexFieldMapperMergeWith.enabledState.unset()) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java index d6221ae1db3..760259a1802 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java @@ -38,7 +38,6 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; @@ -372,7 +371,7 @@ public class ParentFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { super.merge(mergeWith, mergeResult); ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith; if (Objects.equals(parentType, fieldMergeWith.parentType) == false) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java index 43be36d177b..18d0645d2d5 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java @@ -31,7 +31,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; @@ -250,7 +249,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { // do nothing here, no 
merging, but also no exception } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/SeqNoFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/SeqNoFieldMapper.java index 218f7bbabfa..64e6e8a1584 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/SeqNoFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/SeqNoFieldMapper.java @@ -150,7 +150,7 @@ public class SeqNoFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { // nothing to do } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java index 1e41f63e305..da3b8dbc5ab 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java @@ -44,7 +44,6 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; @@ -442,7 +441,7 @@ public class SourceFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { SourceFieldMapper sourceMergeWith = (SourceFieldMapper) mergeWith; if (mergeResult.simulate()) { if (this.enabled != sourceMergeWith.enabled) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java index 54f631427cc..9a18befe622 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java @@ -32,7 +32,6 @@ import org.elasticsearch.index.analysis.NumericLongAnalyzer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; @@ -259,7 +258,7 @@ public class TTLFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { TTLFieldMapper ttlMergeWith = (TTLFieldMapper) mergeWith; if (((TTLFieldMapper) mergeWith).enabledState != Defaults.ENABLED_STATE) {//only do something if actually something was set for the document mapper that we merge with if (this.enabledState == EnabledAttributeMapper.ENABLED && ((TTLFieldMapper) mergeWith).enabledState == EnabledAttributeMapper.DISABLED) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java 
b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java index 05d3a24cd6e..468243d63cf 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java @@ -33,7 +33,6 @@ import org.elasticsearch.index.analysis.NumericDateAnalyzer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.MetadataFieldMapper; @@ -380,7 +379,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { TimestampFieldMapper timestampFieldMapperMergeWith = (TimestampFieldMapper) mergeWith; super.merge(mergeWith, mergeResult); if (!mergeResult.simulate()) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java index 248718d46e4..d4acc3c5975 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java @@ -40,7 +40,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; @@ -226,7 +225,7 @@ public class TypeFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { // do nothing here, no merging, but also no exception } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java index b3c3d983361..ef4c48e62e3 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java @@ -33,7 +33,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; @@ -226,7 +225,7 @@ public class UidFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { // do nothing here, no merging, but also no exception } } diff --git 
a/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java index 42a58117687..292a622ab73 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java @@ -30,7 +30,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; @@ -167,7 +166,7 @@ public class VersionFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { // nothing to do } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java index 9dc1a1d3dc7..88f89719050 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java @@ -464,7 +464,7 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, } @Override - public void merge(final Mapper mergeWith, final MergeResult mergeResult) throws MergeMappingException { + public void merge(final Mapper mergeWith, final MergeResult mergeResult) { if (!(mergeWith instanceof ObjectMapper)) { mergeResult.addConflict("Can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping [" + name() + "]"); return; diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java index 58d1c4b703c..823362140a1 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java @@ -128,7 +128,11 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder(); diff --git a/core/src/main/java/org/elasticsearch/index/translog/BufferingTranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/BufferingTranslogWriter.java index 0e84c73f47a..6026468973a 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/BufferingTranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/BufferingTranslogWriter.java @@ -47,6 +47,7 @@ public final class BufferingTranslogWriter extends TranslogWriter { @Override public Translog.Location add(BytesReference data) throws IOException { try (ReleasableLock lock = writeLock.acquire()) { + ensureOpen(); operationCounter++; final long offset = totalOffset; if (data.length() >= buffer.length) { @@ -106,19 +107,25 @@ public final class BufferingTranslogWriter extends TranslogWriter { return; } synchronized (this) { - try (ReleasableLock lock = writeLock.acquire()) { - flush(); - lastSyncedOffset = totalOffset; + channelReference.incRef(); + try { + try (ReleasableLock lock = writeLock.acquire()) { + flush(); + lastSyncedOffset = totalOffset; + } + // we can do this outside of the write lock but we 
have to protect from + // concurrent syncs + checkpoint(lastSyncedOffset, operationCounter, channelReference); + } finally { + channelReference.decRef(); } - // we can do this outside of the write lock but we have to protect from - // concurrent syncs - checkpoint(lastSyncedOffset, operationCounter, channelReference); } } public void updateBufferSize(int bufferSize) { try (ReleasableLock lock = writeLock.acquire()) { + ensureOpen(); if (this.buffer.length != bufferSize) { flush(); this.buffer = new byte[bufferSize]; diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index 2dede2706e3..f71573bf0ba 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -166,7 +166,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC current = createWriter(checkpoint.generation + 1); this.lastCommittedTranslogFileGeneration = translogGeneration.translogFileGeneration; } else { - this.recoveredTranslogs = Collections.EMPTY_LIST; + this.recoveredTranslogs = Collections.emptyList(); IOUtils.rm(location); logger.debug("wipe translog location - creating new translog"); Files.createDirectories(location); @@ -407,6 +407,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC out.seek(end); final ReleasablePagedBytesReference bytes = out.bytes(); try (ReleasableLock lock = readLock.acquire()) { + ensureOpen(); Location location = current.add(bytes); if (config.isSyncOnEachOperation()) { current.sync(); @@ -414,6 +415,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC assert current.assertBytesAtLocation(location, bytes); return location; } + } catch (AlreadyClosedException ex) { + throw ex; } catch (Throwable e) { throw new TranslogException(shardId, "Failed to write operation [" + operation + "]", e); } finally { @@ -579,7 +582,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC * and updated with any future translog. 
*/ public static final class View implements Closeable { - public static final Translog.View EMPTY_VIEW = new View(Collections.EMPTY_LIST, null); + public static final Translog.View EMPTY_VIEW = new View(Collections.emptyList(), null); boolean closed; // last in this list is always FsTranslog.current @@ -1309,8 +1312,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC @Override public void prepareCommit() throws IOException { - ensureOpen(); try (ReleasableLock lock = writeLock.acquire()) { + ensureOpen(); if (currentCommittingTranslog != null) { throw new IllegalStateException("already committing a translog with generation: " + currentCommittingTranslog.getGeneration()); } @@ -1342,9 +1345,9 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC @Override public void commit() throws IOException { - ensureOpen(); ImmutableTranslogReader toClose = null; try (ReleasableLock lock = writeLock.acquire()) { + ensureOpen(); if (currentCommittingTranslog == null) { prepareCommit(); } diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 2290dd69d87..045550cb628 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -123,9 +123,9 @@ public class TranslogWriter extends TranslogReader { * add the given bytes to the translog and return the location they were written at */ public Translog.Location add(BytesReference data) throws IOException { - ensureOpen(); final long position; try (ReleasableLock lock = writeLock.acquire()) { + ensureOpen(); position = writtenOffset; data.writeTo(channel); writtenOffset = writtenOffset + data.length(); @@ -200,9 +200,9 @@ public class TranslogWriter extends TranslogReader { * returns a new immutable reader which only exposes the current written operation * */ public ImmutableTranslogReader immutableReader() throws TranslogException { - ensureOpen(); if (channelReference.tryIncRef()) { try (ReleasableLock lock = writeLock.acquire()) { + ensureOpen(); flush(); ImmutableTranslogReader reader = new ImmutableTranslogReader(this.generation, channelReference, firstOperationOffset, writtenOffset, operationCounter); channelReference.incRef(); // for new reader diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index ca059fa25f0..dead72aee8b 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -264,7 +264,7 @@ public class IndicesService extends AbstractLifecycleComponent i } final String indexName = indexMetaData.getIndex(); final Predicate indexNameMatcher = (indexExpression) -> indexNameExpressionResolver.matchesIndex(indexName, indexExpression, clusterService.state()); - final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, Collections.EMPTY_LIST, indexNameMatcher); + final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, Collections.emptyList(), indexNameMatcher); Index index = new Index(indexMetaData.getIndex()); if (indices.containsKey(index.name())) { throw new IndexAlreadyExistsException(index); @@ -461,7 +461,7 @@ public class IndicesService extends AbstractLifecycleComponent i /** * This method deletes the shard contents on disk for the given 
shard ID. This method will fail if the shard deleting - * is prevented by {@link #canDeleteShardContent(org.elasticsearch.index.shard.ShardId, org.elasticsearch.cluster.metadata.IndexMetaData)} + * is prevented by {@link #canDeleteShardContent(ShardId, IndexSettings)} * of if the shards lock can not be acquired. * * On data nodes, if the deleted shard is the last shard folder in its index, the method will attempt to remove the index folder as well. @@ -529,18 +529,10 @@ public class IndicesService extends AbstractLifecycleComponent i * * * @param shardId the shard to delete. - * @param metaData the shards index metadata. This is required to access the indexes settings etc. + * @param indexSettings the shards's relevant {@link IndexSettings}. This is required to access the indexes settings etc. */ - public boolean canDeleteShardContent(ShardId shardId, IndexMetaData metaData) { - // we need the metadata here since we have to build the complete settings - // to decide where the shard content lives. In the future we might even need more info here ie. for shadow replicas - // The plan was to make it harder to miss-use and ask for metadata instead of simple settings - assert shardId.getIndex().equals(metaData.getIndex()); - final IndexSettings indexSettings = buildIndexSettings(metaData); - return canDeleteShardContent(shardId, indexSettings); - } - - private boolean canDeleteShardContent(ShardId shardId, IndexSettings indexSettings) { + public boolean canDeleteShardContent(ShardId shardId, IndexSettings indexSettings) { + assert shardId.getIndex().equals(indexSettings.getIndex().name()); final IndexService indexService = this.indices.get(shardId.getIndex()); if (indexSettings.isOnSharedFilesystem() == false) { if (indexService != null && nodeEnv.hasNodeFile()) { @@ -562,7 +554,7 @@ public class IndicesService extends AbstractLifecycleComponent i // play safe here and make sure that we take node level settings into account. // we might run on nodes where we use shard FS and then in the future don't delete // actual content. - return new IndexSettings(metaData, settings, Collections.EMPTY_LIST); + return new IndexSettings(metaData, settings, Collections.emptyList()); } /** diff --git a/core/src/main/java/org/elasticsearch/indices/cache/query/IndicesQueryCache.java b/core/src/main/java/org/elasticsearch/indices/cache/query/IndicesQueryCache.java index 148f7ba8bdb..23b4bc84c44 100644 --- a/core/src/main/java/org/elasticsearch/indices/cache/query/IndicesQueryCache.java +++ b/core/src/main/java/org/elasticsearch/indices/cache/query/IndicesQueryCache.java @@ -150,18 +150,23 @@ public class IndicesQueryCache extends AbstractComponent implements QueryCache, protected void onDocIdSetEviction(Object readerCoreKey, int numEntries, long sumRamBytesUsed) { assert Thread.holdsLock(this); super.onDocIdSetEviction(readerCoreKey, numEntries, sumRamBytesUsed); - // We can't use ShardCoreKeyMap here because its core closed - // listener is called before the listener of the cache which - // triggers this eviction. 
So instead we use use stats2 that - // we only evict when nothing is cached anymore on the segment - // instead of relying on close listeners - final StatsAndCount statsAndCount = stats2.get(readerCoreKey); - final Stats shardStats = statsAndCount.stats; - shardStats.cacheSize -= numEntries; - shardStats.ramBytesUsed -= sumRamBytesUsed; - statsAndCount.count -= numEntries; - if (statsAndCount.count == 0) { - stats2.remove(readerCoreKey); + // onDocIdSetEviction might sometimes be called with a number + // of entries equal to zero if the cache for the given segment + // was already empty when the close listener was called + if (numEntries > 0) { + // We can't use ShardCoreKeyMap here because its core closed + // listener is called before the listener of the cache which + // triggers this eviction. So instead we use use stats2 that + // we only evict when nothing is cached anymore on the segment + // instead of relying on close listeners + final StatsAndCount statsAndCount = stats2.get(readerCoreKey); + final Stats shardStats = statsAndCount.stats; + shardStats.cacheSize -= numEntries; + shardStats.ramBytesUsed -= sumRamBytesUsed; + statsAndCount.count -= numEntries; + if (statsAndCount.count == 0) { + stats2.remove(readerCoreKey); + } } } diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index cd60b87765a..64ff6c74587 100644 --- a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -245,7 +245,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent typesToRefresh = new ArrayList<>(); + boolean requireRefresh = false; String index = indexMetaData.getIndex(); IndexService indexService = indicesService.indexService(index); if (indexService == null) { @@ -358,31 +358,17 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent cursor : indexMetaData.getMappings().values()) { MappingMetaData mappingMd = cursor.value; String mappingType = mappingMd.type(); CompressedXContent mappingSource = mappingMd.source(); - if (mappingType.equals(MapperService.DEFAULT_MAPPING)) { // we processed _default_ first - continue; - } - boolean requireRefresh = processMapping(index, mapperService, mappingType, mappingSource); - if (requireRefresh) { - typesToRefresh.add(mappingType); - } + requireRefresh |= processMapping(index, mapperService, mappingType, mappingSource); } - if (!typesToRefresh.isEmpty() && sendRefreshMapping) { + if (requireRefresh && sendRefreshMapping) { nodeMappingRefreshAction.nodeMappingRefresh(event.state(), new NodeMappingRefreshAction.NodeMappingRefreshRequest(index, indexMetaData.getIndexUUID(), - typesToRefresh.toArray(new String[typesToRefresh.size()]), event.state().nodes().localNodeId()) + event.state().nodes().localNodeId()) ); } } catch (Throwable t) { @@ -398,26 +384,21 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent(index, mappingType))) { - seenMappings.put(new Tuple<>(index, mappingType), true); - } - - // refresh mapping can happen for 2 reasons. The first is less urgent, and happens when the mapping on this - // node is ahead of what there is in the cluster state (yet an update-mapping has been sent to it already, - // it just hasn't been processed yet and published). 
Eventually, the mappings will converge, and the refresh - // mapping sent is more of a safe keeping (assuming the update mapping failed to reach the master, ...) - // the second case is where the parsing/merging of the mapping from the metadata doesn't result in the same + // refresh mapping can happen when the parsing/merging of the mapping from the metadata doesn't result in the same // mapping, in this case, we send to the master to refresh its own version of the mappings (to conform with the // merge version of it, which it does when refreshing the mappings), and warn log it. boolean requiresRefresh = false; try { - if (!mapperService.hasMapping(mappingType)) { + DocumentMapper existingMapper = mapperService.documentMapper(mappingType); + + if (existingMapper == null || mappingSource.equals(existingMapper.mappingSource()) == false) { + String op = existingMapper == null ? "adding" : "updating"; if (logger.isDebugEnabled() && mappingSource.compressed().length < 512) { - logger.debug("[{}] adding mapping [{}], source [{}]", index, mappingType, mappingSource.string()); + logger.debug("[{}] {} mapping [{}], source [{}]", index, op, mappingType, mappingSource.string()); } else if (logger.isTraceEnabled()) { - logger.trace("[{}] adding mapping [{}], source [{}]", index, mappingType, mappingSource.string()); + logger.trace("[{}] {} mapping [{}], source [{}]", index, op, mappingType, mappingSource.string()); } else { - logger.debug("[{}] adding mapping [{}] (source suppressed due to length, use TRACE level if needed)", index, mappingType); + logger.debug("[{}] {} mapping [{}] (source suppressed due to length, use TRACE level if needed)", index, op, mappingType); } // we don't apply default, since it has been applied when the mappings were parsed initially mapperService.merge(mappingType, mappingSource, false, true); @@ -425,24 +406,6 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent tuple : seenMappings.keySet()) { - if (tuple.v1().equals(index)) { - seenMappings.remove(tuple); - } - } - } - private void deleteIndex(String index, String reason) { try { indicesService.deleteIndex(index, reason); } catch (Throwable e) { logger.warn("failed to delete index ({})", e, reason); } - // clear seen mappings as well - clearSeenMappings(index); } diff --git a/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java b/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java index bb6f85bc0ba..d9c08431ef8 100644 --- a/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java +++ b/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java @@ -33,7 +33,6 @@ import org.elasticsearch.index.engine.FlushNotAllowedEngineException; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardState; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.threadpool.ThreadPool; @@ -200,23 +199,17 @@ public class IndexingMemoryController extends AbstractLifecycleComponent availableShards() { - ArrayList list = new ArrayList<>(); + protected List availableShards() { + List availableShards = new ArrayList<>(); for (IndexService indexService : indicesService) { - for (IndexShard indexShard : indexService) { - if (shardAvailable(indexShard)) { - list.add(indexShard.shardId()); + for (IndexShard 
shard : indexService) { + if (shardAvailable(shard)) { + availableShards.add(shard); } } } - return list; - } - - /** returns true if shard exists and is availabe for updates */ - protected boolean shardAvailable(ShardId shardId) { - return shardAvailable(getShard(shardId)); + return availableShards; } /** returns true if shard exists and is availabe for updates */ @@ -225,19 +218,8 @@ public class IndexingMemoryController extends AbstractLifecycleComponent shardWasActive = new HashMap<>(); - @Override public synchronized void run() { - EnumSet changes = purgeDeletedAndClosedShards(); - - updateShardStatuses(changes); - - if (changes.isEmpty() == false) { - // Something changed: recompute indexing buffers: - calcAndSetShardBuffers("[" + changes + "]"); - } - } - - /** - * goes through all existing shards and check whether there are changes in their active status - */ - private void updateShardStatuses(EnumSet changes) { - for (ShardId shardId : availableShards()) { - - // Is the shard active now? - Boolean isActive = getShardActive(shardId); - - if (isActive == null) { - // shard was closed.. - continue; - } - - // Was the shard active last time we checked? - Boolean wasActive = shardWasActive.get(shardId); - if (wasActive == null) { - // First time we are seeing this shard - shardWasActive.put(shardId, isActive); - changes.add(ShardStatusChangeType.ADDED); - } else if (isActive) { - // Shard is active now - if (wasActive == false) { - // Shard became active itself, since we last checked (due to new indexing op arriving) - changes.add(ShardStatusChangeType.BECAME_ACTIVE); - logger.debug("marking shard {} as active indexing wise", shardId); - shardWasActive.put(shardId, true); - } else if (checkIdle(shardId) == Boolean.TRUE) { - // Make shard inactive now - changes.add(ShardStatusChangeType.BECAME_INACTIVE); - - shardWasActive.put(shardId, false); - } - } - } - } - - /** - * purge any existing statuses that are no longer updated - * - * @return the changes applied - */ - private EnumSet purgeDeletedAndClosedShards() { - EnumSet changes = EnumSet.noneOf(ShardStatusChangeType.class); - - Iterator statusShardIdIterator = shardWasActive.keySet().iterator(); - while (statusShardIdIterator.hasNext()) { - ShardId shardId = statusShardIdIterator.next(); - if (shardAvailable(shardId) == false) { - changes.add(ShardStatusChangeType.DELETED); - statusShardIdIterator.remove(); - } - } - return changes; - } - - private void calcAndSetShardBuffers(String reason) { - - // Count how many shards are now active: - int activeShardCount = 0; - for (Map.Entry ent : shardWasActive.entrySet()) { - if (ent.getValue()) { - activeShardCount++; + List availableShards = availableShards(); + List activeShards = new ArrayList<>(); + for (IndexShard shard : availableShards) { + if (!checkIdle(shard)) { + activeShards.add(shard); } } + int activeShardCount = activeShards.size(); // TODO: we could be smarter here by taking into account how RAM the IndexWriter on each shard // is actually using (using IW.ramBytesUsed), so that small indices (e.g. Marvel) would not // get the same indexing buffer as large indices. But it quickly gets tricky... 
if (activeShardCount == 0) { - logger.debug("no active shards (reason={})", reason); return; } @@ -372,13 +273,10 @@ public class IndexingMemoryController extends AbstractLifecycleComponent ent : shardWasActive.entrySet()) { - if (ent.getValue()) { - // This shard is active - updateShardBuffers(ent.getKey(), shardIndexingBufferSize, shardTranslogBufferSize); - } + for (IndexShard shard : activeShards) { + updateShardBuffers(shard, shardIndexingBufferSize, shardTranslogBufferSize); } } } @@ -387,38 +285,17 @@ public class IndexingMemoryController extends AbstractLifecycleComponent { private final FsService fsService; - @Inject - public MonitorService(Settings settings, JvmMonitorService jvmMonitorService, - OsService osService, ProcessService processService, JvmService jvmService, - FsService fsService) { + public MonitorService(Settings settings, NodeEnvironment nodeEnvironment, ThreadPool threadPool) throws IOException { super(settings); - this.jvmMonitorService = jvmMonitorService; - this.osService = osService; - this.processService = processService; - this.jvmService = jvmService; - this.fsService = fsService; + this.jvmMonitorService = new JvmMonitorService(settings, threadPool); + this.osService = new OsService(settings); + this.processService = new ProcessService(settings); + this.jvmService = new JvmService(settings); + this.fsService = new FsService(settings, nodeEnvironment); } public OsService osService() { diff --git a/core/src/main/java/org/elasticsearch/monitor/fs/FsProbe.java b/core/src/main/java/org/elasticsearch/monitor/fs/FsProbe.java index 56bc352a5bc..dc1958f666b 100644 --- a/core/src/main/java/org/elasticsearch/monitor/fs/FsProbe.java +++ b/core/src/main/java/org/elasticsearch/monitor/fs/FsProbe.java @@ -20,7 +20,6 @@ package org.elasticsearch.monitor.fs; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.NodeEnvironment.NodePath; @@ -31,7 +30,6 @@ public class FsProbe extends AbstractComponent { private final NodeEnvironment nodeEnv; - @Inject public FsProbe(Settings settings, NodeEnvironment nodeEnv) { super(settings); this.nodeEnv = nodeEnv; diff --git a/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java b/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java index c95a7bf8b3a..7019ec48e0b 100644 --- a/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java +++ b/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java @@ -20,10 +20,10 @@ package org.elasticsearch.monitor.fs; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.SingleObjectCache; +import org.elasticsearch.env.NodeEnvironment; import java.io.IOException; @@ -35,10 +35,9 @@ public class FsService extends AbstractComponent { private final SingleObjectCache fsStatsCache; - @Inject - public FsService(Settings settings, FsProbe probe) throws IOException { + public FsService(Settings settings, NodeEnvironment nodeEnvironment) throws IOException { super(settings); - this.probe = probe; + this.probe = new FsProbe(settings, nodeEnvironment); TimeValue refreshInterval = settings.getAsTime("monitor.fs.refresh_interval", TimeValue.timeValueSeconds(1)); fsStatsCache = new FsInfoCache(refreshInterval, 
probe.stats()); logger.debug("Using probe [{}] with refresh_interval [{}]", probe, refreshInterval); diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmMonitorService.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmMonitorService.java index a11fc2957a4..8d83435bb98 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmMonitorService.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmMonitorService.java @@ -20,7 +20,6 @@ package org.elasticsearch.monitor.jvm; import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.FutureUtils; @@ -71,7 +70,6 @@ public class JvmMonitorService extends AbstractLifecycleComponent osStatsCache; - @Inject - public OsService(Settings settings, OsProbe probe) { + public OsService(Settings settings) { super(settings); - this.probe = probe; + this.probe = OsProbe.getInstance(); TimeValue refreshInterval = settings.getAsTime("monitor.os.refresh_interval", TimeValue.timeValueSeconds(1)); diff --git a/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java b/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java index 08d286dd983..0861dfe5b0c 100644 --- a/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java +++ b/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java @@ -20,7 +20,6 @@ package org.elasticsearch.monitor.process; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.SingleObjectCache; @@ -34,10 +33,9 @@ public final class ProcessService extends AbstractComponent { private final ProcessInfo info; private final SingleObjectCache processStatsCache; - @Inject - public ProcessService(Settings settings, ProcessProbe probe) { + public ProcessService(Settings settings) { super(settings); - this.probe = probe; + this.probe = ProcessProbe.getInstance(); final TimeValue refreshInterval = settings.getAsTime("monitor.process.refresh_interval", TimeValue.timeValueSeconds(1)); processStatsCache = new ProcessStatsCache(refreshInterval, probe.processStats()); diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index d245692acc9..04fc7e95565 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -20,8 +20,10 @@ package org.elasticsearch.node; import org.elasticsearch.Build; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionModule; +import org.elasticsearch.bootstrap.Elasticsearch; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.client.Client; import org.elasticsearch.client.node.NodeClientModule; @@ -41,7 +43,9 @@ import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; import 
org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoveryModule; @@ -66,7 +70,6 @@ import org.elasticsearch.indices.memory.IndexingMemoryController; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.indices.ttl.IndicesTTLService; -import org.elasticsearch.monitor.MonitorModule; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.node.internal.InternalSettingsPreparer; @@ -144,7 +147,7 @@ public class Node implements Releasable { tmpEnv.configFile(), Arrays.toString(tmpEnv.dataFiles()), tmpEnv.logsFile(), tmpEnv.pluginsFile()); } - this.pluginsService = new PluginsService(tmpSettings, tmpEnv.pluginsFile(), classpathPlugins); + this.pluginsService = new PluginsService(tmpSettings, tmpEnv.modulesFile(), tmpEnv.pluginsFile(), classpathPlugins); this.settings = pluginsService.updatedSettings(); // create the environment based on the finalized (processed) view of the settings this.environment = new Environment(this.settings()); @@ -155,11 +158,13 @@ public class Node implements Releasable { } catch (IOException ex) { throw new IllegalStateException("Failed to created node environment", ex); } - + final NetworkService networkService = new NetworkService(settings); + final NodeSettingsService nodeSettingsService = new NodeSettingsService(settings); + final SettingsFilter settingsFilter = new SettingsFilter(settings); final ThreadPool threadPool = new ThreadPool(settings); - boolean success = false; try { + final MonitorService monitorService = new MonitorService(settings, nodeEnvironment, threadPool); ModulesBuilder modules = new ModulesBuilder(); modules.add(new Version.Module(version)); modules.add(new CircuitBreakerModule(settings)); @@ -168,9 +173,9 @@ public class Node implements Releasable { modules.add(pluginModule); } modules.add(new PluginsModule(pluginsService)); - modules.add(new SettingsModule(this.settings)); - modules.add(new NodeModule(this)); - modules.add(new NetworkModule()); + modules.add(new SettingsModule(this.settings, settingsFilter)); + modules.add(new NodeModule(this, nodeSettingsService, monitorService)); + modules.add(new NetworkModule(networkService)); modules.add(new ScriptModule(this.settings)); modules.add(new EnvironmentModule(environment)); modules.add(new NodeEnvironmentModule(nodeEnvironment)); @@ -186,7 +191,6 @@ public class Node implements Releasable { modules.add(new IndicesModule()); modules.add(new SearchModule()); modules.add(new ActionModule(false)); - modules.add(new MonitorModule(settings)); modules.add(new GatewayModule(settings)); modules.add(new NodeClientModule()); modules.add(new PercolatorModule()); @@ -195,7 +199,6 @@ public class Node implements Releasable { modules.add(new TribeModule()); modules.add(new AnalysisModule(environment)); - pluginsService.processModules(modules); injector = modules.createInjector(); @@ -203,6 +206,8 @@ public class Node implements Releasable { client = injector.getInstance(Client.class); threadPool.setNodeSettingsService(injector.getInstance(NodeSettingsService.class)); success = true; + } catch (IOException ex) { + throw new ElasticsearchException("failed to bind service", ex); } finally { if (!success) { nodeEnvironment.close(); diff --git a/core/src/main/java/org/elasticsearch/node/NodeModule.java b/core/src/main/java/org/elasticsearch/node/NodeModule.java index 
befba85af09..3641c325030 100644 --- a/core/src/main/java/org/elasticsearch/node/NodeModule.java +++ b/core/src/main/java/org/elasticsearch/node/NodeModule.java @@ -22,6 +22,7 @@ package org.elasticsearch.node; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.node.Node; import org.elasticsearch.node.service.NodeService; import org.elasticsearch.node.settings.NodeSettingsService; @@ -32,13 +33,17 @@ import org.elasticsearch.node.settings.NodeSettingsService; public class NodeModule extends AbstractModule { private final Node node; + private final NodeSettingsService nodeSettingsService; + private final MonitorService monitorService; // pkg private so tests can mock Class pageCacheRecyclerImpl = PageCacheRecycler.class; Class bigArraysImpl = BigArrays.class; - public NodeModule(Node node) { + public NodeModule(Node node, NodeSettingsService nodeSettingsService, MonitorService monitorService) { this.node = node; + this.nodeSettingsService = nodeSettingsService; + this.monitorService = monitorService; } @Override @@ -55,7 +60,8 @@ public class NodeModule extends AbstractModule { } bind(Node.class).toInstance(node); - bind(NodeSettingsService.class).asEagerSingleton(); + bind(NodeSettingsService.class).toInstance(nodeSettingsService); + bind(MonitorService.class).toInstance(monitorService); bind(NodeService.class).asEagerSingleton(); } } diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java index 6e164c3afc5..70abaaaff3d 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java @@ -729,7 +729,7 @@ public class PercolateContext extends SearchContext { @Override public Set getHeaders() { - return Collections.EMPTY_SET; + return Collections.emptySet(); } @Override diff --git a/core/src/main/java/org/elasticsearch/plugins/Plugin.java b/core/src/main/java/org/elasticsearch/plugins/Plugin.java index 7c98879953f..1db0ac966d7 100644 --- a/core/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -71,13 +71,7 @@ public abstract class Plugin { } /** - * Called once the given {@link IndexService} is fully constructed but not yet published. - * This is used to initialize plugin services that require acess to index level resources - */ - public void onIndexService(IndexService indexService) {} - - /** - * Called before a new index is created on a node. The given module can be used to regsiter index-leve + * Called before a new index is created on a node. The given module can be used to register index-level * extensions. 
*/ public void onIndexModule(IndexModule indexModule) {} diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java index fa190354018..599395807d0 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java @@ -66,6 +66,10 @@ public class PluginManager { "plugin", "plugin.bat", "service.bat")); + + static final Set MODULES = unmodifiableSet(newHashSet( + "lang-expression", + "lang-groovy")); static final Set OFFICIAL_PLUGINS = unmodifiableSet(newHashSet( "analysis-icu", @@ -78,8 +82,6 @@ public class PluginManager { "discovery-ec2", "discovery-gce", "discovery-multicast", - "lang-expression", - "lang-groovy", "lang-javascript", "lang-python", "mapper-attachments", @@ -221,6 +223,12 @@ public class PluginManager { PluginInfo info = PluginInfo.readFromProperties(root); terminal.println(VERBOSE, "%s", info); + // don't let luser install plugin as a module... + // they might be unavoidably in maven central and are packaged up the same way) + if (MODULES.contains(info.getName())) { + throw new IOException("plugin '" + info.getName() + "' cannot be installed like this, it is a system module"); + } + // update name in handle based on 'name' property found in descriptor file pluginHandle = new PluginHandle(info.getName(), pluginHandle.version, pluginHandle.user); final Path extractLocation = pluginHandle.extractedDir(environment); diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index 4fddf7ad838..4cd5f114616 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -25,9 +25,8 @@ import org.apache.lucene.analysis.util.TokenizerFactory; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo; +import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.bootstrap.JarHell; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; @@ -39,10 +38,7 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.IndexEventListener; -import java.io.Closeable; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; @@ -50,6 +46,7 @@ import java.net.URL; import java.net.URLClassLoader; import java.nio.file.DirectoryStream; import java.nio.file.Files; +import java.nio.file.NoSuchFileException; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; @@ -69,10 +66,10 @@ import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory; public class PluginsService extends AbstractComponent { /** - * We keep around a list of plugins + * We keep around a list of plugins and modules */ private final List> plugins; - private final PluginsInfo info; + private final PluginsAndModules info; private final Map> onModuleReferences; @@ -89,13 +86,15 @@ public 
class PluginsService extends AbstractComponent { /** * Constructs a new PluginService * @param settings The settings of the system + * @param modulesDirectory The directory modules exist in, or null if modules should not be loaded from the filesystem * @param pluginsDirectory The directory plugins exist in, or null if plugins should not be loaded from the filesystem * @param classpathPlugins Plugins that exist in the classpath which should be loaded */ - public PluginsService(Settings settings, Path pluginsDirectory, Collection> classpathPlugins) { + public PluginsService(Settings settings, Path modulesDirectory, Path pluginsDirectory, Collection> classpathPlugins) { super(settings); + info = new PluginsAndModules(); - List> tupleBuilder = new ArrayList<>(); + List> pluginsLoaded = new ArrayList<>(); // first we load plugins that are on the classpath. this is for tests and transport clients for (Class pluginClass : classpathPlugins) { @@ -104,24 +103,39 @@ public class PluginsService extends AbstractComponent { if (logger.isTraceEnabled()) { logger.trace("plugin loaded from classpath [{}]", pluginInfo); } - tupleBuilder.add(new Tuple<>(pluginInfo, plugin)); + pluginsLoaded.add(new Tuple<>(pluginInfo, plugin)); + info.addPlugin(pluginInfo); + } + + // load modules + if (modulesDirectory != null) { + try { + List bundles = getModuleBundles(modulesDirectory); + List> loaded = loadBundles(bundles); + pluginsLoaded.addAll(loaded); + for (Tuple module : loaded) { + info.addModule(module.v1()); + } + } catch (IOException ex) { + throw new IllegalStateException("Unable to initialize modules", ex); + } } // now, find all the ones that are in plugins/ if (pluginsDirectory != null) { try { List bundles = getPluginBundles(pluginsDirectory); - tupleBuilder.addAll(loadBundles(bundles)); + List> loaded = loadBundles(bundles); + pluginsLoaded.addAll(loaded); + for (Tuple plugin : loaded) { + info.addPlugin(plugin.v1()); + } } catch (IOException ex) { throw new IllegalStateException("Unable to initialize plugins", ex); } } - plugins = Collections.unmodifiableList(tupleBuilder); - info = new PluginsInfo(); - for (Tuple tuple : plugins) { - info.add(tuple.v1()); - } + plugins = Collections.unmodifiableList(pluginsLoaded); // We need to build a List of jvm and site plugins for checking mandatory plugins Map jvmPlugins = new HashMap<>(); @@ -151,7 +165,18 @@ public class PluginsService extends AbstractComponent { } } - logger.info("loaded {}, sites {}", jvmPlugins.keySet(), sitePlugins); + // we don't log jars in lib/ we really shouldnt log modules, + // but for now: just be transparent so we can debug any potential issues + Set moduleNames = new HashSet<>(); + Set jvmPluginNames = new HashSet<>(); + for (PluginInfo moduleInfo : info.getModuleInfos()) { + moduleNames.add(moduleInfo.getName()); + } + for (PluginInfo pluginInfo : info.getPluginInfos()) { + jvmPluginNames.add(pluginInfo.getName()); + } + + logger.info("modules {}, plugins {}, sites {}", moduleNames, jvmPluginNames, sitePlugins); Map> onModuleReferences = new HashMap<>(); for (Plugin plugin : jvmPlugins.values()) { @@ -160,6 +185,10 @@ public class PluginsService extends AbstractComponent { if (!method.getName().equals("onModule")) { continue; } + // this is a deprecated final method, so all Plugin subclasses have it + if (method.getParameterTypes().length == 1 && method.getParameterTypes()[0].equals(IndexModule.class)) { + continue; + } if (method.getParameterTypes().length == 0 || method.getParameterTypes().length > 1) { logger.warn("Plugin: {} 
implementing onModule with no parameters or more than one parameter", plugin.name()); continue; @@ -178,7 +207,7 @@ public class PluginsService extends AbstractComponent { this.onModuleReferences = Collections.unmodifiableMap(onModuleReferences); } - public List> plugins() { + private List> plugins() { return plugins; } @@ -247,22 +276,14 @@ public class PluginsService extends AbstractComponent { for (Tuple plugin : plugins) { plugin.v2().onIndexModule(indexModule); } - indexModule.addIndexEventListener(new IndexEventListener() { - @Override - public void afterIndexCreated(IndexService indexService) { - for (Tuple plugin : plugins) { - plugin.v2().onIndexService(indexService); - } - } - }); } /** - * Get information about plugins (jvm and site plugins). + * Get information about plugins and modules */ - public PluginsInfo info() { + public PluginsAndModules info() { return info; } - + // a "bundle" is a group of plugins in a single classloader // really should be 1-1, but we are not so fortunate static class Bundle { @@ -270,6 +291,40 @@ public class PluginsService extends AbstractComponent { List urls = new ArrayList<>(); } + // similar in impl to getPluginBundles, but DO NOT try to make them share code. + // we don't need to inherit all the leniency, and things are different enough. + static List getModuleBundles(Path modulesDirectory) throws IOException { + // damn leniency + if (Files.notExists(modulesDirectory)) { + return Collections.emptyList(); + } + List bundles = new ArrayList<>(); + try (DirectoryStream stream = Files.newDirectoryStream(modulesDirectory)) { + for (Path module : stream) { + if (FileSystemUtils.isHidden(module)) { + continue; // skip over .DS_Store etc + } + PluginInfo info = PluginInfo.readFromProperties(module); + if (!info.isJvm()) { + throw new IllegalStateException("modules must be jvm plugins: " + info); + } + if (!info.isIsolated()) { + throw new IllegalStateException("modules must be isolated: " + info); + } + Bundle bundle = new Bundle(); + bundle.plugins.add(info); + // gather urls for jar files + try (DirectoryStream jarStream = Files.newDirectoryStream(module, "*.jar")) { + for (Path jar : jarStream) { + bundle.urls.add(jar.toUri().toURL()); + } + } + bundles.add(bundle); + } + } + return bundles; + } + static List getPluginBundles(Path pluginsDirectory) throws IOException { ESLogger logger = Loggers.getLogger(PluginsService.class); @@ -277,7 +332,7 @@ public class PluginsService extends AbstractComponent { if (!isAccessibleDirectory(pluginsDirectory, logger)) { return Collections.emptyList(); } - + List bundles = new ArrayList<>(); // a special purgatory for plugins that directly depend on each other bundles.add(new Bundle()); @@ -289,7 +344,14 @@ public class PluginsService extends AbstractComponent { continue; } logger.trace("--- adding plugin [{}]", plugin.toAbsolutePath()); - PluginInfo info = PluginInfo.readFromProperties(plugin); + final PluginInfo info; + try { + info = PluginInfo.readFromProperties(plugin); + } catch (IOException e) { + throw new IllegalStateException("Could not load plugin descriptor for existing plugin [" + + plugin.getFileName() + "]. 
Was the plugin built before 2.0?", e); + } + List urls = new ArrayList<>(); if (info.isJvm()) { // a jvm plugin: gather urls for jar files @@ -310,7 +372,7 @@ public class PluginsService extends AbstractComponent { bundle.urls.addAll(urls); } } - + return bundles; } @@ -328,7 +390,7 @@ public class PluginsService extends AbstractComponent { } catch (Exception e) { throw new IllegalStateException("failed to load bundle " + bundle.urls + " due to jar hell", e); } - + // create a child to load the plugins in this bundle ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), getClass().getClassLoader()); for (PluginInfo pluginInfo : bundle.plugins) { diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 8c5088e757b..2648a183362 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -294,7 +294,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent indices = Collections.EMPTY_LIST; + List indices = Collections.emptyList(); Snapshot snapshot = null; try { snapshot = readSnapshot(snapshotId); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java index 200d9dc9825..6e3a889f691 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.admin.cluster.repositories.verify; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; -import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -30,9 +29,6 @@ import org.elasticsearch.rest.action.support.RestToXContentListener; import static org.elasticsearch.client.Requests.verifyRepositoryRequest; import static org.elasticsearch.rest.RestRequest.Method.POST; -/** - * Registers repositories - */ public class RestVerifyRepositoryAction extends BaseRestHandler { @Inject @@ -41,12 +37,11 @@ public class RestVerifyRepositoryAction extends BaseRestHandler { controller.registerHandler(POST, "/_snapshot/{repository}/_verify", this); } - @Override public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { VerifyRepositoryRequest verifyRepositoryRequest = verifyRepositoryRequest(request.param("repository")); verifyRepositoryRequest.masterNodeTimeout(request.paramAsTime("master_timeout", verifyRepositoryRequest.masterNodeTimeout())); verifyRepositoryRequest.timeout(request.paramAsTime("timeout", verifyRepositoryRequest.timeout())); - client.admin().cluster().verifyRepository(verifyRepositoryRequest, new RestToXContentListener(channel)); + client.admin().cluster().verifyRepository(verifyRepositoryRequest, new RestToXContentListener<>(channel)); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java 
b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java index 4bec04845d0..730276c1a2b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java @@ -45,9 +45,6 @@ public class RestForceMergeAction extends BaseRestHandler { super(settings, controller, client); controller.registerHandler(POST, "/_forcemerge", this); controller.registerHandler(POST, "/{index}/_forcemerge", this); - - controller.registerHandler(GET, "/_forcemerge", this); - controller.registerHandler(GET, "/{index}/_forcemerge", this); } @Override diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java index 058a93bf6b9..b52f8e6fc10 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java @@ -95,7 +95,7 @@ public class RestPluginsAction extends AbstractCatAction { for (DiscoveryNode node : nodes) { NodeInfo info = nodesInfo.getNodesMap().get(node.id()); - for (PluginInfo pluginInfo : info.getPlugins().getInfos()) { + for (PluginInfo pluginInfo : info.getPlugins().getPluginInfos()) { table.startRow(); table.addCell(node.id()); table.addCell(node.name()); diff --git a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java index ec673952b7c..f3730965263 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java @@ -73,7 +73,7 @@ public class RestIndexAction extends BaseRestHandler { indexRequest.parent(request.param("parent")); // order is important, set it after routing, so it will set the routing indexRequest.timestamp(request.param("timestamp")); if (request.hasParam("ttl")) { - indexRequest.ttl(request.paramAsTime("ttl", null).millis()); + indexRequest.ttl(request.param("ttl")); } indexRequest.source(request.content()); indexRequest.timeout(request.paramAsTime("timeout", IndexRequest.DEFAULT_TIMEOUT)); diff --git a/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java b/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java index 7a46aadde81..f3b28a7e7c6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java @@ -97,7 +97,7 @@ public class RestUpdateAction extends BaseRestHandler { upsertRequest.parent(request.param("parent")); // order is important, set it after routing, so it will set the routing upsertRequest.timestamp(request.param("timestamp")); if (request.hasParam("ttl")) { - upsertRequest.ttl(request.paramAsTime("ttl", null).millis()); + upsertRequest.ttl(request.param("ttl")); } upsertRequest.version(RestActions.parseVersion(request)); upsertRequest.versionType(VersionType.fromString(request.param("version_type"), upsertRequest.versionType())); @@ -108,7 +108,7 @@ public class RestUpdateAction extends BaseRestHandler { doc.parent(request.param("parent")); // order is important, set it after routing, so it will set the routing doc.timestamp(request.param("timestamp")); if (request.hasParam("ttl")) { - 
doc.ttl(request.paramAsTime("ttl", null).millis()); + doc.ttl(request.param("ttl")); } doc.version(RestActions.parseVersion(request)); doc.versionType(VersionType.fromString(request.param("version_type"), doc.versionType())); diff --git a/core/src/main/java/org/elasticsearch/script/ClassPermission.java b/core/src/main/java/org/elasticsearch/script/ClassPermission.java new file mode 100644 index 00000000000..eb580bac3ea --- /dev/null +++ b/core/src/main/java/org/elasticsearch/script/ClassPermission.java @@ -0,0 +1,171 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script; + +import java.security.BasicPermission; +import java.security.Permission; +import java.security.PermissionCollection; +import java.util.Arrays; +import java.util.Collections; +import java.util.Enumeration; +import java.util.HashSet; +import java.util.Set; + +/** + * Checked by scripting engines to allow loading a java class. + *

+ * Examples:
+ * <p>
+ * Allow permission to {@code java.util.List}
+ * <pre>permission org.elasticsearch.script.ClassPermission "java.util.List";</pre>
+ * Allow permission to classes underneath {@code java.util} (and its subpackages such as {@code java.util.zip})
+ * <pre>permission org.elasticsearch.script.ClassPermission "java.util.*";</pre>
+ * Allow permission to standard predefined list of basic classes (see list below)
+ * <pre>permission org.elasticsearch.script.ClassPermission "&lt;&lt;STANDARD&gt;&gt;";</pre>
+ * Allow permission to all classes
+ * <pre>permission org.elasticsearch.script.ClassPermission "*";</pre>
+ * <p>
+ * Set of classes (allowed by special value &lt;&lt;STANDARD&gt;&gt;):
+ * <ul>
+ *   <li>{@link java.lang.Boolean}</li>
+ *   <li>{@link java.lang.Byte}</li>
+ *   <li>{@link java.lang.Character}</li>
+ *   <li>{@link java.lang.Double}</li>
+ *   <li>{@link java.lang.Integer}</li>
+ *   <li>{@link java.lang.Long}</li>
+ *   <li>{@link java.lang.Math}</li>
+ *   <li>{@link java.lang.Object}</li>
+ *   <li>{@link java.lang.Short}</li>
+ *   <li>{@link java.lang.String}</li>
+ *   <li>{@link java.math.BigDecimal}</li>
+ *   <li>{@link java.util.ArrayList}</li>
+ *   <li>{@link java.util.Arrays}</li>
+ *   <li>{@link java.util.Date}</li>
+ *   <li>{@link java.util.HashMap}</li>
+ *   <li>{@link java.util.HashSet}</li>
+ *   <li>{@link java.util.Iterator}</li>
+ *   <li>{@link java.util.List}</li>
+ *   <li>{@link java.util.Map}</li>
+ *   <li>{@link java.util.Set}</li>
+ *   <li>{@link java.util.UUID}</li>
+ *   <li>{@link org.joda.time.DateTime}</li>
+ *   <li>{@link org.joda.time.DateTimeUtils}</li>
+ *   <li>{@link org.joda.time.DateTimeZone}</li>
+ *   <li>{@link org.joda.time.Instant}</li>
+ * </ul>
    + */ +public final class ClassPermission extends BasicPermission { + private static final long serialVersionUID = 3530711429252193884L; + + public static final String STANDARD = "<>"; + /** Typical set of classes for scripting: basic data types, math, dates, and simple collections */ + // this is the list from the old grovy sandbox impl (+ some things like String, Iterator, etc that were missing) + public static final Set STANDARD_CLASSES = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( + // jdk classes + java.lang.Boolean.class.getName(), + java.lang.Byte.class.getName(), + java.lang.Character.class.getName(), + java.lang.Double.class.getName(), + java.lang.Integer.class.getName(), + java.lang.Long.class.getName(), + java.lang.Math.class.getName(), + java.lang.Object.class.getName(), + java.lang.Short.class.getName(), + java.lang.String.class.getName(), + java.math.BigDecimal.class.getName(), + java.util.ArrayList.class.getName(), + java.util.Arrays.class.getName(), + java.util.Date.class.getName(), + java.util.HashMap.class.getName(), + java.util.HashSet.class.getName(), + java.util.Iterator.class.getName(), + java.util.List.class.getName(), + java.util.Map.class.getName(), + java.util.Set.class.getName(), + java.util.UUID.class.getName(), + // joda-time + org.joda.time.DateTime.class.getName(), + org.joda.time.DateTimeUtils.class.getName(), + org.joda.time.DateTimeZone.class.getName(), + org.joda.time.Instant.class.getName() + ))); + + /** + * Creates a new ClassPermission object. + * + * @param name class to grant permission to + */ + public ClassPermission(String name) { + super(name); + } + + /** + * Creates a new ClassPermission object. + * This constructor exists for use by the {@code Policy} object to instantiate new Permission objects. 
+ * + * @param name class to grant permission to + * @param actions ignored + */ + public ClassPermission(String name, String actions) { + this(name); + } + + @Override + public boolean implies(Permission p) { + // check for a special value of STANDARD to imply the basic set + if (p != null && p.getClass() == getClass()) { + ClassPermission other = (ClassPermission) p; + if (STANDARD.equals(getName()) && STANDARD_CLASSES.contains(other.getName())) { + return true; + } + } + return super.implies(p); + } + + @Override + public PermissionCollection newPermissionCollection() { + // BasicPermissionCollection only handles wildcards, we expand <> here + PermissionCollection impl = super.newPermissionCollection(); + return new PermissionCollection() { + private static final long serialVersionUID = 6792220143549780002L; + + @Override + public void add(Permission permission) { + if (permission instanceof ClassPermission && STANDARD.equals(permission.getName())) { + for (String clazz : STANDARD_CLASSES) { + impl.add(new ClassPermission(clazz)); + } + } else { + impl.add(permission); + } + } + + @Override + public boolean implies(Permission permission) { + return impl.implies(permission); + } + + @Override + public Enumeration elements() { + return impl.elements(); + } + }; + } +} diff --git a/core/src/main/java/org/elasticsearch/script/Template.java b/core/src/main/java/org/elasticsearch/script/Template.java index babe488d4ad..4419d6f5093 100644 --- a/core/src/main/java/org/elasticsearch/script/Template.java +++ b/core/src/main/java/org/elasticsearch/script/Template.java @@ -119,14 +119,12 @@ public class Template extends Script { return template; } - @SuppressWarnings("unchecked") public static Script parse(Map config, boolean removeMatchedEntries, ParseFieldMatcher parseFieldMatcher) { - return new TemplateParser(Collections.EMPTY_MAP, MustacheScriptEngineService.NAME).parse(config, removeMatchedEntries, parseFieldMatcher); + return new TemplateParser(Collections.emptyMap(), MustacheScriptEngineService.NAME).parse(config, removeMatchedEntries, parseFieldMatcher); } - @SuppressWarnings("unchecked") public static Template parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException { - return new TemplateParser(Collections.EMPTY_MAP, MustacheScriptEngineService.NAME).parse(parser, parseFieldMatcher); + return new TemplateParser(Collections.emptyMap(), MustacheScriptEngineService.NAME).parse(parser, parseFieldMatcher); } @Deprecated diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicStreams.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicStreams.java index c58e923280d..64d2ae659e0 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicStreams.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicStreams.java @@ -31,7 +31,7 @@ import java.util.Map; */ public class SignificanceHeuristicStreams { - private static Map STREAMS = Collections.EMPTY_MAP; + private static Map STREAMS = Collections.emptyMap(); static { HashMap map = new HashMap<>(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsPipelineAggregator.java index 89955ef0278..0f2ffea9a75 
100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsPipelineAggregator.java @@ -78,7 +78,7 @@ public abstract class BucketMetricsPipelineAggregator extends SiblingPipelineAgg } } } - return buildAggregation(Collections.EMPTY_LIST, metaData()); + return buildAggregation(Collections.emptyList(), metaData()); } /** @@ -123,4 +123,4 @@ public abstract class BucketMetricsPipelineAggregator extends SiblingPipelineAgg gapPolicy.writeTo(out); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketPipelineAggregator.java index 8101c3caae2..95a70af7934 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketPipelineAggregator.java @@ -90,7 +90,7 @@ public class MaxBucketPipelineAggregator extends BucketMetricsPipelineAggregator @Override protected InternalAggregation buildAggregation(List pipelineAggregators, Map metadata) { String[] keys = maxBucketKeys.toArray(new String[maxBucketKeys.size()]); - return new InternalBucketMetricValue(name(), keys, maxValue, formatter, Collections.EMPTY_LIST, metaData()); + return new InternalBucketMetricValue(name(), keys, maxValue, formatter, Collections.emptyList(), metaData()); } public static class Factory extends PipelineAggregatorFactory { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketPipelineAggregator.java index 5da74281290..755b2060ae6 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketPipelineAggregator.java @@ -91,7 +91,7 @@ public class MinBucketPipelineAggregator extends BucketMetricsPipelineAggregator protected InternalAggregation buildAggregation(java.util.List pipelineAggregators, java.util.Map metadata) { String[] keys = minBucketKeys.toArray(new String[minBucketKeys.size()]); - return new InternalBucketMetricValue(name(), keys, minValue, formatter, Collections.EMPTY_LIST, metaData()); + return new InternalBucketMetricValue(name(), keys, minValue, formatter, Collections.emptyList(), metaData()); }; public static class Factory extends PipelineAggregatorFactory { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelStreams.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelStreams.java index d0314f4c4f6..f1238fb9fbd 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelStreams.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelStreams.java @@ -32,7 +32,7 @@ import java.util.Map; */ public class MovAvgModelStreams { - private static Map STREAMS = Collections.EMPTY_MAP; + private static Map STREAMS = Collections.emptyMap(); static { HashMap map = new 
HashMap<>(); diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index b9663e4a0a0..7963b678fb3 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -408,7 +408,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ try { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject(); - highlightBuilder.innerXContent(builder, EMPTY_PARAMS); + highlightBuilder.innerXContent(builder); builder.endObject(); this.highlightBuilder = builder.bytes(); return this; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 46f97adfd6d..227141e4ddf 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -30,12 +30,12 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.text.StringAndBytesText; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.index.fieldvisitor.AllFieldsVisitor; import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; import org.elasticsearch.index.fieldvisitor.FieldsVisitor; import org.elasticsearch.index.mapper.DocumentMapper; @@ -55,13 +55,7 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.xcontent.XContentFactory.contentBuilder; @@ -98,9 +92,7 @@ public class FetchPhase implements SearchPhase { public void execute(SearchContext context) { FieldsVisitor fieldsVisitor; Set fieldNames = null; - List extractFieldNames = null; - - boolean loadAllStored = false; + List fieldNamePatterns = null; if (!context.hasFieldNames()) { // no fields specified, default to return source if no explicit indication if (!context.hasScriptFields() && !context.hasFetchSourceContext()) { @@ -111,10 +103,6 @@ public class FetchPhase implements SearchPhase { fieldsVisitor = new FieldsVisitor(context.sourceRequested()); } else { for (String fieldName : context.fieldNames()) { - if (fieldName.equals("*")) { - loadAllStored = true; - continue; - } if (fieldName.equals(SourceFieldMapper.NAME)) { if (context.hasFetchSourceContext()) { context.fetchSourceContext().fetchSource(true); @@ -123,32 +111,28 @@ public class FetchPhase implements SearchPhase { } continue; } - MappedFieldType fieldType = context.smartNameFieldType(fieldName); - if (fieldType == null) { - // Only fail if we know it is a object field, missing paths / fields shouldn't fail. 
- if (context.getObjectMapper(fieldName) != null) { - throw new IllegalArgumentException("field [" + fieldName + "] isn't a leaf field"); + if (Regex.isSimpleMatchPattern(fieldName)) { + if (fieldNamePatterns == null) { + fieldNamePatterns = new ArrayList<>(); + } + fieldNamePatterns.add(fieldName); + } else { + MappedFieldType fieldType = context.smartNameFieldType(fieldName); + if (fieldType == null) { + // Only fail if we know it is a object field, missing paths / fields shouldn't fail. + if (context.getObjectMapper(fieldName) != null) { + throw new IllegalArgumentException("field [" + fieldName + "] isn't a leaf field"); + } } - } else if (fieldType.stored()) { if (fieldNames == null) { fieldNames = new HashSet<>(); } - fieldNames.add(fieldType.names().indexName()); - } else { - if (extractFieldNames == null) { - extractFieldNames = new ArrayList<>(); - } - extractFieldNames.add(fieldName); + fieldNames.add(fieldName); } } - if (loadAllStored) { - fieldsVisitor = new AllFieldsVisitor(); // load everything, including _source - } else if (fieldNames != null) { - boolean loadSource = extractFieldNames != null || context.sourceRequested(); - fieldsVisitor = new CustomFieldsVisitor(fieldNames, loadSource); - } else { - fieldsVisitor = new FieldsVisitor(extractFieldNames != null || context.sourceRequested()); - } + boolean loadSource = context.sourceRequested(); + fieldsVisitor = new CustomFieldsVisitor(fieldNames == null ? Collections.emptySet() : fieldNames, + fieldNamePatterns == null ? Collections.emptyList() : fieldNamePatterns, loadSource); } InternalSearchHit[] hits = new InternalSearchHit[context.docIdsToLoadSize()]; @@ -163,9 +147,9 @@ public class FetchPhase implements SearchPhase { try { int rootDocId = findRootDocumentIfNested(context, subReaderContext, subDocId); if (rootDocId != -1) { - searchHit = createNestedSearchHit(context, docId, subDocId, rootDocId, extractFieldNames, loadAllStored, fieldNames, subReaderContext); + searchHit = createNestedSearchHit(context, docId, subDocId, rootDocId, fieldNames, fieldNamePatterns, subReaderContext); } else { - searchHit = createSearchHit(context, fieldsVisitor, docId, subDocId, extractFieldNames, subReaderContext); + searchHit = createSearchHit(context, fieldsVisitor, docId, subDocId, subReaderContext); } } catch (IOException e) { throw ExceptionsHelper.convertToElastic(e); @@ -199,7 +183,7 @@ public class FetchPhase implements SearchPhase { return -1; } - private InternalSearchHit createSearchHit(SearchContext context, FieldsVisitor fieldsVisitor, int docId, int subDocId, List extractFieldNames, LeafReaderContext subReaderContext) { + private InternalSearchHit createSearchHit(SearchContext context, FieldsVisitor fieldsVisitor, int docId, int subDocId, LeafReaderContext subReaderContext) { loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId); fieldsVisitor.postProcess(context.mapperService()); @@ -219,45 +203,24 @@ public class FetchPhase implements SearchPhase { typeText = documentMapper.typeText(); } InternalSearchHit searchHit = new InternalSearchHit(docId, fieldsVisitor.uid().id(), typeText, searchFields); - - // go over and extract fields that are not mapped / stored + // Set _source if requested. 
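The hunk above folds the old loadAllStored / extractFieldNames handling into a single pass that sorts requested field names into exact names and simple wildcard patterns before building one CustomFieldsVisitor. A minimal sketch of that classification, reusing the Regex and CustomFieldsVisitor calls shown in the hunk (the class name and the hard-coded field list are only for illustration):

---------------------------------------------------------------------------
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class FieldNameClassificationSketch {
    public static void main(String[] args) {
        Set<String> fieldNames = new HashSet<>();            // exact stored-field names
        List<String> fieldNamePatterns = new ArrayList<>();  // names containing a '*'
        for (String fieldName : Arrays.asList("title", "user.*")) {
            if (Regex.isSimpleMatchPattern(fieldName)) {
                fieldNamePatterns.add(fieldName);
            } else {
                fieldNames.add(fieldName);
            }
        }
        boolean loadSource = true; // stands in for context.sourceRequested()
        CustomFieldsVisitor visitor =
                new CustomFieldsVisitor(fieldNames, fieldNamePatterns, loadSource);
        System.out.println("exact=" + fieldNames + ", patterns=" + fieldNamePatterns);
    }
}
---------------------------------------------------------------------------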
SourceLookup sourceLookup = context.lookup().source(); sourceLookup.setSegmentAndDocument(subReaderContext, subDocId); if (fieldsVisitor.source() != null) { sourceLookup.setSource(fieldsVisitor.source()); } - if (extractFieldNames != null) { - for (String extractFieldName : extractFieldNames) { - List values = context.lookup().source().extractRawValues(extractFieldName); - if (!values.isEmpty()) { - if (searchHit.fieldsOrNull() == null) { - searchHit.fields(new HashMap(2)); - } - - SearchHitField hitField = searchHit.fields().get(extractFieldName); - if (hitField == null) { - hitField = new InternalSearchHitField(extractFieldName, new ArrayList<>(2)); - searchHit.fields().put(extractFieldName, hitField); - } - for (Object value : values) { - hitField.values().add(value); - } - } - } - } - return searchHit; } - private InternalSearchHit createNestedSearchHit(SearchContext context, int nestedTopDocId, int nestedSubDocId, int rootSubDocId, List extractFieldNames, boolean loadAllStored, Set fieldNames, LeafReaderContext subReaderContext) throws IOException { + private InternalSearchHit createNestedSearchHit(SearchContext context, int nestedTopDocId, int nestedSubDocId, int rootSubDocId, Set fieldNames, List fieldNamePatterns, LeafReaderContext subReaderContext) throws IOException { // Also if highlighting is requested on nested documents we need to fetch the _source from the root document, // otherwise highlighting will attempt to fetch the _source from the nested doc, which will fail, // because the entire _source is only stored with the root document. - final FieldsVisitor rootFieldsVisitor = new FieldsVisitor(context.sourceRequested() || extractFieldNames != null || context.highlight() != null); + final FieldsVisitor rootFieldsVisitor = new FieldsVisitor(context.sourceRequested() || context.highlight() != null); loadStoredFields(context, subReaderContext, rootFieldsVisitor, rootSubDocId); rootFieldsVisitor.postProcess(context.mapperService()); - Map searchFields = getSearchFields(context, nestedSubDocId, loadAllStored, fieldNames, subReaderContext); + Map searchFields = getSearchFields(context, nestedSubDocId, fieldNames, fieldNamePatterns, subReaderContext); DocumentMapper documentMapper = context.mapperService().documentMapper(rootFieldsVisitor.uid().type()); SourceLookup sourceLookup = context.lookup().source(); sourceLookup.setSegmentAndDocument(subReaderContext, nestedSubDocId); @@ -299,39 +262,14 @@ public class FetchPhase implements SearchPhase { } InternalSearchHit searchHit = new InternalSearchHit(nestedTopDocId, rootFieldsVisitor.uid().id(), documentMapper.typeText(), nestedIdentity, searchFields); - if (extractFieldNames != null) { - for (String extractFieldName : extractFieldNames) { - List values = context.lookup().source().extractRawValues(extractFieldName); - if (!values.isEmpty()) { - if (searchHit.fieldsOrNull() == null) { - searchHit.fields(new HashMap(2)); - } - - SearchHitField hitField = searchHit.fields().get(extractFieldName); - if (hitField == null) { - hitField = new InternalSearchHitField(extractFieldName, new ArrayList<>(2)); - searchHit.fields().put(extractFieldName, hitField); - } - for (Object value : values) { - hitField.values().add(value); - } - } - } - } - return searchHit; } - private Map getSearchFields(SearchContext context, int nestedSubDocId, boolean loadAllStored, Set fieldNames, LeafReaderContext subReaderContext) { + private Map getSearchFields(SearchContext context, int nestedSubDocId, Set fieldNames, List fieldNamePatterns, LeafReaderContext 
subReaderContext) { Map searchFields = null; if (context.hasFieldNames() && !context.fieldNames().isEmpty()) { - FieldsVisitor nestedFieldsVisitor = null; - if (loadAllStored) { - nestedFieldsVisitor = new AllFieldsVisitor(); - } else if (fieldNames != null) { - nestedFieldsVisitor = new CustomFieldsVisitor(fieldNames, false); - } - + FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(fieldNames == null ? Collections.emptySet() : fieldNames, + fieldNamePatterns == null ? Collections.emptyList() : fieldNamePatterns, false); if (nestedFieldsVisitor != null) { loadStoredFields(context, subReaderContext, nestedFieldsVisitor, nestedSubDocId); nestedFieldsVisitor.postProcess(context.mapperService()); diff --git a/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java new file mode 100644 index 00000000000..b10e2e8f58f --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java @@ -0,0 +1,509 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.highlight; + +import org.apache.lucene.search.highlight.SimpleFragmenter; +import org.apache.lucene.search.highlight.SimpleSpanFragmenter; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Map; +import java.util.Objects; + +/** + * This abstract class holds parameters shared by {@link HighlightBuilder} and {@link HighlightBuilder.Field} + * and provides the common setters, equality, hashCode calculation and common serialization + */ +public abstract class AbstractHighlighterBuilder { + + protected String[] preTags; + + protected String[] postTags; + + protected Integer fragmentSize; + + protected Integer numOfFragments; + + protected String highlighterType; + + protected String fragmenter; + + protected QueryBuilder highlightQuery; + + protected String order; + + protected Boolean highlightFilter; + + protected Boolean forceSource; + + protected Integer boundaryMaxScan; + + protected char[] boundaryChars; + + protected Integer noMatchSize; + + protected Integer phraseLimit; + + protected Map options; + + protected Boolean requireFieldMatch; + + /** + * Set the pre tags that will be used for highlighting. + */ + @SuppressWarnings("unchecked") + public HB preTags(String... 
preTags) { + this.preTags = preTags; + return (HB) this; + } + + /** + * @return the value set by {@link #preTags(String...)} + */ + public String[] preTags() { + return this.preTags; + } + + /** + * Set the post tags that will be used for highlighting. + */ + @SuppressWarnings("unchecked") + public HB postTags(String... postTags) { + this.postTags = postTags; + return (HB) this; + } + + /** + * @return the value set by {@link #postTags(String...)} + */ + public String[] postTags() { + return this.postTags; + } + + /** + * Set the fragment size in characters, defaults to {@link HighlighterParseElement#DEFAULT_FRAGMENT_CHAR_SIZE} + */ + @SuppressWarnings("unchecked") + public HB fragmentSize(Integer fragmentSize) { + this.fragmentSize = fragmentSize; + return (HB) this; + } + + /** + * @return the value set by {@link #fragmentSize(Integer)} + */ + public Integer fragmentSize() { + return this.fragmentSize; + } + + /** + * Set the number of fragments, defaults to {@link HighlighterParseElement#DEFAULT_NUMBER_OF_FRAGMENTS} + */ + @SuppressWarnings("unchecked") + public HB numOfFragments(Integer numOfFragments) { + this.numOfFragments = numOfFragments; + return (HB) this; + } + + /** + * @return the value set by {@link #numOfFragments(Integer)} + */ + public Integer numOfFragments() { + return this.numOfFragments; + } + + /** + * Set type of highlighter to use. Out of the box supported types + * are plain, fvh and postings. + * The default option selected is dependent on the mappings defined for your index. + * Details of the different highlighter types are covered in the reference guide. + */ + @SuppressWarnings("unchecked") + public HB highlighterType(String highlighterType) { + this.highlighterType = highlighterType; + return (HB) this; + } + + /** + * @return the value set by {@link #highlighterType(String)} + */ + public String highlighterType() { + return this.highlighterType; + } + + /** + * Sets what fragmenter to use to break up text that is eligible for highlighting. + * This option is only applicable when using the plain highlighterType highlighter. + * Permitted values are "simple" or "span" relating to {@link SimpleFragmenter} and + * {@link SimpleSpanFragmenter} implementations respectively with the default being "span" + */ + @SuppressWarnings("unchecked") + public HB fragmenter(String fragmenter) { + this.fragmenter = fragmenter; + return (HB) this; + } + + /** + * @return the value set by {@link #fragmenter(String)} + */ + public String fragmenter() { + return this.fragmenter; + } + + /** + * Sets a query to be used for highlighting instead of the search query. + */ + @SuppressWarnings("unchecked") + public HB highlightQuery(QueryBuilder highlightQuery) { + this.highlightQuery = highlightQuery; + return (HB) this; + } + + /** + * @return the value set by {@link #highlightQuery(QueryBuilder)} + */ + public QueryBuilder highlightQuery() { + return this.highlightQuery; + } + + /** + * The order of fragments per field. By default, ordered by the order in the + * highlighted text. Can be score, which then it will be ordered + * by score of the fragments. + */ + @SuppressWarnings("unchecked") + public HB order(String order) { + this.order = order; + return (HB) this; + } + + /** + * @return the value set by {@link #order(String)} + */ + public String order() { + return this.order; + } + + /** + * Set this to true when using the highlighterType fvh + * and you want to provide highlighting on filter clauses in your + * query. Default is false. 
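Each setter above casts `this` to the type parameter `HB`, the usual self-typed builder trick: a subclass binds `HB` to itself so that fluent chains keep the subclass type instead of degrading to the abstract base. A minimal sketch of the pattern with hypothetical names (not classes from this patch):

---------------------------------------------------------------------------
// Base class owns the shared setters; the cast keeps chains typed as the subclass.
abstract class BaseOptions<B extends BaseOptions<B>> {
    protected String order;

    @SuppressWarnings("unchecked")
    public B order(String order) {
        this.order = order;
        return (B) this; // returns the concrete builder, not BaseOptions
    }
}

class FieldOptions extends BaseOptions<FieldOptions> {
    private int fragmentOffset = -1;

    public FieldOptions fragmentOffset(int fragmentOffset) {
        this.fragmentOffset = fragmentOffset;
        return this;
    }
}

public class SelfTypedBuilderSketch {
    public static void main(String[] args) {
        // order(...) is declared on the base class but still returns FieldOptions,
        // so the field-only setter can follow it in the same chain.
        FieldOptions options = new FieldOptions().order("score").fragmentOffset(5);
        System.out.println(options.getClass().getSimpleName());
    }
}
---------------------------------------------------------------------------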
+ */ + @SuppressWarnings("unchecked") + public HB highlightFilter(Boolean highlightFilter) { + this.highlightFilter = highlightFilter; + return (HB) this; + } + + /** + * @return the value set by {@link #highlightFilter(Boolean)} + */ + public Boolean highlightFilter() { + return this.highlightFilter; + } + + /** + * When using the highlighterType fvh this setting + * controls how far to look for boundary characters, and defaults to 20. + */ + @SuppressWarnings("unchecked") + public HB boundaryMaxScan(Integer boundaryMaxScan) { + this.boundaryMaxScan = boundaryMaxScan; + return (HB) this; + } + + /** + * @return the value set by {@link #boundaryMaxScan(Integer)} + */ + public Integer boundaryMaxScan() { + return this.boundaryMaxScan; + } + + /** + * When using the highlighterType fvh this setting + * defines what constitutes a boundary for highlighting. It’s a single string with + * each boundary character defined in it. It defaults to .,!? \t\n + */ + @SuppressWarnings("unchecked") + public HB boundaryChars(char[] boundaryChars) { + this.boundaryChars = boundaryChars; + return (HB) this; + } + + /** + * @return the value set by {@link #boundaryChars(char[])} + */ + public char[] boundaryChars() { + return this.boundaryChars; + } + + /** + * Allows to set custom options for custom highlighters. + */ + @SuppressWarnings("unchecked") + public HB options(Map options) { + this.options = options; + return (HB) this; + } + + /** + * @return the value set by {@link #options(Map)} + */ + public Map options() { + return this.options; + } + + /** + * Set to true to cause a field to be highlighted only if a query matches that field. + * Default is false meaning that terms are highlighted on all requested fields regardless + * if the query matches specifically on them. + */ + @SuppressWarnings("unchecked") + public HB requireFieldMatch(Boolean requireFieldMatch) { + this.requireFieldMatch = requireFieldMatch; + return (HB) this; + } + + /** + * @return the value set by {@link #requireFieldMatch(Boolean)} + */ + public Boolean requireFieldMatch() { + return this.requireFieldMatch; + } + + /** + * Sets the size of the fragment to return from the beginning of the field if there are no matches to + * highlight and the field doesn't also define noMatchSize. + * @param noMatchSize integer to set or null to leave out of request. default is null. + * @return this for chaining + */ + @SuppressWarnings("unchecked") + public HB noMatchSize(Integer noMatchSize) { + this.noMatchSize = noMatchSize; + return (HB) this; + } + + /** + * @return the value set by {@link #noMatchSize(Integer)} + */ + public Integer noMatchSize() { + return this.noMatchSize; + } + + /** + * Sets the maximum number of phrases the fvh will consider if the field doesn't also define phraseLimit. + * @param phraseLimit maximum number of phrases the fvh will consider + * @return this for chaining + */ + @SuppressWarnings("unchecked") + public HB phraseLimit(Integer phraseLimit) { + this.phraseLimit = phraseLimit; + return (HB) this; + } + + /** + * @return the value set by {@link #phraseLimit(Integer)} + */ + public Integer phraseLimit() { + return this.noMatchSize; + } + + /** + * Forces the highlighting to highlight fields based on the source even if fields are stored separately. 
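All of these options are nullable boxed values, so `commonOptionsToXContent` below writes only the settings a caller explicitly touched. A short sketch of the effect, assuming the `HighlightBuilder` entry point and the `toString()` rendering introduced elsewhere in this patch:

---------------------------------------------------------------------------
import org.elasticsearch.search.highlight.HighlightBuilder;

public class HighlightOptionsSketch {
    public static void main(String[] args) {
        HighlightBuilder highlight = new HighlightBuilder()
                .fragmentSize(150)      // serialized as "fragment_size"
                .numOfFragments(3)      // serialized as "number_of_fragments"
                .highlighterType("fvh") // serialized as "type"
                .order("score");
        // Options left unset (boundary_chars, phrase_limit, ...) are omitted entirely.
        System.out.println(highlight);
    }
}
---------------------------------------------------------------------------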
+ */ + @SuppressWarnings("unchecked") + public HB forceSource(Boolean forceSource) { + this.forceSource = forceSource; + return (HB) this; + } + + /** + * @return the value set by {@link #forceSource(Boolean)} + */ + public Boolean forceSource() { + return this.forceSource; + } + + void commonOptionsToXContent(XContentBuilder builder) throws IOException { + if (preTags != null) { + builder.array("pre_tags", preTags); + } + if (postTags != null) { + builder.array("post_tags", postTags); + } + if (fragmentSize != null) { + builder.field("fragment_size", fragmentSize); + } + if (numOfFragments != null) { + builder.field("number_of_fragments", numOfFragments); + } + if (highlighterType != null) { + builder.field("type", highlighterType); + } + if (fragmenter != null) { + builder.field("fragmenter", fragmenter); + } + if (highlightQuery != null) { + builder.field("highlight_query", highlightQuery); + } + if (order != null) { + builder.field("order", order); + } + if (highlightFilter != null) { + builder.field("highlight_filter", highlightFilter); + } + if (boundaryMaxScan != null) { + builder.field("boundary_max_scan", boundaryMaxScan); + } + if (boundaryChars != null) { + builder.field("boundary_chars", boundaryChars); + } + if (options != null && options.size() > 0) { + builder.field("options", options); + } + if (forceSource != null) { + builder.field("force_source", forceSource); + } + if (requireFieldMatch != null) { + builder.field("require_field_match", requireFieldMatch); + } + if (noMatchSize != null) { + builder.field("no_match_size", noMatchSize); + } + if (phraseLimit != null) { + builder.field("phrase_limit", phraseLimit); + } + } + + @Override + public final int hashCode() { + return Objects.hash(getClass(), Arrays.hashCode(preTags), Arrays.hashCode(postTags), fragmentSize, + numOfFragments, highlighterType, fragmenter, highlightQuery, order, highlightFilter, + forceSource, boundaryMaxScan, Arrays.hashCode(boundaryChars), noMatchSize, + phraseLimit, options, requireFieldMatch, doHashCode()); + } + + /** + * internal hashCode calculation to overwrite for the implementing classes. + */ + protected abstract int doHashCode(); + + @Override + public final boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + @SuppressWarnings("unchecked") + HB other = (HB) obj; + return Arrays.equals(preTags, other.preTags) && + Arrays.equals(postTags, other.postTags) && + Objects.equals(fragmentSize, other.fragmentSize) && + Objects.equals(numOfFragments, other.numOfFragments) && + Objects.equals(highlighterType, other.highlighterType) && + Objects.equals(fragmenter, other.fragmenter) && + Objects.equals(highlightQuery, other.highlightQuery) && + Objects.equals(order, other.order) && + Objects.equals(highlightFilter, other.highlightFilter) && + Objects.equals(forceSource, other.forceSource) && + Objects.equals(boundaryMaxScan, other.boundaryMaxScan) && + Arrays.equals(boundaryChars, other.boundaryChars) && + Objects.equals(noMatchSize, other.noMatchSize) && + Objects.equals(phraseLimit, other.phraseLimit) && + Objects.equals(options, other.options) && + Objects.equals(requireFieldMatch, other.requireFieldMatch) && + doEquals(other); + } + + /** + * internal equals to overwrite for the implementing classes. 
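`equals` and `hashCode` above are final in the base class and fold in `doEquals`/`doHashCode`, so subclasses only compare and hash the fields they add on top of the shared ones. A compact sketch of that split, using hypothetical classes rather than the highlighter types:

---------------------------------------------------------------------------
import java.util.Objects;

abstract class Named<T extends Named<T>> {
    protected String label;

    @Override
    public final int hashCode() {
        return Objects.hash(getClass(), label, doHashCode());
    }

    @Override
    @SuppressWarnings("unchecked")
    public final boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        T other = (T) obj;
        return Objects.equals(label, other.label) && doEquals(other);
    }

    protected abstract int doHashCode();          // hash of subclass-only fields

    protected abstract boolean doEquals(T other); // comparison of subclass-only fields
}

class Circle extends Named<Circle> {
    int radius;

    @Override
    protected int doHashCode() {
        return Objects.hash(radius);
    }

    @Override
    protected boolean doEquals(Circle other) {
        return radius == other.radius;
    }
}

public class EqualsTemplateSketch {
    public static void main(String[] args) {
        Circle a = new Circle();
        Circle b = new Circle();
        a.label = b.label = "unit";
        a.radius = b.radius = 3;
        System.out.println(a.equals(b)); // true: shared and subclass fields both match
    }
}
---------------------------------------------------------------------------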
+ */ + protected abstract boolean doEquals(HB other); + + /** + * read common parameters from {@link StreamInput} + */ + @SuppressWarnings("unchecked") + protected HB readOptionsFrom(StreamInput in) throws IOException { + preTags(in.readOptionalStringArray()); + postTags(in.readOptionalStringArray()); + fragmentSize(in.readOptionalVInt()); + numOfFragments(in.readOptionalVInt()); + highlighterType(in.readOptionalString()); + fragmenter(in.readOptionalString()); + if (in.readBoolean()) { + highlightQuery(in.readQuery()); + } + order(in.readOptionalString()); + highlightFilter(in.readOptionalBoolean()); + forceSource(in.readOptionalBoolean()); + boundaryMaxScan(in.readOptionalVInt()); + if (in.readBoolean()) { + boundaryChars(in.readString().toCharArray()); + } + noMatchSize(in.readOptionalVInt()); + phraseLimit(in.readOptionalVInt()); + if (in.readBoolean()) { + options(in.readMap()); + } + requireFieldMatch(in.readOptionalBoolean()); + return (HB) this; + } + + /** + * write common parameters to {@link StreamOutput} + */ + protected void writeOptionsTo(StreamOutput out) throws IOException { + out.writeOptionalStringArray(preTags); + out.writeOptionalStringArray(postTags); + out.writeOptionalVInt(fragmentSize); + out.writeOptionalVInt(numOfFragments); + out.writeOptionalString(highlighterType); + out.writeOptionalString(fragmenter); + boolean hasQuery = highlightQuery != null; + out.writeBoolean(hasQuery); + if (hasQuery) { + out.writeQuery(highlightQuery); + } + out.writeOptionalString(order); + out.writeOptionalBoolean(highlightFilter); + out.writeOptionalBoolean(forceSource); + out.writeOptionalVInt(boundaryMaxScan); + boolean hasBounaryChars = boundaryChars != null; + out.writeBoolean(hasBounaryChars); + if (hasBounaryChars) { + out.writeString(String.valueOf(boundaryChars)); + } + out.writeOptionalVInt(noMatchSize); + out.writeOptionalVInt(phraseLimit); + boolean hasOptions = options != null; + out.writeBoolean(hasOptions); + if (hasOptions) { + out.writeMap(options); + } + out.writeOptionalBoolean(requireFieldMatch); + } +} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java index b321b574d6a..dbae661fde9 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java @@ -19,16 +19,19 @@ package org.elasticsearch.search.highlight; -import org.apache.lucene.search.highlight.SimpleFragmenter; -import org.apache.lucene.search.highlight.SimpleSpanFragmenter; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; -import java.util.Map; +import java.util.Objects; /** * A builder for search highlighting. 
Settings can control how large fields @@ -36,46 +39,14 @@ import java.util.Map; * * @see org.elasticsearch.search.builder.SearchSourceBuilder#highlight() */ -public class HighlightBuilder implements ToXContent { +public class HighlightBuilder extends AbstractHighlighterBuilder implements Writeable, ToXContent { - private List fields; + public static final HighlightBuilder PROTOTYPE = new HighlightBuilder(); - private String tagsSchema; - - private Boolean highlightFilter; - - private Integer fragmentSize; - - private Integer numOfFragments; - - private String[] preTags; - - private String[] postTags; - - private String order; + private final List fields = new ArrayList<>(); private String encoder; - private Boolean requireFieldMatch; - - private Integer boundaryMaxScan; - - private char[] boundaryChars; - - private String highlighterType; - - private String fragmenter; - - private QueryBuilder highlightQuery; - - private Integer noMatchSize; - - private Integer phraseLimit; - - private Map options; - - private Boolean forceSource; - private boolean useExplicitFieldOrder = false; /** @@ -85,14 +56,9 @@ public class HighlightBuilder implements ToXContent { * @param name The field to highlight */ public HighlightBuilder field(String name) { - if (fields == null) { - fields = new ArrayList<>(); - } - fields.add(new Field(name)); - return this; + return field(new Field(name)); } - /** * Adds a field to be highlighted with a provided fragment size (in characters), and * default number of fragments of 5. @@ -101,11 +67,7 @@ public class HighlightBuilder implements ToXContent { * @param fragmentSize The size of a fragment in characters */ public HighlightBuilder field(String name, int fragmentSize) { - if (fields == null) { - fields = new ArrayList<>(); - } - fields.add(new Field(name).fragmentSize(fragmentSize)); - return this; + return field(new Field(name).fragmentSize(fragmentSize)); } @@ -118,14 +80,9 @@ public class HighlightBuilder implements ToXContent { * @param numberOfFragments The (maximum) number of fragments */ public HighlightBuilder field(String name, int fragmentSize, int numberOfFragments) { - if (fields == null) { - fields = new ArrayList<>(); - } - fields.add(new Field(name).fragmentSize(fragmentSize).numOfFragments(numberOfFragments)); - return this; + return field(new Field(name).fragmentSize(fragmentSize).numOfFragments(numberOfFragments)); } - /** * Adds a field to be highlighted with a provided fragment size (in characters), and * a provided (maximum) number of fragments. @@ -136,56 +93,38 @@ public class HighlightBuilder implements ToXContent { * @param fragmentOffset The offset from the start of the fragment to the start of the highlight */ public HighlightBuilder field(String name, int fragmentSize, int numberOfFragments, int fragmentOffset) { - if (fields == null) { - fields = new ArrayList<>(); - } - fields.add(new Field(name).fragmentSize(fragmentSize).numOfFragments(numberOfFragments) + return field(new Field(name).fragmentSize(fragmentSize).numOfFragments(numberOfFragments) .fragmentOffset(fragmentOffset)); - return this; } public HighlightBuilder field(Field field) { - if (fields == null) { - fields = new ArrayList<>(); - } fields.add(field); return this; } + public List fields() { + return this.fields; + } + /** - * Set a tag scheme that encapsulates a built in pre and post tags. The allows schemes + * Set a tag scheme that encapsulates a built in pre and post tags. The allowed schemes * are styled and default. 
* * @param schemaName The tag scheme name */ public HighlightBuilder tagsSchema(String schemaName) { - this.tagsSchema = schemaName; - return this; - } - - /** - * Set this to true when using the highlighterType fvh - * and you want to provide highlighting on filter clauses in your - * query. Default is false. - */ - public HighlightBuilder highlightFilter(boolean highlightFilter) { - this.highlightFilter = highlightFilter; - return this; - } - - /** - * Sets the size of a fragment in characters (defaults to 100) - */ - public HighlightBuilder fragmentSize(Integer fragmentSize) { - this.fragmentSize = fragmentSize; - return this; - } - - /** - * Sets the maximum number of fragments returned - */ - public HighlightBuilder numOfFragments(Integer numOfFragments) { - this.numOfFragments = numOfFragments; + switch (schemaName) { + case "default": + preTags(HighlighterParseElement.DEFAULT_PRE_TAGS); + postTags(HighlighterParseElement.DEFAULT_POST_TAGS); + break; + case "styled": + preTags(HighlighterParseElement.STYLED_PRE_TAG); + postTags(HighlighterParseElement.STYLED_POST_TAGS); + break; + default: + throw new IllegalArgumentException("Unknown tag schema ["+ schemaName +"]"); + } return this; } @@ -201,125 +140,10 @@ public class HighlightBuilder implements ToXContent { } /** - * Explicitly set the pre tags that will be used for highlighting. + * Getter for {@link #encoder(String)} */ - public HighlightBuilder preTags(String... preTags) { - this.preTags = preTags; - return this; - } - - /** - * Explicitly set the post tags that will be used for highlighting. - */ - public HighlightBuilder postTags(String... postTags) { - this.postTags = postTags; - return this; - } - - /** - * The order of fragments per field. By default, ordered by the order in the - * highlighted text. Can be score, which then it will be ordered - * by score of the fragments. - */ - public HighlightBuilder order(String order) { - this.order = order; - return this; - } - - /** - * Set to true to cause a field to be highlighted only if a query matches that field. - * Default is false meaning that terms are highlighted on all requested fields regardless - * if the query matches specifically on them. - */ - public HighlightBuilder requireFieldMatch(boolean requireFieldMatch) { - this.requireFieldMatch = requireFieldMatch; - return this; - } - - /** - * When using the highlighterType fvh this setting - * controls how far to look for boundary characters, and defaults to 20. - */ - public HighlightBuilder boundaryMaxScan(Integer boundaryMaxScan) { - this.boundaryMaxScan = boundaryMaxScan; - return this; - } - - /** - * When using the highlighterType fvh this setting - * defines what constitutes a boundary for highlighting. It’s a single string with - * each boundary character defined in it. It defaults to .,!? \t\n - */ - public HighlightBuilder boundaryChars(char[] boundaryChars) { - this.boundaryChars = boundaryChars; - return this; - } - - /** - * Set type of highlighter to use. Out of the box supported types - * are plain, fvh and postings. - * The default option selected is dependent on the mappings defined for your index. - * Details of the different highlighter types are covered in the reference guide. - */ - public HighlightBuilder highlighterType(String highlighterType) { - this.highlighterType = highlighterType; - return this; - } - - /** - * Sets what fragmenter to use to break up text that is eligible for highlighting. - * This option is only applicable when using the plain highlighterType highlighter. 
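With the rewrite above, `tagsSchema` is only sugar: it copies the shared `default` or `styled` tag arrays into `preTags`/`postTags` and rejects anything else. A short sketch of the observable behaviour, using the getters from the shared base class (the exact tag strings are the usual Elasticsearch defaults and are not spelled out here):

---------------------------------------------------------------------------
import org.elasticsearch.search.highlight.HighlightBuilder;

public class TagsSchemaSketch {
    public static void main(String[] args) {
        HighlightBuilder styled = new HighlightBuilder().tagsSchema("styled");
        // tagsSchema only populated the pre/post tag arrays; nothing else changed.
        System.out.println(styled.preTags().length + " styled opening tags");
        System.out.println(styled.postTags().length + " styled closing tag(s)");

        try {
            new HighlightBuilder().tagsSchema("bogus");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // Unknown tag schema [bogus]
        }
    }
}
---------------------------------------------------------------------------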
- * Permitted values are "simple" or "span" relating to {@link SimpleFragmenter} and - * {@link SimpleSpanFragmenter} implementations respectively with the default being "span" - */ - public HighlightBuilder fragmenter(String fragmenter) { - this.fragmenter = fragmenter; - return this; - } - - /** - * Sets a query to be used for highlighting all fields instead of the search query. - */ - public HighlightBuilder highlightQuery(QueryBuilder highlightQuery) { - this.highlightQuery = highlightQuery; - return this; - } - - /** - * Sets the size of the fragment to return from the beginning of the field if there are no matches to - * highlight and the field doesn't also define noMatchSize. - * @param noMatchSize integer to set or null to leave out of request. default is null. - * @return this for chaining - */ - public HighlightBuilder noMatchSize(Integer noMatchSize) { - this.noMatchSize = noMatchSize; - return this; - } - - /** - * Sets the maximum number of phrases the fvh will consider if the field doesn't also define phraseLimit. - * @param phraseLimit maximum number of phrases the fvh will consider - * @return this for chaining - */ - public HighlightBuilder phraseLimit(Integer phraseLimit) { - this.phraseLimit = phraseLimit; - return this; - } - - /** - * Allows to set custom options for custom highlighters. - */ - public HighlightBuilder options(Map options) { - this.options = options; - return this; - } - - /** - * Forces the highlighting to highlight fields based on the source even if fields are stored separately. - */ - public HighlightBuilder forceSource(boolean forceSource) { - this.forceSource = forceSource; - return this; + public String encoder() { + return this.encoder; } /** @@ -331,71 +155,29 @@ public class HighlightBuilder implements ToXContent { return this; } + /** + * Gets value set with {@link #useExplicitFieldOrder(boolean)} + */ + public Boolean useExplicitFieldOrder() { + return this.useExplicitFieldOrder; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject("highlight"); - innerXContent(builder, params); + innerXContent(builder); builder.endObject(); return builder; } - - public void innerXContent(XContentBuilder builder, Params params) throws IOException { - if (tagsSchema != null) { - builder.field("tags_schema", tagsSchema); - } - if (preTags != null) { - builder.array("pre_tags", preTags); - } - if (postTags != null) { - builder.array("post_tags", postTags); - } - if (order != null) { - builder.field("order", order); - } - if (highlightFilter != null) { - builder.field("highlight_filter", highlightFilter); - } - if (fragmentSize != null) { - builder.field("fragment_size", fragmentSize); - } - if (numOfFragments != null) { - builder.field("number_of_fragments", numOfFragments); - } + public void innerXContent(XContentBuilder builder) throws IOException { + // first write common options + commonOptionsToXContent(builder); + // special options for top-level highlighter if (encoder != null) { builder.field("encoder", encoder); } - if (requireFieldMatch != null) { - builder.field("require_field_match", requireFieldMatch); - } - if (boundaryMaxScan != null) { - builder.field("boundary_max_scan", boundaryMaxScan); - } - if (boundaryChars != null) { - builder.field("boundary_chars", boundaryChars); - } - if (highlighterType != null) { - builder.field("type", highlighterType); - } - if (fragmenter != null) { - builder.field("fragmenter", fragmenter); - } - if (highlightQuery != null) { - 
builder.field("highlight_query", highlightQuery); - } - if (noMatchSize != null) { - builder.field("no_match_size", noMatchSize); - } - if (phraseLimit != null) { - builder.field("phrase_limit", phraseLimit); - } - if (options != null && options.size() > 0) { - builder.field("options", options); - } - if (forceSource != null) { - builder.field("force_source", forceSource); - } - if (fields != null) { + if (fields.size() > 0) { if (useExplicitFieldOrder) { builder.startArray("fields"); } else { @@ -405,63 +187,7 @@ public class HighlightBuilder implements ToXContent { if (useExplicitFieldOrder) { builder.startObject(); } - builder.startObject(field.name()); - if (field.preTags != null) { - builder.field("pre_tags", field.preTags); - } - if (field.postTags != null) { - builder.field("post_tags", field.postTags); - } - if (field.fragmentSize != -1) { - builder.field("fragment_size", field.fragmentSize); - } - if (field.numOfFragments != -1) { - builder.field("number_of_fragments", field.numOfFragments); - } - if (field.fragmentOffset != -1) { - builder.field("fragment_offset", field.fragmentOffset); - } - if (field.highlightFilter != null) { - builder.field("highlight_filter", field.highlightFilter); - } - if (field.order != null) { - builder.field("order", field.order); - } - if (field.requireFieldMatch != null) { - builder.field("require_field_match", field.requireFieldMatch); - } - if (field.boundaryMaxScan != -1) { - builder.field("boundary_max_scan", field.boundaryMaxScan); - } - if (field.boundaryChars != null) { - builder.field("boundary_chars", field.boundaryChars); - } - if (field.highlighterType != null) { - builder.field("type", field.highlighterType); - } - if (field.fragmenter != null) { - builder.field("fragmenter", field.fragmenter); - } - if (field.highlightQuery != null) { - builder.field("highlight_query", field.highlightQuery); - } - if (field.noMatchSize != null) { - builder.field("no_match_size", field.noMatchSize); - } - if (field.matchedFields != null) { - builder.field("matched_fields", field.matchedFields); - } - if (field.phraseLimit != null) { - builder.field("phrase_limit", field.phraseLimit); - } - if (field.options != null && field.options.size() > 0) { - builder.field("options", field.options); - } - if (field.forceSource != null) { - builder.field("force_source", field.forceSource); - } - - builder.endObject(); + field.innerXContent(builder); if (useExplicitFieldOrder) { builder.endObject(); } @@ -474,26 +200,62 @@ public class HighlightBuilder implements ToXContent { } } - public static class Field { - final String name; - String[] preTags; - String[] postTags; - int fragmentSize = -1; + @Override + public final String toString() { + try { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.prettyPrint(); + toXContent(builder, ToXContent.EMPTY_PARAMS); + return builder.string(); + } catch (Exception e) { + return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}"; + } + } + + @Override + protected int doHashCode() { + return Objects.hash(encoder, useExplicitFieldOrder, fields); + } + + @Override + protected boolean doEquals(HighlightBuilder other) { + return Objects.equals(encoder, other.encoder) && + Objects.equals(useExplicitFieldOrder, other.useExplicitFieldOrder) && + Objects.equals(fields, other.fields); + } + + @Override + public HighlightBuilder readFrom(StreamInput in) throws IOException { + HighlightBuilder highlightBuilder = new HighlightBuilder(); + highlightBuilder.readOptionsFrom(in) + 
.encoder(in.readOptionalString()) + .useExplicitFieldOrder(in.readBoolean()); + int fields = in.readVInt(); + for (int i = 0; i < fields; i++) { + highlightBuilder.field(Field.PROTOTYPE.readFrom(in)); + } + return highlightBuilder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + writeOptionsTo(out); + out.writeOptionalString(encoder); + out.writeBoolean(useExplicitFieldOrder); + out.writeVInt(fields.size()); + for (int i = 0; i < fields.size(); i++) { + fields.get(i).writeTo(out); + } + } + + public static class Field extends AbstractHighlighterBuilder implements Writeable { + static final Field PROTOTYPE = new Field("_na_"); + + private final String name; + int fragmentOffset = -1; - int numOfFragments = -1; - Boolean highlightFilter; - String order; - Boolean requireFieldMatch; - int boundaryMaxScan = -1; - char[] boundaryChars; - String highlighterType; - String fragmenter; - QueryBuilder highlightQuery; - Integer noMatchSize; + String[] matchedFields; - Integer phraseLimit; - Map options; - Boolean forceSource; public Field(String name) { this.name = name; @@ -503,118 +265,11 @@ public class HighlightBuilder implements ToXContent { return name; } - /** - * Explicitly set the pre tags for this field that will be used for highlighting. - * This overrides global settings set by {@link HighlightBuilder#preTags(String...)}. - */ - public Field preTags(String... preTags) { - this.preTags = preTags; - return this; - } - - /** - * Explicitly set the post tags for this field that will be used for highlighting. - * This overrides global settings set by {@link HighlightBuilder#postTags(String...)}. - */ - public Field postTags(String... postTags) { - this.postTags = postTags; - return this; - } - - public Field fragmentSize(int fragmentSize) { - this.fragmentSize = fragmentSize; - return this; - } - public Field fragmentOffset(int fragmentOffset) { this.fragmentOffset = fragmentOffset; return this; } - public Field numOfFragments(int numOfFragments) { - this.numOfFragments = numOfFragments; - return this; - } - - public Field highlightFilter(boolean highlightFilter) { - this.highlightFilter = highlightFilter; - return this; - } - - /** - * The order of fragments per field. By default, ordered by the order in the - * highlighted text. Can be score, which then it will be ordered - * by score of the fragments. - * This overrides global settings set by {@link HighlightBuilder#order(String)}. - */ - public Field order(String order) { - this.order = order; - return this; - } - - public Field requireFieldMatch(boolean requireFieldMatch) { - this.requireFieldMatch = requireFieldMatch; - return this; - } - - public Field boundaryMaxScan(int boundaryMaxScan) { - this.boundaryMaxScan = boundaryMaxScan; - return this; - } - - public Field boundaryChars(char[] boundaryChars) { - this.boundaryChars = boundaryChars; - return this; - } - - /** - * Set type of highlighter to use. Out of the box supported types - * are plain, fvh and postings. - * This overrides global settings set by {@link HighlightBuilder#highlighterType(String)}. - */ - public Field highlighterType(String highlighterType) { - this.highlighterType = highlighterType; - return this; - } - - /** - * Sets what fragmenter to use to break up text that is eligible for highlighting. - * This option is only applicable when using plain / normal highlighter. - * This overrides global settings set by {@link HighlightBuilder#fragmenter(String)}. 
- */ - public Field fragmenter(String fragmenter) { - this.fragmenter = fragmenter; - return this; - } - - /** - * Sets a query to use for highlighting this field instead of the search query. - */ - public Field highlightQuery(QueryBuilder highlightQuery) { - this.highlightQuery = highlightQuery; - return this; - } - - /** - * Sets the size of the fragment to return from the beginning of the field if there are no matches to - * highlight. - * @param noMatchSize integer to set or null to leave out of request. default is null. - * @return this for chaining - */ - public Field noMatchSize(Integer noMatchSize) { - this.noMatchSize = noMatchSize; - return this; - } - - /** - * Allows to set custom options for custom highlighters. - * This overrides global settings set by {@link HighlightBuilder#options(Map)}. - */ - public Field options(Map options) { - this.options = options; - return this; - } - /** * Set the matched fields to highlight against this field data. Default to null, meaning just * the named field. If you provide a list of fields here then don't forget to include name as @@ -625,24 +280,47 @@ public class HighlightBuilder implements ToXContent { return this; } - /** - * Sets the maximum number of phrases the fvh will consider. - * @param phraseLimit maximum number of phrases the fvh will consider - * @return this for chaining - */ - public Field phraseLimit(Integer phraseLimit) { - this.phraseLimit = phraseLimit; - return this; + public void innerXContent(XContentBuilder builder) throws IOException { + builder.startObject(name); + // write common options + commonOptionsToXContent(builder); + // write special field-highlighter options + if (fragmentOffset != -1) { + builder.field("fragment_offset", fragmentOffset); + } + if (matchedFields != null) { + builder.field("matched_fields", matchedFields); + } + builder.endObject(); } - - /** - * Forces the highlighting to highlight this field based on the source even if this field is stored separately. 
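Because `Field` now extends the same abstract base, per-field overrides use the identical fluent setters as the global level, with only `fragmentOffset` and `matchedFields` remaining field-specific. A sketch of mixing global settings with per-field overrides (index and field names are made up; `matchedFields` is the pre-existing setter kept by this patch):

---------------------------------------------------------------------------
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.highlight.HighlightBuilder.Field;

public class PerFieldHighlightSketch {
    public static void main(String[] args) {
        HighlightBuilder highlight = new HighlightBuilder()
                .requireFieldMatch(false)           // global setting for every field
                .field(new Field("title"))          // inherits the globals untouched
                .field(new Field("body")
                        .fragmentSize(200)          // common setter, inherited from the base class
                        .numOfFragments(2)
                        .matchedFields("body", "body.plain")); // field-only option
        System.out.println(highlight);
    }
}
---------------------------------------------------------------------------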
- */ - public Field forceSource(boolean forceSource) { - this.forceSource = forceSource; - return this; + @Override + protected int doHashCode() { + return Objects.hash(name, fragmentOffset, Arrays.hashCode(matchedFields)); } + @Override + protected boolean doEquals(Field other) { + return Objects.equals(name, other.name) && + Objects.equals(fragmentOffset, other.fragmentOffset) && + Arrays.equals(matchedFields, other.matchedFields); + } + + @Override + public Field readFrom(StreamInput in) throws IOException { + Field field = new Field(in.readString()); + field.fragmentOffset(in.readVInt()); + field.matchedFields(in.readOptionalStringArray()); + field.readOptionsFrom(in); + return field; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + out.writeVInt(fragmentOffset); + out.writeOptionalStringArray(matchedFields); + writeOptionsTo(out); + } } } diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java index 8fddeaed279..fdf9e2c26dd 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java @@ -52,16 +52,38 @@ import java.util.Set; */ public class HighlighterParseElement implements SearchParseElement { - private static final String[] DEFAULT_PRE_TAGS = new String[]{""}; - private static final String[] DEFAULT_POST_TAGS = new String[]{""}; - - private static final String[] STYLED_PRE_TAG = { + /** default for whether to highlight fields based on the source even if stored separately */ + public static final boolean DEFAULT_FORCE_SOURCE = false; + /** default for whether a field should be highlighted only if a query matches that field */ + public static final boolean DEFAULT_REQUIRE_FIELD_MATCH = true; + /** default for whether fvh should provide highlighting on filter clauses */ + public static final boolean DEFAULT_HIGHLIGHT_FILTER = false; + /** default for highlight fragments being ordered by score */ + public static final boolean DEFAULT_SCORE_ORDERED = false; + /** the default encoder setting */ + public static final String DEFAULT_ENCODER = "default"; + /** default for the maximum number of phrases the fvh will consider */ + public static final int DEFAULT_PHRASE_LIMIT = 256; + /** default for fragment size when there are no matches */ + public static final int DEFAULT_NO_MATCH_SIZE = 0; + /** the default number of fragments for highlighting */ + public static final int DEFAULT_NUMBER_OF_FRAGMENTS = 5; + /** the default number of fragments size in characters */ + public static final int DEFAULT_FRAGMENT_CHAR_SIZE = 100; + /** the default opening tag */ + public static final String[] DEFAULT_PRE_TAGS = new String[]{""}; + /** the default closing tag */ + public static final String[] DEFAULT_POST_TAGS = new String[]{""}; + + /** the default opening tags when tag_schema = "styled" */ + public static final String[] STYLED_PRE_TAG = { "", "", "", "", "", "", "", "", "", "" }; - private static final String[] STYLED_POST_TAGS = {""}; + /** the default closing tags when tag_schema = "styled" */ + public static final String[] STYLED_POST_TAGS = {""}; @Override public void parse(XContentParser parser, SearchContext context) throws Exception { @@ -78,11 +100,11 @@ public class HighlighterParseElement implements SearchParseElement { final List> fieldsOptions = new ArrayList<>(); final 
SearchContextHighlight.FieldOptions.Builder globalOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder() - .preTags(DEFAULT_PRE_TAGS).postTags(DEFAULT_POST_TAGS).scoreOrdered(false).highlightFilter(false) - .requireFieldMatch(true).forceSource(false).fragmentCharSize(100).numberOfFragments(5) - .encoder("default").boundaryMaxScan(SimpleBoundaryScanner.DEFAULT_MAX_SCAN) + .preTags(DEFAULT_PRE_TAGS).postTags(DEFAULT_POST_TAGS).scoreOrdered(DEFAULT_SCORE_ORDERED).highlightFilter(DEFAULT_HIGHLIGHT_FILTER) + .requireFieldMatch(DEFAULT_REQUIRE_FIELD_MATCH).forceSource(DEFAULT_FORCE_SOURCE).fragmentCharSize(DEFAULT_FRAGMENT_CHAR_SIZE).numberOfFragments(DEFAULT_NUMBER_OF_FRAGMENTS) + .encoder(DEFAULT_ENCODER).boundaryMaxScan(SimpleBoundaryScanner.DEFAULT_MAX_SCAN) .boundaryChars(SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS) - .noMatchSize(0).phraseLimit(256); + .noMatchSize(DEFAULT_NO_MATCH_SIZE).phraseLimit(DEFAULT_PHRASE_LIMIT); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java index a6f66fdd763..9eba50f478a 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java @@ -35,7 +35,7 @@ public final class Suggesters extends ExtensionPoint.ClassMap { private final Map parsers; public Suggesters() { - this(Collections.EMPTY_MAP); + this(Collections.emptyMap()); } public Suggesters(Map suggesters) { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionFieldStats.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionFieldStats.java index e61c221a959..08c0302f81e 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionFieldStats.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionFieldStats.java @@ -20,6 +20,8 @@ package org.elasticsearch.search.suggest.completion; import com.carrotsearch.hppc.ObjectLongHashMap; + +import org.apache.lucene.index.Fields; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; @@ -32,28 +34,35 @@ import java.io.IOException; public class CompletionFieldStats { - public static CompletionStats completionStats(IndexReader indexReader, String ... fields) { + /** + * Returns total in-heap bytes used by all suggesters. This method has CPU cost O(numIndexedFields). + * + * @param fieldNamePatterns if non-null, any completion field name matching any of these patterns will break out its in-heap bytes + * separately in the returned {@link CompletionStats} + */ + public static CompletionStats completionStats(IndexReader indexReader, String ... 
fieldNamePatterns) { long sizeInBytes = 0; ObjectLongHashMap completionFields = null; - if (fields != null && fields.length > 0) { - completionFields = new ObjectLongHashMap<>(fields.length); + if (fieldNamePatterns != null && fieldNamePatterns.length > 0) { + completionFields = new ObjectLongHashMap<>(fieldNamePatterns.length); } for (LeafReaderContext atomicReaderContext : indexReader.leaves()) { LeafReader atomicReader = atomicReaderContext.reader(); try { - for (String fieldName : atomicReader.fields()) { - Terms terms = atomicReader.fields().terms(fieldName); + Fields fields = atomicReader.fields(); + for (String fieldName : fields) { + Terms terms = fields.terms(fieldName); if (terms instanceof CompletionTerms) { // TODO: currently we load up the suggester for reporting its size long fstSize = ((CompletionTerms) terms).suggester().ramBytesUsed(); - if (fields != null && fields.length > 0 && Regex.simpleMatch(fields, fieldName)) { + if (fieldNamePatterns != null && fieldNamePatterns.length > 0 && Regex.simpleMatch(fieldNamePatterns, fieldName)) { completionFields.addTo(fieldName, fstSize); } sizeInBytes += fstSize; } } - } catch (IOException ignored) { - throw new ElasticsearchException(ignored); + } catch (IOException ioe) { + throw new ElasticsearchException(ioe); } } return new CompletionStats(sizeInBytes, completionFields); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java index eab62a063a7..8ffd497eb3a 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java @@ -38,10 +38,10 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest private CompletionFieldMapper.CompletionFieldType fieldType; private CompletionSuggestionBuilder.FuzzyOptionsBuilder fuzzyOptionsBuilder; private CompletionSuggestionBuilder.RegexOptionsBuilder regexOptionsBuilder; - private Map> queryContexts = Collections.EMPTY_MAP; + private Map> queryContexts = Collections.emptyMap(); private final MapperService mapperService; private final IndexFieldDataService indexFieldDataService; - private Set payloadFields = Collections.EMPTY_SET; + private Set payloadFields = Collections.emptySet(); CompletionSuggestionContext(Suggester suggester, MapperService mapperService, IndexFieldDataService indexFieldDataService) { super(suggester); diff --git a/core/src/main/java/org/elasticsearch/snapshots/Snapshot.java b/core/src/main/java/org/elasticsearch/snapshots/Snapshot.java index d4311cab901..1206ef53501 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/Snapshot.java +++ b/core/src/main/java/org/elasticsearch/snapshots/Snapshot.java @@ -29,8 +29,6 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import static java.util.Collections.*; - /** * Represent information about snapshot */ @@ -93,7 +91,7 @@ public class Snapshot implements Comparable, ToXContent, FromXContentB * Special constructor for the prototype object */ private Snapshot() { - this("", (List) EMPTY_LIST, 0); + this("", Collections.emptyList(), 0); } private static SnapshotState snapshotState(String reason, List shardFailures) { diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 
964cfacc8c1..14fc9029b00 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -484,7 +484,7 @@ public class TransportService extends AbstractLifecycleComponent implem @Override public Map profileBoundAddresses() { - return Collections.EMPTY_MAP; + return Collections.emptyMap(); } @Override diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 1b4fcf17255..7e060cd6d90 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -80,8 +80,9 @@ grant { // TODO: look into this and decide if users should simply set the actual sysprop?! permission java.util.PropertyPermission "org.jboss.netty.epollBugWorkaround", "write"; - // needed by lucene SPI currently - permission java.lang.RuntimePermission "getClassLoader"; + // Netty SelectorUtil wants to change this, because of https://bugs.openjdk.java.net/browse/JDK-6427854 + // the bug says it only happened rarely, and that its fixed, but apparently it still happens rarely! + permission java.util.PropertyPermission "sun.nio.ch.bugLevel", "write"; // needed by Settings permission java.lang.RuntimePermission "getenv.*"; diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy b/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy index d32ea6a2435..8e7ca8d8b6e 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy @@ -34,5 +34,6 @@ grant { permission java.util.PropertyPermission "rhino.stack.style", "read"; // needed IndyInterface selectMethod (setCallSiteTarget) + // TODO: clean this up / only give it to engines that really must have it permission java.lang.RuntimePermission "getClassLoader"; }; diff --git a/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help b/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help index 389f6d9caa1..e6622e2743a 100644 --- a/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help +++ b/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help @@ -43,8 +43,6 @@ OFFICIAL PLUGINS - discovery-ec2 - discovery-gce - discovery-multicast - - lang-expression - - lang-groovy - lang-javascript - lang-python - mapper-attachments diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 261f8a19fd5..b7ff2cad91f 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -51,7 +51,6 @@ import org.elasticsearch.index.AlreadyExpiredException; import org.elasticsearch.index.Index; import org.elasticsearch.index.engine.IndexFailedEngineException; import org.elasticsearch.index.engine.RecoveryEngineException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.shard.IllegalIndexShardStateException; import org.elasticsearch.index.shard.IndexShardState; @@ -275,11 +274,6 @@ public class ExceptionSerializationTests extends ESTestCase { assertEquals(-3, alreadyExpiredException.now()); } - public void testMergeMappingException() throws 
IOException { - MergeMappingException ex = serialize(new MergeMappingException(new String[]{"one", "two"})); - assertArrayEquals(ex.failures(), new String[]{"one", "two"}); - } - public void testActionNotFoundTransportException() throws IOException { ActionNotFoundTransportException ex = serialize(new ActionNotFoundTransportException("AACCCTION")); assertEquals("AACCCTION", ex.action()); @@ -725,7 +719,6 @@ public class ExceptionSerializationTests extends ESTestCase { ids.put(84, org.elasticsearch.transport.NodeDisconnectedException.class); ids.put(85, org.elasticsearch.index.AlreadyExpiredException.class); ids.put(86, org.elasticsearch.search.aggregations.AggregationExecutionException.class); - ids.put(87, org.elasticsearch.index.mapper.MergeMappingException.class); ids.put(88, org.elasticsearch.indices.InvalidIndexTemplateException.class); ids.put(89, org.elasticsearch.percolator.PercolateException.class); ids.put(90, org.elasticsearch.index.engine.RefreshFailedEngineException.class); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index f983d885124..c642bdb1e79 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.action.admin.indices.template.put; import org.elasticsearch.Version; +import org.elasticsearch.action.admin.indices.alias.Alias; +import org.elasticsearch.cluster.metadata.AliasValidator; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService; @@ -28,13 +30,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.InvalidIndexTemplateException; import org.elasticsearch.test.ESTestCase; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; +import java.util.*; import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; public class MetaDataIndexTemplateServiceTests extends ESTestCase { @@ -68,6 +67,17 @@ public class MetaDataIndexTemplateServiceTests extends ESTestCase { assertThat(throwables.get(0).getMessage(), containsString("index must have 1 or more primary shards")); } + public void testIndexTemplateWithAliasNameEqualToTemplatePattern() { + PutRequest request = new PutRequest("api", "foobar_template"); + request.template("foobar"); + request.aliases(Collections.singleton(new Alias("foobar"))); + + List errors = putTemplate(request); + assertThat(errors.size(), equalTo(1)); + assertThat(errors.get(0), instanceOf(IllegalArgumentException.class)); + assertThat(errors.get(0).getMessage(), equalTo("Alias [foobar] cannot be the same as the template pattern [foobar]")); + } + private static List putTemplate(PutRequest request) { MetaDataCreateIndexService createIndexService = new MetaDataCreateIndexService( Settings.EMPTY, @@ -79,7 +89,7 @@ public class MetaDataIndexTemplateServiceTests extends ESTestCase { new HashSet<>(), null, null); - MetaDataIndexTemplateService service = new MetaDataIndexTemplateService(Settings.EMPTY, 
null, createIndexService, null); + MetaDataIndexTemplateService service = new MetaDataIndexTemplateService(Settings.EMPTY, null, createIndexService, new AliasValidator(Settings.EMPTY)); final List throwables = new ArrayList<>(); service.putTemplate(request, new MetaDataIndexTemplateService.PutListener() { diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 0242edae317..78f96bab7b2 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.action.bulk; import org.apache.lucene.util.Constants; import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; @@ -36,9 +37,7 @@ import java.util.List; import java.util.Map; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.*; public class BulkRequestTests extends ESTestCase { public void testSimpleBulk1() throws Exception { @@ -171,4 +170,39 @@ public class BulkRequestTests extends ESTestCase { bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null); assertThat(bulkRequest.numberOfActions(), equalTo(9)); } + + // issue 7361 + public void testBulkRequestWithRefresh() throws Exception { + BulkRequest bulkRequest = new BulkRequest(); + // We force here a "id is missing" validation error + bulkRequest.add(new DeleteRequest("index", "type", null).refresh(true)); + // We force here a "type is missing" validation error + bulkRequest.add(new DeleteRequest("index", null, "id")); + bulkRequest.add(new DeleteRequest("index", "type", "id").refresh(true)); + bulkRequest.add(new UpdateRequest("index", "type", "id").doc("{}").refresh(true)); + bulkRequest.add(new IndexRequest("index", "type", "id").source("{}").refresh(true)); + ActionRequestValidationException validate = bulkRequest.validate(); + assertThat(validate, notNullValue()); + assertThat(validate.validationErrors(), not(empty())); + assertThat(validate.validationErrors(), contains( + "Refresh is not supported on an item request, set the refresh flag on the BulkRequest instead.", + "id is missing", + "type is missing", + "Refresh is not supported on an item request, set the refresh flag on the BulkRequest instead.", + "Refresh is not supported on an item request, set the refresh flag on the BulkRequest instead.", + "Refresh is not supported on an item request, set the refresh flag on the BulkRequest instead.")); + } + + // issue 15120 + public void testBulkNoSource() throws Exception { + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new UpdateRequest("index", "type", "id")); + bulkRequest.add(new IndexRequest("index", "type", "id")); + ActionRequestValidationException validate = bulkRequest.validate(); + assertThat(validate, notNullValue()); + assertThat(validate.validationErrors(), not(empty())); + assertThat(validate.validationErrors(), contains( + "script or doc is missing", + "source is missing")); + } } diff --git 
a/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java b/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java index 0739b5da02d..1d3a9e18757 100644 --- a/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java @@ -18,6 +18,8 @@ */ package org.elasticsearch.action.index; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.VersionType; import org.elasticsearch.test.ESTestCase; @@ -25,10 +27,7 @@ import java.util.Arrays; import java.util.HashSet; import java.util.Set; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.*; /** */ @@ -67,4 +66,43 @@ public class IndexRequestTests extends ESTestCase { request.version(randomIntBetween(0, Integer.MAX_VALUE)); assertThat(request.validate().validationErrors(), not(empty())); } + + public void testSetTTLAsTimeValue() { + IndexRequest indexRequest = new IndexRequest(); + TimeValue ttl = TimeValue.parseTimeValue(randomTimeValue(), null, "ttl"); + indexRequest.ttl(ttl); + assertThat(indexRequest.ttl(), equalTo(ttl)); + } + + public void testSetTTLAsString() { + IndexRequest indexRequest = new IndexRequest(); + String ttlAsString = randomTimeValue(); + TimeValue ttl = TimeValue.parseTimeValue(ttlAsString, null, "ttl"); + indexRequest.ttl(ttlAsString); + assertThat(indexRequest.ttl(), equalTo(ttl)); + } + + public void testSetTTLAsLong() { + IndexRequest indexRequest = new IndexRequest(); + String ttlAsString = randomTimeValue(); + TimeValue ttl = TimeValue.parseTimeValue(ttlAsString, null, "ttl"); + indexRequest.ttl(ttl.millis()); + assertThat(indexRequest.ttl(), equalTo(ttl)); + } + + public void testValidateTTL() { + IndexRequest indexRequest = new IndexRequest("index", "type"); + if (randomBoolean()) { + indexRequest.ttl(randomIntBetween(Integer.MIN_VALUE, -1)); + } else { + if (randomBoolean()) { + indexRequest.ttl(new TimeValue(randomIntBetween(Integer.MIN_VALUE, -1))); + } else { + indexRequest.ttl(randomIntBetween(Integer.MIN_VALUE, -1) + "ms"); + } + } + ActionRequestValidationException validate = indexRequest.validate(); + assertThat(validate, notNullValue()); + assertThat(validate.getMessage(), containsString("ttl must not be negative")); + } } diff --git a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index bc9bd2b9095..6cf7a3384ab 100644 --- a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.get.GetResult; @@ -33,11 +34,7 @@ import org.elasticsearch.test.ESTestCase; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.equalTo; -import static 
org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.*; public class UpdateRequestTests extends ESTestCase { public void testUpdateRequest() throws Exception { @@ -127,7 +124,7 @@ public class UpdateRequestTests extends ESTestCase { // Related to issue 3256 public void testUpdateRequestWithTTL() throws Exception { - long providedTTLValue = randomIntBetween(500, 1000); + TimeValue providedTTLValue = TimeValue.parseTimeValue(randomTimeValue(), null, "ttl"); Settings settings = settings(Version.CURRENT).build(); UpdateHelper updateHelper = new UpdateHelper(settings, null); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index 6459cc4f306..137c6c5b2c2 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -24,6 +24,10 @@ import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.admin.indices.segments.IndexSegments; +import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; +import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; +import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.upgrade.UpgradeIT; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchRequestBuilder; @@ -38,6 +42,7 @@ import org.elasticsearch.common.util.MultiDataPathUpgrader; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.engine.EngineConfig; +import org.elasticsearch.index.engine.Segment; import org.elasticsearch.index.mapper.string.StringFieldMapperPositionIncrementGapTests; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.shard.MergePolicyConfig; @@ -275,51 +280,11 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { } } - public void testHandlingOfUnsupportedDanglingIndexes() throws Exception { - setupCluster(); - Collections.shuffle(unsupportedIndexes, getRandom()); - for (String index : unsupportedIndexes) { - assertUnsupportedIndexHandling(index); - } - } - - /** - * Waits for the index to show up in the cluster state in closed state - */ - void ensureClosed(final String index) throws InterruptedException { - assertTrue(awaitBusy(() -> { - ClusterState state = client().admin().cluster().prepareState().get().getState(); - return state.metaData().hasIndex(index) && state.metaData().index(index).getState() == IndexMetaData.State.CLOSE; - } - ) - ); - } - - /** - * Checks that the given index cannot be opened due to incompatible version - */ - void assertUnsupportedIndexHandling(String index) throws Exception { - long startTime = System.currentTimeMillis(); - logger.info("--> Testing old index " + index); - String indexName = loadIndex(index); - // force reloading dangling indices with a cluster state republish - client().admin().cluster().prepareReroute().get(); - ensureClosed(indexName); - try { - client().admin().indices().prepareOpen(indexName).get(); - 
fail("Shouldn't be able to open an old index"); - } catch (IllegalStateException ex) { - assertThat(ex.getMessage(), containsString("was created before v2.0.0.beta1 and wasn't upgraded")); - } - unloadIndex(indexName); - logger.info("--> Done testing " + index + ", took " + ((System.currentTimeMillis() - startTime) / 1000.0) + " seconds"); - } - void assertOldIndexWorks(String index) throws Exception { Version version = extractVersion(index); String indexName = loadIndex(index); importIndex(indexName); - assertIndexSanity(indexName); + assertIndexSanity(indexName, version); assertBasicSearchWorks(indexName); assertBasicAggregationWorks(indexName); assertRealtimeGetWorks(indexName); @@ -339,11 +304,22 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { version.luceneVersion.minor == Version.CURRENT.luceneVersion.minor; } - void assertIndexSanity(String indexName) { + void assertIndexSanity(String indexName, Version indexCreated) { GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex().addIndices(indexName).get(); assertEquals(1, getIndexResponse.indices().length); assertEquals(indexName, getIndexResponse.indices()[0]); + Version actualVersionCreated = Version.indexCreated(getIndexResponse.getSettings().get(indexName)); + assertEquals(indexCreated, actualVersionCreated); ensureYellow(indexName); + IndicesSegmentResponse segmentsResponse = client().admin().indices().prepareSegments(indexName).get(); + IndexSegments segments = segmentsResponse.getIndices().get(indexName); + for (IndexShardSegments indexShardSegments : segments) { + for (ShardSegments shardSegments : indexShardSegments) { + for (Segment segment : shardSegments) { + assertEquals(indexCreated.luceneVersion, segment.version); + } + } + } SearchResponse test = client().prepareSearch(indexName).get(); assertThat(test.getHits().getTotalHits(), greaterThanOrEqualTo(1l)); } diff --git a/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java b/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java index 95456fda901..9f16ade87e8 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java +++ b/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java @@ -189,6 +189,6 @@ abstract class FailAndRetryMockTransport imp @Override public Map profileBoundAddresses() { - return Collections.EMPTY_MAP; + return Collections.emptyMap(); } } diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java index 3f5a91960fc..093e46186b3 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java @@ -63,7 +63,7 @@ public class TransportClientNodesServiceTests extends ESTestCase { transport = new FailAndRetryMockTransport(getRandom()) { @Override public List getLocalAddresses() { - return Collections.EMPTY_LIST; + return Collections.emptyList(); } @Override diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java index 96a865f4da4..9e842a38722 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java @@ -43,23 +43,17 @@ import 
org.elasticsearch.test.MockLogAppender; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.ThreadPool; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Set; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.*; /** * @@ -711,32 +705,148 @@ public class ClusterServiceIT extends ESIntegTestCase { .build(); internalCluster().startNode(settings); ClusterService clusterService = internalCluster().getInstance(ClusterService.class); - BlockingTask block = new BlockingTask(); - clusterService.submitStateUpdateTask("test", Priority.IMMEDIATE, block); + BlockingTask block = new BlockingTask(Priority.IMMEDIATE); + clusterService.submitStateUpdateTask("test", block); int taskCount = randomIntBetween(5, 20); Priority[] priorities = Priority.values(); // will hold all the tasks in the order in which they were executed - List tasks = new ArrayList<>(taskCount); + List tasks = new ArrayList<>(taskCount); CountDownLatch latch = new CountDownLatch(taskCount); for (int i = 0; i < taskCount; i++) { Priority priority = priorities[randomIntBetween(0, priorities.length - 1)]; - clusterService.submitStateUpdateTask("test", priority, new PrioritiezedTask(priority, latch, tasks)); + clusterService.submitStateUpdateTask("test", new PrioritizedTask(priority, latch, tasks)); } block.release(); latch.await(); Priority prevPriority = null; - for (PrioritiezedTask task : tasks) { + for (PrioritizedTask task : tasks) { if (prevPriority == null) { - prevPriority = task.priority; + prevPriority = task.priority(); } else { - assertThat(task.priority.sameOrAfter(prevPriority), is(true)); + assertThat(task.priority().sameOrAfter(prevPriority), is(true)); } } } + public void testClusterStateBatchedUpdates() throws InterruptedException { + Settings settings = settingsBuilder() + .put("discovery.type", "local") + .build(); + internalCluster().startNode(settings); + ClusterService clusterService = internalCluster().getInstance(ClusterService.class); + + AtomicInteger counter = new AtomicInteger(); + class Task { + private AtomicBoolean state = new AtomicBoolean(); + + public void execute() { + if (!state.compareAndSet(false, true)) { + throw new IllegalStateException(); + } else { + counter.incrementAndGet(); + } + } + } + + class TaskExecutor implements ClusterStateTaskExecutor { + private AtomicInteger counter = new AtomicInteger(); + + @Override + public BatchResult execute(ClusterState currentState, List tasks) throws Exception { + tasks.forEach(task -> task.execute()); + counter.addAndGet(tasks.size()); + return BatchResult.builder().successes(tasks).build(currentState); + } + + @Override + public boolean runOnlyOnMaster() { + return false; + } + } + int numberOfThreads = randomIntBetween(2, 8); + int 
tasksSubmittedPerThread = randomIntBetween(1, 1024); + + ConcurrentMap counters = new ConcurrentHashMap<>(); + CountDownLatch updateLatch = new CountDownLatch(numberOfThreads * tasksSubmittedPerThread); + ClusterStateTaskListener listener = new ClusterStateTaskListener() { + @Override + public void onFailure(String source, Throwable t) { + assert false; + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + counters.computeIfAbsent(source, key -> new AtomicInteger()).incrementAndGet(); + updateLatch.countDown(); + } + }; + + int numberOfExecutors = Math.max(1, numberOfThreads / 4); + List executors = new ArrayList<>(); + for (int i = 0; i < numberOfExecutors; i++) { + executors.add(new TaskExecutor()); + } + + // randomly assign tasks to executors + List assignments = new ArrayList<>(); + for (int i = 0; i < numberOfThreads; i++) { + for (int j = 0; j < tasksSubmittedPerThread; j++) { + assignments.add(randomFrom(executors)); + } + } + + Map counts = new HashMap<>(); + for (TaskExecutor executor : assignments) { + counts.merge(executor, 1, (previous, one) -> previous + one); + } + + CountDownLatch startingGun = new CountDownLatch(1 + numberOfThreads); + List threads = new ArrayList<>(); + for (int i = 0; i < numberOfThreads; i++) { + final int index = i; + Thread thread = new Thread(() -> { + startingGun.countDown(); + for (int j = 0; j < tasksSubmittedPerThread; j++) { + ClusterStateTaskExecutor executor = assignments.get(index * tasksSubmittedPerThread + j); + clusterService.submitStateUpdateTask( + Thread.currentThread().getName(), + new Task(), + ClusterStateTaskConfig.build(randomFrom(Priority.values())), + executor, + listener); + } + }); + threads.add(thread); + thread.start(); + } + + startingGun.countDown(); + for (Thread thread : threads) { + thread.join(); + } + + // wait until all the cluster state updates have been processed + updateLatch.await(); + + // assert the number of executed tasks is correct + assertEquals(numberOfThreads * tasksSubmittedPerThread, counter.get()); + + // assert each executor executed the correct number of tasks + for (TaskExecutor executor : executors) { + if (counts.containsKey(executor)) { + assertEquals((int) counts.get(executor), executor.counter.get()); + } + } + + // assert the correct number of clusterStateProcessed events were triggered + for (Map.Entry entry : counters.entrySet()) { + assertEquals(entry.getValue().get(), tasksSubmittedPerThread); + } + } + @TestLogging("cluster:TRACE") // To ensure that we log cluster state events on TRACE level public void testClusterStateUpdateLogging() throws Exception { Settings settings = settingsBuilder() @@ -947,6 +1057,10 @@ public class ClusterServiceIT extends ESIntegTestCase { private static class BlockingTask extends ClusterStateUpdateTask { private final CountDownLatch latch = new CountDownLatch(1); + public BlockingTask(Priority priority) { + super(priority); + } + @Override public ClusterState execute(ClusterState currentState) throws Exception { latch.await(); @@ -963,14 +1077,13 @@ public class ClusterServiceIT extends ESIntegTestCase { } - private static class PrioritiezedTask extends ClusterStateUpdateTask { + private static class PrioritizedTask extends ClusterStateUpdateTask { - private final Priority priority; private final CountDownLatch latch; - private final List tasks; + private final List tasks; - private PrioritiezedTask(Priority priority, CountDownLatch latch, List tasks) { - this.priority = priority; + private 
PrioritizedTask(Priority priority, CountDownLatch latch, List tasks) { + super(priority); this.latch = latch; this.tasks = tasks; } diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java new file mode 100644 index 00000000000..59116859322 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.Version; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class MetaDataTests extends ESTestCase { + + public void testIndexAndAliasWithSameName() { + IndexMetaData.Builder builder = IndexMetaData.builder("index") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(AliasMetaData.builder("index").build()); + try { + MetaData.builder().put(builder).build(); + fail("expection should have been thrown"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo("index and alias names need to be unique, but alias [index] and index [index] have the same name")); + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ActiveAllocationIdTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ActiveAllocationIdTests.java new file mode 100644 index 00000000000..7a7f4722e97 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ActiveAllocationIdTests.java @@ -0,0 +1,81 @@ +package org.elasticsearch.cluster.routing.allocation; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.test.ESAllocationTestCase; + +import java.util.Arrays; +import java.util.HashSet; + +import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED; +import static org.hamcrest.Matchers.equalTo; + +public class ActiveAllocationIdTests extends ESAllocationTestCase { + + public void testActiveAllocationIdsUpdated() { + AllocationService allocation = createAllocationService(); + + logger.info("creating an index with 1 shard, 2 replicas"); + MetaData metaData = MetaData.builder() + 
.put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(2)) + // add index metadata where we have no routing nodes to check that allocation ids are not removed + .put(IndexMetaData.builder("test-old").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(2) + .putActiveAllocationIds(0, new HashSet<>(Arrays.asList("x", "y")))) + .build(); + RoutingTable routingTable = RoutingTable.builder() + .addAsNew(metaData.index("test")) + .build(); + ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build(); + + logger.info("adding three nodes and performing rerouting"); + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put( + newNode("node1")).put(newNode("node2")).put(newNode("node3"))).build(); + RoutingAllocation.Result rerouteResult = allocation.reroute(clusterState, "reroute"); + clusterState = ClusterState.builder(clusterState).routingResult(rerouteResult).build(); + + assertThat(clusterState.metaData().index("test").activeAllocationIds(0).size(), equalTo(0)); + assertThat(clusterState.metaData().index("test-old").activeAllocationIds(0), equalTo(new HashSet<>(Arrays.asList("x", "y")))); + + logger.info("start primary shard"); + rerouteResult = allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); + clusterState = ClusterState.builder(clusterState).routingResult(rerouteResult).build(); + + assertThat(clusterState.getRoutingTable().shardsWithState(STARTED).size(), equalTo(1)); + assertThat(clusterState.metaData().index("test").activeAllocationIds(0).size(), equalTo(1)); + assertThat(clusterState.getRoutingTable().shardsWithState(STARTED).get(0).allocationId().getId(), + equalTo(clusterState.metaData().index("test").activeAllocationIds(0).iterator().next())); + assertThat(clusterState.metaData().index("test-old").activeAllocationIds(0), equalTo(new HashSet<>(Arrays.asList("x", "y")))); + + logger.info("start replica shards"); + rerouteResult = allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); + clusterState = ClusterState.builder(clusterState).routingResult(rerouteResult).build(); + + assertThat(clusterState.metaData().index("test").activeAllocationIds(0).size(), equalTo(3)); + + logger.info("remove a node"); + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) + .remove("node1")) + .build(); + rerouteResult = allocation.reroute(clusterState, "reroute"); + clusterState = ClusterState.builder(clusterState).routingResult(rerouteResult).build(); + + assertThat(clusterState.metaData().index("test").activeAllocationIds(0).size(), equalTo(2)); + + logger.info("remove all remaining nodes"); + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) + .remove("node2").remove("node3")) + .build(); + rerouteResult = allocation.reroute(clusterState, "reroute"); + clusterState = ClusterState.builder(clusterState).routingResult(rerouteResult).build(); + + // active allocation ids should not be updated + assertThat(clusterState.getRoutingTable().shardsWithState(UNASSIGNED).size(), equalTo(3)); + assertThat(clusterState.metaData().index("test").activeAllocationIds(0).size(), equalTo(2)); + } +} diff --git a/core/src/test/java/org/elasticsearch/cluster/serialization/DiffableTests.java 
b/core/src/test/java/org/elasticsearch/cluster/serialization/DiffableTests.java index 6c120e7ac6c..b3050392c98 100644 --- a/core/src/test/java/org/elasticsearch/cluster/serialization/DiffableTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/serialization/DiffableTests.java @@ -22,68 +22,383 @@ package org.elasticsearch.cluster.serialization; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.DiffableUtils; -import org.elasticsearch.cluster.DiffableUtils.KeyedReader; +import org.elasticsearch.cluster.DiffableUtils.MapDiff; +import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamableReader; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; +import java.util.Set; +import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.nullValue; public class DiffableTests extends ESTestCase { - public void testJdkMapDiff() throws IOException { - Map before = new HashMap<>(); - before.put("foo", new TestDiffable("1")); - before.put("bar", new TestDiffable("2")); - before.put("baz", new TestDiffable("3")); - before = unmodifiableMap(before); - Map map = new HashMap<>(); - map.putAll(before); - map.remove("bar"); - map.put("baz", new TestDiffable("4")); - map.put("new", new TestDiffable("5")); - Map after = unmodifiableMap(new HashMap<>(map)); - Diff diff = DiffableUtils.diff(before, after); - BytesStreamOutput out = new BytesStreamOutput(); - diff.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); - Map serialized = DiffableUtils.readJdkMapDiff(in, TestDiffable.PROTO).apply(before); - assertThat(serialized.size(), equalTo(3)); - assertThat(serialized.get("foo").value(), equalTo("1")); - assertThat(serialized.get("baz").value(), equalTo("4")); - assertThat(serialized.get("new").value(), equalTo("5")); + + public void testJKDMapDiff() throws IOException { + new JdkMapDriver() { + @Override + protected boolean diffableValues() { + return true; + } + + @Override + protected TestDiffable createValue(Integer key, boolean before) { + return new TestDiffable(String.valueOf(before ? key : key + 1)); + } + + @Override + protected MapDiff diff(Map before, Map after) { + return DiffableUtils.diff(before, after, keySerializer); + } + + @Override + protected MapDiff readDiff(StreamInput in) throws IOException { + return useProtoForDiffableSerialization + ? DiffableUtils.readJdkMapDiff(in, keySerializer, TestDiffable.PROTO) + : DiffableUtils.readJdkMapDiff(in, keySerializer, diffableValueSerializer()); + } + }.execute(); + + new JdkMapDriver() { + @Override + protected boolean diffableValues() { + return false; + } + + @Override + protected String createValue(Integer key, boolean before) { + return String.valueOf(before ? 
key : key + 1); + } + + @Override + protected MapDiff diff(Map before, Map after) { + return DiffableUtils.diff(before, after, keySerializer, nonDiffableValueSerializer()); + } + + @Override + protected MapDiff readDiff(StreamInput in) throws IOException { + return DiffableUtils.readJdkMapDiff(in, keySerializer, nonDiffableValueSerializer()); + } + }.execute(); } public void testImmutableOpenMapDiff() throws IOException { - ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(); - builder.put("foo", new TestDiffable("1")); - builder.put("bar", new TestDiffable("2")); - builder.put("baz", new TestDiffable("3")); - ImmutableOpenMap before = builder.build(); - builder = ImmutableOpenMap.builder(before); - builder.remove("bar"); - builder.put("baz", new TestDiffable("4")); - builder.put("new", new TestDiffable("5")); - ImmutableOpenMap after = builder.build(); - Diff diff = DiffableUtils.diff(before, after); - BytesStreamOutput out = new BytesStreamOutput(); - diff.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); - ImmutableOpenMap serialized = DiffableUtils.readImmutableOpenMapDiff(in, new KeyedReader() { + new ImmutableOpenMapDriver() { @Override - public TestDiffable readFrom(StreamInput in, String key) throws IOException { + protected boolean diffableValues() { + return true; + } + + @Override + protected TestDiffable createValue(Integer key, boolean before) { + return new TestDiffable(String.valueOf(before ? key : key + 1)); + } + + @Override + protected MapDiff diff(ImmutableOpenMap before, ImmutableOpenMap after) { + return DiffableUtils.diff(before, after, keySerializer); + } + + @Override + protected MapDiff readDiff(StreamInput in) throws IOException { + return useProtoForDiffableSerialization + ? DiffableUtils.readImmutableOpenMapDiff(in, keySerializer, TestDiffable.PROTO) + : DiffableUtils.readImmutableOpenMapDiff(in, keySerializer, diffableValueSerializer()); + } + }.execute(); + + new ImmutableOpenMapDriver() { + @Override + protected boolean diffableValues() { + return false; + } + + @Override + protected String createValue(Integer key, boolean before) { + return String.valueOf(before ? key : key + 1); + } + + @Override + protected MapDiff diff(ImmutableOpenMap before, ImmutableOpenMap after) { + return DiffableUtils.diff(before, after, keySerializer, nonDiffableValueSerializer()); + } + + @Override + protected MapDiff readDiff(StreamInput in) throws IOException { + return DiffableUtils.readImmutableOpenMapDiff(in, keySerializer, nonDiffableValueSerializer()); + } + }.execute(); + } + + public void testImmutableOpenIntMapDiff() throws IOException { + new ImmutableOpenIntMapDriver() { + @Override + protected boolean diffableValues() { + return true; + } + + @Override + protected TestDiffable createValue(Integer key, boolean before) { + return new TestDiffable(String.valueOf(before ? key : key + 1)); + } + + @Override + protected MapDiff diff(ImmutableOpenIntMap before, ImmutableOpenIntMap after) { + return DiffableUtils.diff(before, after, keySerializer); + } + + @Override + protected MapDiff readDiff(StreamInput in) throws IOException { + return useProtoForDiffableSerialization + ? 
DiffableUtils.readImmutableOpenIntMapDiff(in, keySerializer, TestDiffable.PROTO) + : DiffableUtils.readImmutableOpenIntMapDiff(in, keySerializer, diffableValueSerializer()); + } + }.execute(); + + new ImmutableOpenIntMapDriver() { + @Override + protected boolean diffableValues() { + return false; + } + + @Override + protected String createValue(Integer key, boolean before) { + return String.valueOf(before ? key : key + 1); + } + + @Override + protected MapDiff diff(ImmutableOpenIntMap before, ImmutableOpenIntMap after) { + return DiffableUtils.diff(before, after, keySerializer, nonDiffableValueSerializer()); + } + + @Override + protected MapDiff readDiff(StreamInput in) throws IOException { + return DiffableUtils.readImmutableOpenIntMapDiff(in, keySerializer, nonDiffableValueSerializer()); + } + }.execute(); + } + + /** + * Class that abstracts over specific map implementation type and value kind (Diffable or not) + * @param map type + * @param value type + */ + public abstract class MapDriver { + protected final Set keys = randomPositiveIntSet(); + protected final Set keysToRemove = new HashSet<>(randomSubsetOf(randomInt(keys.size()), keys.toArray(new Integer[keys.size()]))); + protected final Set keysThatAreNotRemoved = Sets.difference(keys, keysToRemove); + protected final Set keysToOverride = new HashSet<>(randomSubsetOf(randomInt(keysThatAreNotRemoved.size()), + keysThatAreNotRemoved.toArray(new Integer[keysThatAreNotRemoved.size()]))); + protected final Set keysToAdd = Sets.difference(randomPositiveIntSet(), keys); // make sure keysToAdd does not contain elements in keys + protected final Set keysUnchanged = Sets.difference(keysThatAreNotRemoved, keysToOverride); + + protected final DiffableUtils.KeySerializer keySerializer = randomBoolean() + ? 
DiffableUtils.getIntKeySerializer() + : DiffableUtils.getVIntKeySerializer(); + + protected final boolean useProtoForDiffableSerialization = randomBoolean(); + + private Set randomPositiveIntSet() { + int maxSetSize = randomInt(6); + Set result = new HashSet<>(); + for (int i = 0; i < maxSetSize; i++) { + // due to duplicates, set size can be smaller than maxSetSize + result.add(randomIntBetween(0, 100)); + } + return result; + } + + /** + * whether we operate on {@link org.elasticsearch.cluster.Diffable} values + */ + protected abstract boolean diffableValues(); + + /** + * functions that determines value in "before" or "after" map based on key + */ + protected abstract V createValue(Integer key, boolean before); + + /** + * creates map based on JDK-based map + */ + protected abstract T createMap(Map values); + + /** + * calculates diff between two maps + */ + protected abstract MapDiff diff(T before, T after); + + /** + * reads diff of maps from stream + */ + protected abstract MapDiff readDiff(StreamInput in) throws IOException; + + /** + * gets element at key "key" in map "map" + */ + protected abstract V get(T map, Integer key); + + /** + * returns size of given map + */ + protected abstract int size(T map); + + /** + * executes the actual test + */ + public void execute() throws IOException { + logger.debug("Keys in 'before' map: {}", keys); + logger.debug("Keys to remove: {}", keysToRemove); + logger.debug("Keys to override: {}", keysToOverride); + logger.debug("Keys to add: {}", keysToAdd); + + logger.debug("--> creating 'before' map"); + Map before = new HashMap<>(); + for (Integer key : keys) { + before.put(key, createValue(key, true)); + } + T beforeMap = createMap(before); + + logger.debug("--> creating 'after' map"); + Map after = new HashMap<>(); + after.putAll(before); + for (Integer key : keysToRemove) { + after.remove(key); + } + for (Integer key : keysToOverride) { + after.put(key, createValue(key, false)); + } + for (Integer key : keysToAdd) { + after.put(key, createValue(key, false)); + } + T afterMap = createMap(unmodifiableMap(after)); + + MapDiff diffMap = diff(beforeMap, afterMap); + + // check properties of diffMap + assertThat(new HashSet(diffMap.getDeletes()), equalTo(keysToRemove)); + if (diffableValues()) { + assertThat(diffMap.getDiffs().keySet(), equalTo(keysToOverride)); + for (Integer key : keysToOverride) { + assertThat(diffMap.getDiffs().get(key).apply(get(beforeMap, key)), equalTo(get(afterMap, key))); + } + assertThat(diffMap.getUpserts().keySet(), equalTo(keysToAdd)); + for (Integer key : keysToAdd) { + assertThat(diffMap.getUpserts().get(key), equalTo(get(afterMap, key))); + } + } else { + assertThat(diffMap.getDiffs(), equalTo(emptyMap())); + Set keysToAddAndOverride = Sets.union(keysToAdd, keysToOverride); + assertThat(diffMap.getUpserts().keySet(), equalTo(keysToAddAndOverride)); + for (Integer key : keysToAddAndOverride) { + assertThat(diffMap.getUpserts().get(key), equalTo(get(afterMap, key))); + } + } + + if (randomBoolean()) { + logger.debug("--> serializing diff"); + BytesStreamOutput out = new BytesStreamOutput(); + diffMap.writeTo(out); + StreamInput in = StreamInput.wrap(out.bytes()); + logger.debug("--> reading diff back"); + diffMap = readDiff(in); + } + T appliedDiffMap = diffMap.apply(beforeMap); + + // check properties of appliedDiffMap + assertThat(size(appliedDiffMap), equalTo(keys.size() - keysToRemove.size() + keysToAdd.size())); + for (Integer key : keysToRemove) { + assertThat(get(appliedDiffMap, key), nullValue()); + } + for 
(Integer key : keysUnchanged) { + assertThat(get(appliedDiffMap, key), equalTo(get(beforeMap, key))); + } + for (Integer key : keysToOverride) { + assertThat(get(appliedDiffMap, key), not(equalTo(get(beforeMap, key)))); + assertThat(get(appliedDiffMap, key), equalTo(get(afterMap, key))); + } + for (Integer key : keysToAdd) { + assertThat(get(appliedDiffMap, key), equalTo(get(afterMap, key))); + } + } + } + + abstract class JdkMapDriver extends MapDriver, V> { + + @Override + protected Map createMap(Map values) { + return values; + } + + @Override + protected V get(Map map, Integer key) { + return map.get(key); + } + + @Override + protected int size(Map map) { + return map.size(); + } + } + + abstract class ImmutableOpenMapDriver extends MapDriver, V> { + + @Override + protected ImmutableOpenMap createMap(Map values) { + return ImmutableOpenMap.builder().putAll(values).build(); + } + + @Override + protected V get(ImmutableOpenMap map, Integer key) { + return map.get(key); + } + + @Override + protected int size(ImmutableOpenMap map) { + return map.size(); + } + } + + + abstract class ImmutableOpenIntMapDriver extends MapDriver, V> { + + @Override + protected ImmutableOpenIntMap createMap(Map values) { + return ImmutableOpenIntMap.builder().putAll(values).build(); + } + + @Override + protected V get(ImmutableOpenIntMap map, Integer key) { + return map.get(key); + } + + @Override + protected int size(ImmutableOpenIntMap map) { + return map.size(); + } + } + + private static DiffableUtils.DiffableValueSerializer diffableValueSerializer() { + return new DiffableUtils.DiffableValueSerializer() { + @Override + public TestDiffable read(StreamInput in, K key) throws IOException { return new TestDiffable(in.readString()); } @Override - public Diff readDiffFrom(StreamInput in, String key) throws IOException { + public Diff readDiff(StreamInput in, K key) throws IOException { return AbstractDiffable.readDiffFrom(new StreamableReader() { @Override public TestDiffable readFrom(StreamInput in) throws IOException { @@ -91,13 +406,23 @@ public class DiffableTests extends ESTestCase { } }, in); } - }).apply(before); - assertThat(serialized.size(), equalTo(3)); - assertThat(serialized.get("foo").value(), equalTo("1")); - assertThat(serialized.get("baz").value(), equalTo("4")); - assertThat(serialized.get("new").value(), equalTo("5")); - + }; } + + private static DiffableUtils.NonDiffableValueSerializer nonDiffableValueSerializer() { + return new DiffableUtils.NonDiffableValueSerializer() { + @Override + public void write(String value, StreamOutput out) throws IOException { + out.writeString(value); + } + + @Override + public String read(StreamInput in, K key) throws IOException { + return in.readString(); + } + }; + } + public static class TestDiffable extends AbstractDiffable { public static final TestDiffable PROTO = new TestDiffable(""); @@ -121,6 +446,22 @@ public class DiffableTests extends ESTestCase { public void writeTo(StreamOutput out) throws IOException { out.writeString(value); } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + TestDiffable that = (TestDiffable) o; + + return !(value != null ? !value.equals(that.value) : that.value != null); + + } + + @Override + public int hashCode() { + return value != null ? 
value.hashCode() : 0; + } } } diff --git a/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java b/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java index f49e1f3a16e..dcbbc1ed337 100644 --- a/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java +++ b/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java @@ -19,6 +19,9 @@ package org.elasticsearch.common.cli; +import java.nio.file.NoSuchFileException; +import java.util.List; + import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -44,10 +47,28 @@ public class TerminalTests extends CliToolTestCase { assertPrinted(terminal, Terminal.Verbosity.VERBOSE, "text"); } + public void testError() throws Exception { + try { + // actually throw so we have a stacktrace + throw new NoSuchFileException("/path/to/some/file"); + } catch (NoSuchFileException e) { + CaptureOutputTerminal terminal = new CaptureOutputTerminal(Terminal.Verbosity.NORMAL); + terminal.printError(e); + List output = terminal.getTerminalOutput(); + assertFalse(output.isEmpty()); + assertTrue(output.get(0), output.get(0).contains("NoSuchFileException")); // exception class + assertTrue(output.get(0), output.get(0).contains("/path/to/some/file")); // message + assertEquals(1, output.size()); + + // TODO: we should test stack trace is printed in debug mode...except debug is a sysprop instead of + // a command line param...maybe it should be VERBOSE instead of a separate debug prop? + } + } + private void assertPrinted(CaptureOutputTerminal logTerminal, Terminal.Verbosity verbosity, String text) { logTerminal.print(verbosity, text); assertThat(logTerminal.getTerminalOutput(), hasSize(1)); - assertThat(logTerminal.getTerminalOutput(), hasItem(is("text"))); + assertThat(logTerminal.getTerminalOutput(), hasItem(text)); logTerminal.terminalOutput.clear(); } diff --git a/core/src/test/java/org/elasticsearch/common/compress/lzf/CompressedStreamOutput.java b/core/src/test/java/org/elasticsearch/common/compress/lzf/CompressedStreamOutput.java deleted file mode 100644 index 3cf0bcd5cfd..00000000000 --- a/core/src/test/java/org/elasticsearch/common/compress/lzf/CompressedStreamOutput.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.compress.lzf; - -import org.elasticsearch.Version; -import org.elasticsearch.common.io.stream.StreamOutput; - -import java.io.IOException; - -/** - */ -public abstract class CompressedStreamOutput extends StreamOutput { - - private final StreamOutput out; - - protected byte[] uncompressed; - protected int uncompressedLength; - private int position = 0; - - private boolean closed; - - public CompressedStreamOutput(StreamOutput out) throws IOException { - this.out = out; - super.setVersion(out.getVersion()); - writeHeader(out); - } - - @Override - public StreamOutput setVersion(Version version) { - out.setVersion(version); - return super.setVersion(version); - } - - @Override - public void write(int b) throws IOException { - if (position >= uncompressedLength) { - flushBuffer(); - } - uncompressed[position++] = (byte) b; - } - - @Override - public void writeByte(byte b) throws IOException { - if (position >= uncompressedLength) { - flushBuffer(); - } - uncompressed[position++] = b; - } - - @Override - public void writeBytes(byte[] input, int offset, int length) throws IOException { - // ES, check if length is 0, and don't write in this case - if (length == 0) { - return; - } - final int BUFFER_LEN = uncompressedLength; - - // simple case first: buffering only (for trivially short writes) - int free = BUFFER_LEN - position; - if (free >= length) { - System.arraycopy(input, offset, uncompressed, position, length); - position += length; - return; - } - // fill partial input as much as possible and flush - if (position > 0) { - System.arraycopy(input, offset, uncompressed, position, free); - position += free; - flushBuffer(); - offset += free; - length -= free; - } - - // then write intermediate full block, if any, without copying: - while (length >= BUFFER_LEN) { - compress(input, offset, BUFFER_LEN, out); - offset += BUFFER_LEN; - length -= BUFFER_LEN; - } - - // and finally, copy leftovers in input, if any - if (length > 0) { - System.arraycopy(input, offset, uncompressed, 0, length); - } - position = length; - } - - @Override - public void flush() throws IOException { - flushBuffer(); - out.flush(); - } - - @Override - public void close() throws IOException { - if (!closed) { - flushBuffer(); - closed = true; - doClose(); - out.close(); - } - } - - protected abstract void doClose() throws IOException; - - @Override - public void reset() throws IOException { - position = 0; - out.reset(); - } - - private void flushBuffer() throws IOException { - if (position > 0) { - compress(uncompressed, 0, position, out); - position = 0; - } - } - - protected abstract void writeHeader(StreamOutput out) throws IOException; - - /** - * Compresses the data into the output - */ - protected abstract void compress(byte[] data, int offset, int len, StreamOutput out) throws IOException; - -} diff --git a/core/src/test/java/org/elasticsearch/common/compress/lzf/CorruptedCompressorTests.java b/core/src/test/java/org/elasticsearch/common/compress/lzf/CorruptedCompressorTests.java deleted file mode 100644 index a18a9e3fb65..00000000000 --- a/core/src/test/java/org/elasticsearch/common/compress/lzf/CorruptedCompressorTests.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.compress.lzf; - -import com.ning.compress.lzf.ChunkDecoder; -import com.ning.compress.lzf.ChunkEncoder; -import com.ning.compress.lzf.LZFChunk; -import com.ning.compress.lzf.util.ChunkDecoderFactory; -import com.ning.compress.lzf.util.ChunkEncoderFactory; -import org.elasticsearch.test.ESTestCase; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.Arrays; - -/** - * Test an extremely rare corruption produced by the pure java impl of ChunkEncoder. - */ -public class CorruptedCompressorTests extends ESTestCase { - - public void testCorruption() throws IOException { - // this test generates a hash collision: [0,1,153,64] hashes the same as [1,153,64,64] - // and then leverages the bug s/inPos/0/ to corrupt the array - // the first array is used to insert a reference from this hash to offset 6 - // and then the hash table is reused and still thinks that there is such a hash at position 6 - // and at position 7, it finds a sequence with the same hash - // so it inserts a buggy reference - byte[] b1 = new byte[] {0,1,2,3,4,(byte)153,64,64,64,9,9,9,9,9,9,9,9,9,9}; - byte[] b2 = new byte[] {1,(byte)153,0,0,0,0,(byte)153,64,64,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; - ChunkEncoder encoder = ChunkEncoderFactory.safeInstance(); - ChunkDecoder decoder = ChunkDecoderFactory.safeInstance(); - check(encoder, decoder, b1, 0, b1.length); - final int off = 6; - check(encoder, decoder, b2, off, b2.length - off); - } - - private void check(ChunkEncoder encoder, ChunkDecoder decoder, byte[] bytes, int offset, int length) throws IOException { - ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); - byte[] expected = new byte[length]; - byte[] buffer = new byte[LZFChunk.MAX_CHUNK_LEN]; - byte[] output = new byte[length]; - System.arraycopy(bytes, offset, expected, 0, length); - encoder.encodeAndWriteChunk(bytes, offset, length, outputStream); - System.out.println(Arrays.toString(Arrays.copyOf(outputStream.toByteArray(), 20))); - InputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray()); - assertEquals(decoder.decodeChunk(inputStream, buffer, output), length); - - System.out.println(Arrays.toString(Arrays.copyOf(output, 20))); - assertArrayEquals(expected, output); - } -} diff --git a/core/src/test/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamOutput.java b/core/src/test/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamOutput.java deleted file mode 100644 index 3aa2a5de806..00000000000 --- a/core/src/test/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamOutput.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.compress.lzf; - -import com.ning.compress.BufferRecycler; -import com.ning.compress.lzf.ChunkEncoder; -import com.ning.compress.lzf.LZFChunk; -import com.ning.compress.lzf.util.ChunkEncoderFactory; - -import org.elasticsearch.common.io.stream.StreamOutput; - -import java.io.IOException; - -public class LZFCompressedStreamOutput extends CompressedStreamOutput { - - private final BufferRecycler recycler; - private final ChunkEncoder encoder; - - public LZFCompressedStreamOutput(StreamOutput out) throws IOException { - super(out); - this.recycler = BufferRecycler.instance(); - this.uncompressed = this.recycler.allocOutputBuffer(LZFChunk.MAX_CHUNK_LEN); - this.uncompressedLength = LZFChunk.MAX_CHUNK_LEN; - this.encoder = ChunkEncoderFactory.safeInstance(recycler); - } - - @Override - public void writeHeader(StreamOutput out) throws IOException { - // nothing to do here, each chunk has a header of its own - } - - @Override - protected void compress(byte[] data, int offset, int len, StreamOutput out) throws IOException { - encoder.encodeAndWriteChunk(data, offset, len, out); - } - - @Override - protected void doClose() throws IOException { - byte[] buf = uncompressed; - if (buf != null) { - uncompressed = null; - recycler.releaseOutputBuffer(buf); - } - encoder.close(); - } -} diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java new file mode 100644 index 00000000000..f15a731e86e --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -0,0 +1,143 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.geo.builders; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.test.ESTestCase; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; + +import static org.hamcrest.Matchers.*; + +public abstract class AbstractShapeBuilderTestCase extends ESTestCase { + + private static final int NUMBER_OF_TESTBUILDERS = 20; + private static NamedWriteableRegistry namedWriteableRegistry; + + /** + * setup for the whole base test class + */ + @BeforeClass + public static void init() { + if (namedWriteableRegistry == null) { + namedWriteableRegistry = new NamedWriteableRegistry(); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PointBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, CircleBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, EnvelopeBuilder.PROTOTYPE); + } + } + + @AfterClass + public static void afterClass() throws Exception { + namedWriteableRegistry = null; + } + + /** + * create random shape that is put under test + */ + protected abstract SB createTestShapeBuilder(); + + /** + * mutate the given shape so the returned shape is different + */ + protected abstract SB mutate(SB original) throws IOException; + + /** + * Test that creates new shape from a random test shape and checks both for equality + */ + public void testFromXContent() throws IOException { + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + SB testShape = createTestShapeBuilder(); + XContentBuilder contentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + if (randomBoolean()) { + contentBuilder.prettyPrint(); + } + XContentBuilder builder = testShape.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); + XContentParser shapeParser = XContentHelper.createParser(builder.bytes()); + XContentHelper.createParser(builder.bytes()); + shapeParser.nextToken(); + ShapeBuilder parsedShape = ShapeBuilder.parse(shapeParser); + assertNotSame(testShape, parsedShape); + assertEquals(testShape, parsedShape); + assertEquals(testShape.hashCode(), parsedShape.hashCode()); + } + } + + /** + * Test serialization and deserialization of the test shape. 
+ */ + public void testSerialization() throws IOException { + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + SB testShape = createTestShapeBuilder(); + SB deserializedShape = copyShape(testShape); + assertEquals(deserializedShape, testShape); + assertEquals(deserializedShape.hashCode(), testShape.hashCode()); + assertNotSame(deserializedShape, testShape); + } + } + + /** + * Test equality and hashCode properties + */ + public void testEqualsAndHashcode() throws IOException { + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + SB firstShape = createTestShapeBuilder(); + assertFalse("shape is equal to null", firstShape.equals(null)); + assertFalse("shape is equal to incompatible type", firstShape.equals("")); + assertTrue("shape is not equal to self", firstShape.equals(firstShape)); + assertThat("same shape's hashcode returns different values if called multiple times", firstShape.hashCode(), + equalTo(firstShape.hashCode())); + assertThat("different shapes should not be equal", mutate(firstShape), not(equalTo(firstShape))); + + SB secondShape = copyShape(firstShape); + assertTrue("shape is not equal to self", secondShape.equals(secondShape)); + assertTrue("shape is not equal to its copy", firstShape.equals(secondShape)); + assertTrue("equals is not symmetric", secondShape.equals(firstShape)); + assertThat("shape copy's hashcode is different from original hashcode", secondShape.hashCode(), equalTo(firstShape.hashCode())); + + SB thirdShape = copyShape(secondShape); + assertTrue("shape is not equal to self", thirdShape.equals(thirdShape)); + assertTrue("shape is not equal to its copy", secondShape.equals(thirdShape)); + assertThat("shape copy's hashcode is different from original hashcode", secondShape.hashCode(), equalTo(thirdShape.hashCode())); + assertTrue("equals is not transitive", firstShape.equals(thirdShape)); + assertThat("shape copy's hashcode is different from original hashcode", firstShape.hashCode(), equalTo(thirdShape.hashCode())); + assertTrue("equals is not symmetric", thirdShape.equals(secondShape)); + assertTrue("equals is not symmetric", thirdShape.equals(firstShape)); + } + } + + protected SB copyShape(SB original) throws IOException { + try (BytesStreamOutput output = new BytesStreamOutput()) { + original.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + ShapeBuilder prototype = (ShapeBuilder) namedWriteableRegistry.getPrototype(ShapeBuilder.class, original.getWriteableName()); + @SuppressWarnings("unchecked") + SB copy = (SB) prototype.readFrom(in); + return copy; + } + } + } +} diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/CirlceBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/CirlceBuilderTests.java new file mode 100644 index 00000000000..6b102b87b2c --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/CirlceBuilderTests.java @@ -0,0 +1,58 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.geo.builders; + +import com.vividsolutions.jts.geom.Coordinate; + +import org.elasticsearch.common.unit.DistanceUnit; + +import java.io.IOException; + +public class CirlceBuilderTests extends AbstractShapeBuilderTestCase { + + @Override + protected CircleBuilder createTestShapeBuilder() { + double centerX = randomDoubleBetween(-180, 180, false); + double centerY = randomDoubleBetween(-90, 90, false); + return new CircleBuilder() + .center(new Coordinate(centerX, centerY)) + .radius(randomDoubleBetween(0.1, 10.0, false), randomFrom(DistanceUnit.values())); + } + + @Override + protected CircleBuilder mutate(CircleBuilder original) throws IOException { + CircleBuilder mutation = copyShape(original); + double radius = original.radius(); + DistanceUnit unit = original.unit(); + + if (randomBoolean()) { + mutation.center(new Coordinate(original.center().x/2, original.center().y/2)); + } else if (randomBoolean()) { + radius = radius/2; + } else { + DistanceUnit newRandom = unit; + while (newRandom == unit) { + newRandom = randomFrom(DistanceUnit.values()); + }; + unit = newRandom; + } + return mutation.radius(radius, unit); + } +} diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java new file mode 100644 index 00000000000..e6f3db2f8af --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.geo.builders; + +import com.spatial4j.core.shape.Rectangle; +import com.vividsolutions.jts.geom.Coordinate; + +import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation; +import org.elasticsearch.test.geo.RandomShapeGenerator; + +import java.io.IOException; + +public class EnvelopeBuilderTests extends AbstractShapeBuilderTestCase { + + @Override + protected EnvelopeBuilder createTestShapeBuilder() { + EnvelopeBuilder envelope = new EnvelopeBuilder(randomFrom(Orientation.values())); + Rectangle box = RandomShapeGenerator.xRandomRectangle(getRandom(), RandomShapeGenerator.xRandomPoint(getRandom())); + envelope.topLeft(box.getMinX(), box.getMaxY()) + .bottomRight(box.getMaxX(), box.getMinY()); + return envelope; + } + + @Override + protected EnvelopeBuilder mutate(EnvelopeBuilder original) throws IOException { + EnvelopeBuilder mutation = copyShape(original); + if (randomBoolean()) { + // toggle orientation + mutation.orientation = (original.orientation == Orientation.LEFT ? Orientation.RIGHT : Orientation.LEFT); + } else { + // move one corner to the middle of original + switch (randomIntBetween(0, 3)) { + case 0: + mutation.topLeft(new Coordinate(randomDoubleBetween(-180.0, original.bottomRight.x, true), original.topLeft.y)); + break; + case 1: + mutation.topLeft(new Coordinate(original.topLeft.x, randomDoubleBetween(original.bottomRight.y, 90.0, true))); + break; + case 2: + mutation.bottomRight(new Coordinate(randomDoubleBetween(original.topLeft.x, 180.0, true), original.bottomRight.y)); + break; + case 3: + mutation.bottomRight(new Coordinate(original.bottomRight.x, randomDoubleBetween(-90.0, original.topLeft.y, true))); + break; + } + } + return mutation; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/fieldvisitor/AllFieldsVisitor.java b/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java similarity index 56% rename from core/src/main/java/org/elasticsearch/index/fieldvisitor/AllFieldsVisitor.java rename to core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java index beb7de2c756..1e94a1bab3a 100644 --- a/core/src/main/java/org/elasticsearch/index/fieldvisitor/AllFieldsVisitor.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/PointBuilderTests.java @@ -16,22 +16,23 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.index.fieldvisitor; -import org.apache.lucene.index.FieldInfo; +package org.elasticsearch.common.geo.builders; -import java.io.IOException; +import com.vividsolutions.jts.geom.Coordinate; -/** - */ -public class AllFieldsVisitor extends FieldsVisitor { +import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; - public AllFieldsVisitor() { - super(true); +public class PointBuilderTests extends AbstractShapeBuilderTestCase { + + @Override + protected PointBuilder createTestShapeBuilder() { + return (PointBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POINT); } @Override - public Status needsField(FieldInfo fieldInfo) throws IOException { - return Status.YES; + protected PointBuilder mutate(PointBuilder original) { + return new PointBuilder().coordinate(new Coordinate(original.longitude()/2, original.latitude()/2)); } } diff --git a/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java b/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java index 1010d2a5e8c..25c3a136271 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java @@ -57,4 +57,20 @@ public class DistanceUnitTests extends ESTestCase { assertThat("Value can be parsed from '" + testValue + unit.toString() + "'", DistanceUnit.Distance.parseDistance(unit.toString(testValue)).value, equalTo(testValue)); } } + + /** + * This test ensures that we are aware of accidental reordering in the distance unit ordinals, + * since equality in e.g. CircleShapeBuilder, hashCode and serialization rely on them + */ + public void testDistanceUnitNames() { + assertEquals(0, DistanceUnit.INCH.ordinal()); + assertEquals(1, DistanceUnit.YARD.ordinal()); + assertEquals(2, DistanceUnit.FEET.ordinal()); + assertEquals(3, DistanceUnit.KILOMETERS.ordinal()); + assertEquals(4, DistanceUnit.NAUTICALMILES.ordinal()); + assertEquals(5, DistanceUnit.MILLIMETERS.ordinal()); + assertEquals(6, DistanceUnit.CENTIMETERS.ordinal()); + assertEquals(7, DistanceUnit.MILES.ordinal()); + assertEquals(8, DistanceUnit.METERS.ordinal()); + } } diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index f9dbda217d0..b14792a2c33 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -25,11 +25,7 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -59,16 +55,7 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalTestCluster; import 
org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration; -import org.elasticsearch.test.disruption.BlockClusterStateProcessing; -import org.elasticsearch.test.disruption.IntermittentLongGCDisruption; -import org.elasticsearch.test.disruption.LongGCDisruption; -import org.elasticsearch.test.disruption.NetworkDelaysPartition; -import org.elasticsearch.test.disruption.NetworkDisconnectPartition; -import org.elasticsearch.test.disruption.NetworkPartition; -import org.elasticsearch.test.disruption.NetworkUnresponsivePartition; -import org.elasticsearch.test.disruption.ServiceDisruptionScheme; -import org.elasticsearch.test.disruption.SingleNodeDisruption; -import org.elasticsearch.test.disruption.SlowClusterStateProcessing; +import org.elasticsearch.test.disruption.*; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.TransportException; @@ -78,31 +65,15 @@ import org.elasticsearch.transport.TransportService; import org.junit.Before; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Semaphore; -import java.util.concurrent.TimeUnit; +import java.util.*; +import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.*; @ClusterScope(scope = Scope.TEST, numDataNodes = 0, transportClientRatio = 0) @ESIntegTestCase.SuppressLocalMode @@ -650,7 +621,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { // but will be queued and once the old master node un-freezes it gets executed. // The old master node will send this update + the cluster state where he is flagged as master to the other // nodes that follow the new master. These nodes should ignore this update. 
- internalCluster().getInstance(ClusterService.class, oldMasterNode).submitStateUpdateTask("sneaky-update", Priority.IMMEDIATE, new ClusterStateUpdateTask() { + internalCluster().getInstance(ClusterService.class, oldMasterNode).submitStateUpdateTask("sneaky-update", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public ClusterState execute(ClusterState currentState) throws Exception { return ClusterState.builder(currentState).build(); diff --git a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java index d9a75b297aa..e7a10b0f62b 100644 --- a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java @@ -193,7 +193,7 @@ public class ZenFaultDetectionTests extends ESTestCase { masterFD.addListener(new MasterFaultDetection.Listener() { @Override - public void onMasterFailure(DiscoveryNode masterNode, String reason) { + public void onMasterFailure(DiscoveryNode masterNode, Throwable cause, String reason) { failureNode[0] = masterNode; failureReason[0] = reason; notified.countDown(); @@ -211,4 +211,4 @@ public class ZenFaultDetectionTests extends ESTestCase { assertThat(failureReason[0], matcher); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java index b675d29c9da..95cb5b46b5f 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java +++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java @@ -91,7 +91,10 @@ public class NettyHttpServerPipeliningTests extends ESTestCase { } public void testThatHttpPipeliningWorksWhenEnabled() throws Exception { - Settings settings = settingsBuilder().put("http.pipelining", true).build(); + Settings settings = settingsBuilder() + .put("http.pipelining", true) + .put("http.port", "0") + .build(); httpServerTransport = new CustomNettyHttpServerTransport(settings); httpServerTransport.start(); InetSocketTransportAddress transportAddress = (InetSocketTransportAddress) randomFrom(httpServerTransport.boundAddress().boundAddresses()); @@ -105,7 +108,10 @@ public class NettyHttpServerPipeliningTests extends ESTestCase { } public void testThatHttpPipeliningCanBeDisabled() throws Exception { - Settings settings = settingsBuilder().put("http.pipelining", false).build(); + Settings settings = settingsBuilder() + .put("http.pipelining", false) + .put("http.port", "0") + .build(); httpServerTransport = new CustomNettyHttpServerTransport(settings); httpServerTransport.start(); InetSocketTransportAddress transportAddress = (InetSocketTransportAddress) randomFrom(httpServerTransport.boundAddress().boundAddresses()); diff --git a/core/src/test/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandlerTests.java b/core/src/test/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandlerTests.java index 166d394a9cb..28cdd241e15 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandlerTests.java +++ b/core/src/test/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandlerTests.java @@ -76,8 +76,6 @@ public class HttpPipeliningHandlerTests extends ESTestCase { private static final long RESPONSE_TIMEOUT = 10000L; private static final long CONNECTION_TIMEOUT = 10000L; private static 
final String CONTENT_TYPE_TEXT = "text/plain; charset=UTF-8"; - // TODO make me random - private static final InetSocketAddress HOST_ADDR = new InetSocketAddress(InetAddress.getLoopbackAddress(), 9080); private static final String PATH1 = "/1"; private static final String PATH2 = "/2"; private static final String SOME_RESPONSE_TEXT = "some response for "; @@ -90,6 +88,8 @@ public class HttpPipeliningHandlerTests extends ESTestCase { private HashedWheelTimer timer; + private InetSocketAddress boundAddress; + @Before public void startBootstraps() { clientBootstrap = new ClientBootstrap(new NioClientSocketChannelFactory()); @@ -118,7 +118,8 @@ public class HttpPipeliningHandlerTests extends ESTestCase { } }); - serverBootstrap.bind(HOST_ADDR); + Channel channel = serverBootstrap.bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0)); + boundAddress = (InetSocketAddress) channel.getLocalAddress(); timer = new HashedWheelTimer(); } @@ -137,7 +138,7 @@ public class HttpPipeliningHandlerTests extends ESTestCase { responsesIn = new CountDownLatch(1); responses.clear(); - final ChannelFuture connectionFuture = clientBootstrap.connect(HOST_ADDR); + final ChannelFuture connectionFuture = clientBootstrap.connect(boundAddress); assertTrue(connectionFuture.await(CONNECTION_TIMEOUT)); final Channel clientChannel = connectionFuture.getChannel(); @@ -145,11 +146,11 @@ public class HttpPipeliningHandlerTests extends ESTestCase { // NetworkAddress.formatAddress makes a proper HOST header. final HttpRequest request1 = new DefaultHttpRequest( HTTP_1_1, HttpMethod.GET, PATH1); - request1.headers().add(HOST, NetworkAddress.formatAddress(HOST_ADDR)); + request1.headers().add(HOST, NetworkAddress.formatAddress(boundAddress)); final HttpRequest request2 = new DefaultHttpRequest( HTTP_1_1, HttpMethod.GET, PATH2); - request2.headers().add(HOST, NetworkAddress.formatAddress(HOST_ADDR)); + request2.headers().add(HOST, NetworkAddress.formatAddress(boundAddress)); clientChannel.write(request1); clientChannel.write(request2); diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 30cb7cc8f99..2b76d03952d 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -104,8 +104,8 @@ public class IndexModuleTests extends ESTestCase { Set scriptEngines = new HashSet<>(); scriptEngines.add(new MustacheScriptEngineService(settings)); scriptEngines.addAll(Arrays.asList(scriptEngineServices)); - ScriptService scriptService = new ScriptService(settings, environment, scriptEngines, new ResourceWatcherService(settings, threadPool), new ScriptContextRegistry(Collections.EMPTY_LIST)); - IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(settings, Collections.EMPTY_SET, new NamedWriteableRegistry()); + ScriptService scriptService = new ScriptService(settings, environment, scriptEngines, new ResourceWatcherService(settings, threadPool), new ScriptContextRegistry(Collections.emptyList())); + IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(settings, Collections.emptySet(), new NamedWriteableRegistry()); return new NodeServicesProvider(threadPool, indicesQueryCache, null, warmer, bigArrays, client, scriptService, indicesQueriesRegistry, indicesFieldDataCache, circuitBreakerService); } @@ -251,7 +251,7 @@ public class IndexModuleTests extends ESTestCase { assertEquals("Unknown Similarity type 
[test_similarity] for [my_similarity]", ex.getMessage()); } } - + public void testSetupWithoutType() throws IOException { Settings indexSettings = Settings.settingsBuilder() .put("index.similarity.my_similarity.foo", "bar") diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index 9a1dc3587a9..3f97fe402fa 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -95,7 +95,7 @@ public class IndexSettingsTests extends ESTestCase { public void testSettingsConsistency() { Version version = VersionUtils.getPreviousVersion(); IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()); - IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY, Collections.EMPTY_LIST); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); assertEquals(version, settings.getIndexVersionCreated()); assertEquals("_na_", settings.getUUID()); try { @@ -106,7 +106,7 @@ public class IndexSettingsTests extends ESTestCase { } metaData = newIndexMeta("index", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build()); - settings = new IndexSettings(metaData, Settings.EMPTY, Collections.EMPTY_LIST); + settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); try { settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("index.test.setting.int", 42).build())); fail("uuid missing/change"); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java index ef13a891a29..7cd16e350a4 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java @@ -72,7 +72,7 @@ public class AnalysisModuleTests extends ModuleTestCase { public AnalysisRegistry getNewRegistry(Settings settings) { return new AnalysisRegistry(null, new Environment(settings), - Collections.EMPTY_MAP, Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new), Collections.EMPTY_MAP, Collections.EMPTY_MAP); + Collections.emptyMap(), Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new), Collections.emptyMap(), Collections.emptyMap()); } private Settings loadFromClasspath(String path) { diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java index 7d4164939b0..1404716b0c8 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java @@ -51,6 +51,6 @@ public class AnalysisTestsHelper { } IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings); Environment environment = new Environment(settings); - return new AnalysisRegistry(new HunspellService(settings, environment, Collections.EMPTY_MAP), environment).build(idxSettings); + return new AnalysisRegistry(new HunspellService(settings, environment, Collections.emptyMap()), environment).build(idxSettings); } } diff --git 
a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java index 0eb916826f4..e685c21422b 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java @@ -52,7 +52,7 @@ public class CompoundAnalysisTests extends ESTestCase { Settings settings = getJsonSettings(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings), - Collections.EMPTY_MAP,Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.EMPTY_MAP,Collections.EMPTY_MAP).build(idxSettings); + Collections.emptyMap(),Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.emptyMap(),Collections.emptyMap()).build(idxSettings); TokenFilterFactory filterFactory = analysisService.tokenFilter("dict_dec"); MatcherAssert.assertThat(filterFactory, instanceOf(DictionaryCompoundWordTokenFilterFactory.class)); @@ -71,7 +71,7 @@ public class CompoundAnalysisTests extends ESTestCase { Index index = new Index("test"); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings), - Collections.EMPTY_MAP, Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.EMPTY_MAP,Collections.EMPTY_MAP).build(idxSettings); + Collections.emptyMap(), Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.emptyMap(),Collections.emptyMap()).build(idxSettings); Analyzer analyzer = analysisService.analyzer(analyzerName).analyzer(); diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java index e8c351c0b37..eae80418b0d 100644 --- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -108,7 +108,7 @@ public class CodecTests extends ESTestCase { .put("path.home", createTempDir()) .build(); IndexSettings settings = IndexSettingsModule.newIndexSettings(new Index("_na"), nodeSettings); - SimilarityService similarityService = new SimilarityService(settings, Collections.EMPTY_MAP); + SimilarityService similarityService = new SimilarityService(settings, Collections.emptyMap()); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(nodeSettings)).build(settings); MapperRegistry mapperRegistry = new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()); MapperService service = new MapperService(settings, analysisService, similarityService, mapperRegistry); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 00265ea5279..f38092d131e 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -1610,7 +1610,7 @@ public class InternalEngineTests extends ESTestCase { // now it should be OK. 
IndexSettings indexSettings = new IndexSettings(defaultSettings.getIndexMetaData(), Settings.builder().put(defaultSettings.getSettings()).put(EngineConfig.INDEX_FORCE_NEW_TRANSLOG, true).build(), - Collections.EMPTY_LIST); + Collections.emptyList()); engine = createEngine(indexSettings, store, primaryTranslogDir, new MergeSchedulerConfig(indexSettings), newMergePolicy()); } @@ -1896,8 +1896,8 @@ public class InternalEngineTests extends ESTestCase { RootObjectMapper.Builder rootBuilder = new RootObjectMapper.Builder("test"); Index index = new Index(indexName); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); - AnalysisService analysisService = new AnalysisService(indexSettings, Collections.EMPTY_MAP, Collections.EMPTY_MAP, Collections.EMPTY_MAP, Collections.EMPTY_MAP); - SimilarityService similarityService = new SimilarityService(indexSettings, Collections.EMPTY_MAP); + AnalysisService analysisService = new AnalysisService(indexSettings, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); + SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); MapperRegistry mapperRegistry = new IndicesModule().getMapperRegistry(); MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry); DocumentMapper.Builder b = new DocumentMapper.Builder(settings, rootBuilder, mapperService); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java index 6ab4ca38d40..5a31618f14e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.Iterator; import java.util.List; @@ -36,6 +37,8 @@ public class FieldTypeLookupTests extends ESTestCase { FieldTypeLookup lookup = new FieldTypeLookup(); assertNull(lookup.get("foo")); assertNull(lookup.getByIndexName("foo")); + assertEquals(Collections.emptySet(), lookup.getTypes("foo")); + assertEquals(Collections.emptySet(), lookup.getTypesByIndexName("foo")); Collection names = lookup.simpleMatchToFullName("foo"); assertNotNull(names); assertTrue(names.isEmpty()); @@ -47,10 +50,20 @@ public class FieldTypeLookupTests extends ESTestCase { assertFalse(itr.hasNext()); } + public void testDefaultMapping() { + FieldTypeLookup lookup = new FieldTypeLookup(); + try { + lookup.copyAndAddAll(MapperService.DEFAULT_MAPPING, Collections.emptyList()); + fail(); + } catch (IllegalArgumentException expected) { + assertEquals("Default mappings should not be added to the lookup", expected.getMessage()); + } + } + public void testAddNewField() { FieldTypeLookup lookup = new FieldTypeLookup(); FakeFieldMapper f = new FakeFieldMapper("foo", "bar"); - FieldTypeLookup lookup2 = lookup.copyAndAddAll(newList(f)); + FieldTypeLookup lookup2 = lookup.copyAndAddAll("type", newList(f)); assertNull(lookup.get("foo")); assertNull(lookup.get("bar")); assertNull(lookup.getByIndexName("foo")); @@ -59,6 +72,14 @@ public class FieldTypeLookupTests extends ESTestCase { assertNull(lookup.get("bar")); assertEquals(f.fieldType(), lookup2.getByIndexName("bar")); assertNull(lookup.getByIndexName("foo")); + 
assertEquals(Collections.emptySet(), lookup.getTypes("foo")); + assertEquals(Collections.emptySet(), lookup.getTypesByIndexName("foo")); + assertEquals(Collections.emptySet(), lookup.getTypes("bar")); + assertEquals(Collections.emptySet(), lookup.getTypesByIndexName("bar")); + assertEquals(Collections.singleton("type"), lookup2.getTypes("foo")); + assertEquals(Collections.emptySet(), lookup2.getTypesByIndexName("foo")); + assertEquals(Collections.emptySet(), lookup2.getTypes("bar")); + assertEquals(Collections.singleton("type"), lookup2.getTypesByIndexName("bar")); assertEquals(1, size(lookup2.iterator())); } @@ -67,8 +88,8 @@ public class FieldTypeLookupTests extends ESTestCase { MappedFieldType originalFieldType = f.fieldType(); FakeFieldMapper f2 = new FakeFieldMapper("foo", "foo"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll(newList(f)); - FieldTypeLookup lookup2 = lookup.copyAndAddAll(newList(f2)); + lookup = lookup.copyAndAddAll("type1", newList(f)); + FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2)); assertNotSame(originalFieldType, f.fieldType()); assertSame(f.fieldType(), f2.fieldType()); @@ -82,8 +103,8 @@ public class FieldTypeLookupTests extends ESTestCase { FakeFieldMapper f2 = new FakeFieldMapper("bar", "foo"); MappedFieldType originalFieldType = f.fieldType(); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll(newList(f)); - FieldTypeLookup lookup2 = lookup.copyAndAddAll(newList(f2)); + lookup = lookup.copyAndAddAll("type1", newList(f)); + FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2)); assertNotSame(originalFieldType, f.fieldType()); assertSame(f.fieldType(), f2.fieldType()); @@ -98,8 +119,8 @@ public class FieldTypeLookupTests extends ESTestCase { FakeFieldMapper f2 = new FakeFieldMapper("foo", "bar"); MappedFieldType originalFieldType = f.fieldType(); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll(newList(f)); - FieldTypeLookup lookup2 = lookup.copyAndAddAll(newList(f2)); + lookup = lookup.copyAndAddAll("type1", newList(f)); + FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2)); assertNotSame(originalFieldType, f.fieldType()); assertSame(f.fieldType(), f2.fieldType()); @@ -113,18 +134,18 @@ public class FieldTypeLookupTests extends ESTestCase { FakeFieldMapper f = new FakeFieldMapper("foo", "foo"); FakeFieldMapper f2 = new FakeFieldMapper("bar", "bar"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll(newList(f, f2)); + lookup = lookup.copyAndAddAll("type1", newList(f, f2)); try { FakeFieldMapper f3 = new FakeFieldMapper("foo", "bar"); - lookup.copyAndAddAll(newList(f3)); + lookup.copyAndAddAll("type2", newList(f3)); } catch (IllegalStateException e) { assertTrue(e.getMessage().contains("insane mappings")); } try { FakeFieldMapper f3 = new FakeFieldMapper("bar", "foo"); - lookup.copyAndAddAll(newList(f3)); + lookup.copyAndAddAll("type2", newList(f3)); } catch (IllegalStateException e) { assertTrue(e.getMessage().contains("insane mappings")); } @@ -133,25 +154,25 @@ public class FieldTypeLookupTests extends ESTestCase { public void testCheckCompatibilityNewField() { FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup.checkCompatibility(newList(f1), false); + lookup.checkCompatibility("type", newList(f1), false); } public void testCheckCompatibilityMismatchedTypes() { FieldMapper f1 = new FakeFieldMapper("foo", "bar"); 
FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll(newList(f1)); + lookup = lookup.copyAndAddAll("type", newList(f1)); MappedFieldType ft2 = FakeFieldMapper.makeOtherFieldType("foo", "foo"); FieldMapper f2 = new FakeFieldMapper("foo", ft2); try { - lookup.checkCompatibility(newList(f2), false); + lookup.checkCompatibility("type2", newList(f2), false); fail("expected type mismatch"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]")); } // fails even if updateAllTypes == true try { - lookup.checkCompatibility(newList(f2), true); + lookup.checkCompatibility("type2", newList(f2), true); fail("expected type mismatch"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]")); @@ -161,31 +182,33 @@ public class FieldTypeLookupTests extends ESTestCase { public void testCheckCompatibilityConflict() { FieldMapper f1 = new FakeFieldMapper("foo", "bar"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll(newList(f1)); + lookup = lookup.copyAndAddAll("type", newList(f1)); MappedFieldType ft2 = FakeFieldMapper.makeFieldType("foo", "bar"); ft2.setBoost(2.0f); FieldMapper f2 = new FakeFieldMapper("foo", ft2); try { - lookup.checkCompatibility(newList(f2), false); + // different type + lookup.checkCompatibility("type2", newList(f2), false); fail("expected conflict"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("to update [boost] across all types")); } - lookup.checkCompatibility(newList(f2), true); // boost is updateable, so ok if forcing + lookup.checkCompatibility("type", newList(f2), false); // boost is updateable, so ok since we are implicitly updating all types + lookup.checkCompatibility("type2", newList(f2), true); // boost is updateable, so ok if forcing // now with a non changeable setting MappedFieldType ft3 = FakeFieldMapper.makeFieldType("foo", "bar"); ft3.setStored(true); FieldMapper f3 = new FakeFieldMapper("foo", ft3); try { - lookup.checkCompatibility(newList(f3), false); + lookup.checkCompatibility("type2", newList(f3), false); fail("expected conflict"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("has different [store] values")); } // even with updateAllTypes == true, incompatible try { - lookup.checkCompatibility(newList(f3), true); + lookup.checkCompatibility("type2", newList(f3), true); fail("expected conflict"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("has different [store] values")); @@ -196,7 +219,7 @@ public class FieldTypeLookupTests extends ESTestCase { FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz"); FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll(newList(f1, f2)); + lookup = lookup.copyAndAddAll("type", newList(f1, f2)); Collection names = lookup.simpleMatchToIndexNames("b*"); assertTrue(names.contains("baz")); assertTrue(names.contains("boo")); @@ -206,7 +229,7 @@ public class FieldTypeLookupTests extends ESTestCase { FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz"); FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll(newList(f1, f2)); + lookup = lookup.copyAndAddAll("type", newList(f1, f2)); Collection names = lookup.simpleMatchToFullName("b*"); assertTrue(names.contains("foo")); 
assertTrue(names.contains("bar")); @@ -215,7 +238,7 @@ public class FieldTypeLookupTests extends ESTestCase { public void testIteratorImmutable() { FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll(newList(f1)); + lookup = lookup.copyAndAddAll("type", newList(f1)); try { Iterator itr = lookup.iterator(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index b37392821a5..2b200524b8e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -21,6 +21,8 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.index.IndexService; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Rule; import org.junit.rules.ExpectedException; @@ -31,6 +33,11 @@ import static org.elasticsearch.test.VersionUtils.randomVersionBetween; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.hasToString; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.concurrent.ExecutionException; + public class MapperServiceTests extends ESSingleNodeTestCase { @Rule public ExpectedException expectedException = ExpectedException.none(); @@ -82,4 +89,56 @@ public class MapperServiceTests extends ESSingleNodeTestCase { .execute() .actionGet(); } + + public void testTypes() throws Exception { + IndexService indexService1 = createIndex("index1"); + MapperService mapperService = indexService1.mapperService(); + assertEquals(Collections.emptySet(), mapperService.types()); + + mapperService.merge("type1", new CompressedXContent("{\"type1\":{}}"), true, false); + assertNull(mapperService.documentMapper(MapperService.DEFAULT_MAPPING)); + assertEquals(Collections.singleton("type1"), mapperService.types()); + + mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent("{\"_default_\":{}}"), true, false); + assertNotNull(mapperService.documentMapper(MapperService.DEFAULT_MAPPING)); + assertEquals(Collections.singleton("type1"), mapperService.types()); + + mapperService.merge("type2", new CompressedXContent("{\"type2\":{}}"), true, false); + assertNotNull(mapperService.documentMapper(MapperService.DEFAULT_MAPPING)); + assertEquals(new HashSet<>(Arrays.asList("type1", "type2")), mapperService.types()); + } + + public void testIndexIntoDefaultMapping() throws Throwable { + // 1. test implicit index creation + try { + client().prepareIndex("index1", MapperService.DEFAULT_MAPPING, "1").setSource("{").execute().get(); + fail(); + } catch (Throwable t) { + if (t instanceof ExecutionException) { + t = ((ExecutionException) t).getCause(); + } + if (t instanceof IllegalArgumentException) { + assertEquals("It is forbidden to index into the default mapping [_default_]", t.getMessage()); + } else { + throw t; + } + } + + // 2. 
already existing index + IndexService indexService = createIndex("index2"); + try { + client().prepareIndex("index2", MapperService.DEFAULT_MAPPING, "2").setSource().execute().get(); + fail(); + } catch (Throwable t) { + if (t instanceof ExecutionException) { + t = ((ExecutionException) t).getCause(); + } + if (t instanceof IllegalArgumentException) { + assertEquals("It is forbidden to index into the default mapping [_default_]", t.getMessage()); + } else { + throw t; + } + } + assertFalse(indexService.mapperService().hasMapping(MapperService.DEFAULT_MAPPING)); + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java index 61f058dab64..e5d08db8d9f 100755 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java @@ -34,7 +34,6 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.BinaryFieldMapper; @@ -220,7 +219,7 @@ public class ExternalMapper extends FieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { // ignore this for now } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java index 9ca07a9c4b6..dae8bc67fda 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMetadataMapper.java @@ -28,7 +28,6 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; @@ -67,7 +66,7 @@ public class ExternalMetadataMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { if (!(mergeWith instanceof ExternalMetadataMapper)) { mergeResult.addConflict("Trying to merge " + mergeWith + " with " + this); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index 9741b82aad6..93fd71599c4 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -25,12 +25,14 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import 
org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.search.SearchHitField; @@ -715,28 +717,25 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("geohash", true).endObject().endObject().endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser(); - DocumentMapper stage1 = parser.parse(stage1Mapping); + MapperService mapperService = createIndex("test", settings).mapperService(); + DocumentMapper stage1 = mapperService.merge("type", new CompressedXContent(stage1Mapping), true, false); String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false) .field("geohash", false).endObject().endObject().endObject().endObject().string(); - DocumentMapper stage2 = parser.parse(stage2Mapping); - - MergeResult mergeResult = stage1.merge(stage2.mapping(), false, false); - assertThat(mergeResult.hasConflicts(), equalTo(true)); - assertThat(mergeResult.buildConflicts().length, equalTo(3)); - // todo better way of checking conflict? 
- assertThat("mapper [point] has different [lat_lon]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts())))); - assertThat("mapper [point] has different [geohash]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts())))); - assertThat("mapper [point] has different [geohash_precision]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts())))); + try { + mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("mapper [point] has different [lat_lon]")); + assertThat(e.getMessage(), containsString("mapper [point] has different [geohash]")); + assertThat(e.getMessage(), containsString("mapper [point] has different [geohash_precision]")); + } // correct mapping and ensure no failures stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("geohash", true).endObject().endObject().endObject().endObject().string(); - stage2 = parser.parse(stage2Mapping); - mergeResult = stage1.merge(stage2.mapping(), false, false); - assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false)); + mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false); } public void testGeoHashSearch() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java index c00bd3101ae..54e9e96f8ad 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java @@ -22,12 +22,14 @@ import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -35,6 +37,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.isIn; @@ -376,23 +379,21 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { .startObject("shape").field("type", "geo_shape").field("tree", "geohash").field("strategy", "recursive") .field("precision", "1m").field("tree_levels", 8).field("distance_error_pct", 0.01).field("orientation", "ccw") .endObject().endObject().endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - DocumentMapper stage1 = parser.parse(stage1Mapping); + MapperService mapperService = createIndex("test").mapperService(); + 
DocumentMapper stage1 = mapperService.merge("type", new CompressedXContent(stage1Mapping), true, false); String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("shape").field("type", "geo_shape").field("tree", "quadtree") .field("strategy", "term").field("precision", "1km").field("tree_levels", 26).field("distance_error_pct", 26) .field("orientation", "cw").endObject().endObject().endObject().endObject().string(); - DocumentMapper stage2 = parser.parse(stage2Mapping); - - MergeResult mergeResult = stage1.merge(stage2.mapping(), false, false); - // check correct conflicts - assertThat(mergeResult.hasConflicts(), equalTo(true)); - assertThat(mergeResult.buildConflicts().length, equalTo(4)); - ArrayList conflicts = new ArrayList<>(Arrays.asList(mergeResult.buildConflicts())); - assertThat("mapper [shape] has different [strategy]", isIn(conflicts)); - assertThat("mapper [shape] has different [tree]", isIn(conflicts)); - assertThat("mapper [shape] has different [tree_levels]", isIn(conflicts)); - assertThat("mapper [shape] has different [precision]", isIn(conflicts)); + try { + mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("mapper [shape] has different [strategy]")); + assertThat(e.getMessage(), containsString("mapper [shape] has different [tree]")); + assertThat(e.getMessage(), containsString("mapper [shape] has different [tree_levels]")); + assertThat(e.getMessage(), containsString("mapper [shape] has different [precision]")); + } // verify nothing changed FieldMapper fieldMapper = stage1.mappers().getMapper("shape"); @@ -411,11 +412,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("shape").field("type", "geo_shape").field("precision", "1m") .field("tree_levels", 8).field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject().string(); - stage2 = parser.parse(stage2Mapping); - mergeResult = stage1.merge(stage2.mapping(), false, false); - - // verify mapping changes, and ensure no failures - assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false)); + mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false); fieldMapper = stage1.mappers().getMapper("shape"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java index 8a6b183b11c..a5a073d147f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java @@ -31,34 +31,24 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.*; import 
org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.mapper.core.CompletionFieldMapper; -import org.elasticsearch.index.mapper.core.DateFieldMapper; -import org.elasticsearch.index.mapper.core.LongFieldMapper; -import org.elasticsearch.index.mapper.core.StringFieldMapper; -import org.elasticsearch.index.mapper.core.TokenCountFieldMapper; +import org.elasticsearch.index.mapper.core.*; import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.VersionUtils; +import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.Map; import java.util.TreeMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.mapper.MapperBuilders.doc; -import static org.elasticsearch.index.mapper.MapperBuilders.rootObject; -import static org.elasticsearch.index.mapper.MapperBuilders.stringField; +import static org.elasticsearch.index.mapper.MapperBuilders.*; import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.*; /** * @@ -526,4 +516,30 @@ public class MultiFieldTests extends ESSingleNodeTestCase { assertTrue(e.getMessage().contains("cannot be used in multi field")); } } + + public void testMultiFieldWithDot() throws IOException { + XContentBuilder mapping = jsonBuilder(); + mapping.startObject() + .startObject("my_type") + .startObject("properties") + .startObject("city") + .field("type", "string") + .startObject("fields") + .startObject("raw.foo") + .field("type", "string") + .field("index", "not_analyzed") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + + MapperService mapperService = createIndex("test").mapperService(); + try { + mapperService.documentMapperParser().parse(mapping.string()); + fail("this should throw an exception because one field contains a dot"); + } catch (MapperParsingException e) { + assertThat(e.getMessage(), equalTo("Field name [raw.foo] which is a multi field of [city] cannot contain '.'")); + } + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java index 07671a2d4b0..30890dcd22a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java @@ -22,9 +22,11 @@ package org.elasticsearch.index.mapper.multifield.merge; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -32,6 +34,7 @@ import 
org.elasticsearch.test.ESSingleNodeTestCase; import java.util.Arrays; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -113,9 +116,9 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json"); - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), true, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); @@ -129,12 +132,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json"); - DocumentMapper docMapper2 = parser.parse(mapping); - - MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), true, false); - assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false)); - - docMapper.merge(docMapper2.mapping(), false, false); + mapperService.merge("person", new CompressedXContent(mapping), false, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -151,12 +149,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json"); - DocumentMapper docMapper3 = parser.parse(mapping); - - mergeResult = docMapper.merge(docMapper3.mapping(), true, false); - assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false)); - - docMapper.merge(docMapper3.mapping(), false, false); + mapperService.merge("person", new CompressedXContent(mapping), false, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -168,24 +161,19 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json"); - DocumentMapper docMapper4 = parser.parse(mapping); - mergeResult = docMapper.merge(docMapper4.mapping(), true, false); - assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(true)); - assertThat(mergeResult.buildConflicts()[0], equalTo("mapper [name] has different [index] values")); - assertThat(mergeResult.buildConflicts()[1], equalTo("mapper [name] has different [store] values")); + try { + mapperService.merge("person", new CompressedXContent(mapping), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("mapper [name] has different [index] values")); + assertThat(e.getMessage(), containsString("mapper [name] has different [store] values")); + } - mergeResult = docMapper.merge(docMapper4.mapping(), false, false); - assertThat(Arrays.toString(mergeResult.buildConflicts()), 
mergeResult.hasConflicts(), equalTo(true)); - - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(mergeResult.buildConflicts()[0], equalTo("mapper [name] has different [index] values")); - assertThat(mergeResult.buildConflicts()[1], equalTo("mapper [name] has different [store] values")); - - // There are conflicts, but the `name.not_indexed3` has been added, b/c that field has no conflicts + // There are conflicts, so the `name.not_indexed3` has not been added assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue()); assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue()); assertThat(docMapper.mappers().getMapper("name.not_indexed2"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed3"), notNullValue()); + assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java index 121f100ffa6..9ac039a49fb 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableFieldType; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -478,7 +479,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = parser.parse(mapping); + DocumentMapper defaultMapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), true, false); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -507,10 +508,12 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject() .endObject().endObject().endObject().endObject().string(); - mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), true, false); - assertTrue(mergeResult.hasConflicts()); - assertEquals(1, mergeResult.buildConflicts().length); - assertTrue(mergeResult.buildConflicts()[0].contains("different [omit_norms]")); + try { + defaultMapper.merge(parser.parse(updatedMapping).mapping(), true, false); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("different [omit_norms]")); + } } /** diff --git a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index df9cc10d8c2..53a3bf7bb6e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -41,6 +41,7 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; @@ -557,7 +558,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { public void testMergingConflicts() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", true) - .startObject("fielddata").field("format", "doc_values").endObject() .field("store", "yes") .field("index", "analyzed") .field("path", "foo") @@ -565,9 +565,9 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - DocumentMapperParser parser = createIndex("test", indexSettings).mapperService().documentMapperParser(); + MapperService mapperService = createIndex("test", indexSettings).mapperService(); - DocumentMapper docMapper = parser.parse(mapping); + DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(mapping), true, false); assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", false) @@ -579,20 +579,32 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); - MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true, false); - List expectedConflicts = new ArrayList<>(Arrays.asList( - "mapper [_timestamp] has different [index] values", - "mapper [_timestamp] has different [store] values", - "Cannot update default in _timestamp value. Value is 1970-01-01 now encountering 1970-01-02", - "Cannot update path in _timestamp value. 
Value is foo path in merged mapping is bar")); - - for (String conflict : mergeResult.buildConflicts()) { - assertTrue("found unexpected conflict [" + conflict + "]", expectedConflicts.remove(conflict)); + try { + mapperService.merge("type", new CompressedXContent(mapping), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [index] values")); + assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [store] values")); } - assertTrue("missing conflicts: " + Arrays.toString(expectedConflicts.toArray()), expectedConflicts.isEmpty()); + assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); assertTrue(docMapper.timestampFieldMapper().enabled()); - assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("doc_values")); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("_timestamp").field("enabled", true) + .field("store", "yes") + .field("index", "analyzed") + .field("path", "bar") + .field("default", "1970-01-02") + .endObject() + .endObject().endObject().string(); + try { + mapperService.merge("type", new CompressedXContent(mapping), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Cannot update default in _timestamp value. Value is 1970-01-01 now encountering 1970-01-02")); + assertThat(e.getMessage(), containsString("Cannot update path in _timestamp value. Value is foo path in merged mapping is bar")); + } } public void testBackcompatMergingConflictsForIndexValues() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java index 7802e866819..efe07615532 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; @@ -169,8 +168,8 @@ public class TTLMappingTests extends ESSingleNodeTestCase { try { client().admin().indices().preparePutMapping("testindex").setSource(mappingWithTtlDisabled).setType("type").get(); fail(); - } catch (MergeMappingException mme) { - assertThat(mme.getDetailedMessage(), containsString("_ttl cannot be disabled once it was enabled.")); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("_ttl cannot be disabled once it was enabled.")); } GetMappingsResponse mappingsAfterUpdateResponse = client().admin().indices().prepareGetMappings("testindex").addTypes("type").get(); assertThat(mappingsBeforeUpdateResponse.getMappings().get("testindex").get("type").source(), equalTo(mappingsAfterUpdateResponse.getMappings().get("testindex").get("type").source())); @@ -295,7 +294,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase { request.process(MetaData.builder().build(), mappingMetaData, true, "test"); // _ttl in a 
document never worked, so backcompat is ignoring the field - assertEquals(-1, request.ttl()); + assertNull(request.ttl()); assertNull(docMapper.parse("test", "type", "1", doc.bytes()).rootDoc().get("_ttl")); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java index bf97cec3c46..35034dfd911 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.test.ESIntegTestCase; import java.util.HashMap; @@ -49,7 +48,7 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase { public void testAllConflicts() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/all_mapping_create_index.json"); String mappingUpdate = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json"); - String[] errorMessage = {"[_all] enabled is true now encountering false", + String[] errorMessage = { "[_all] has different [omit_norms] values", "[_all] has different [store] values", "[_all] has different [store_term_vector] values", @@ -62,6 +61,13 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase { testConflict(mapping, mappingUpdate, errorMessage); } + public void testAllDisabled() throws Exception { + XContentBuilder mapping = jsonBuilder().startObject().startObject("mappings").startObject(TYPE).startObject("_all").field("enabled", true).endObject().endObject().endObject().endObject(); + XContentBuilder mappingUpdate = jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject(); + String errorMessage = "[_all] enabled is true now encountering false"; + testConflict(mapping.string(), mappingUpdate.string(), errorMessage); + } + public void testAllWithDefault() throws Exception { String defaultMapping = jsonBuilder().startObject().startObject("_default_") .startObject("_all") @@ -150,9 +156,9 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase { try { client().admin().indices().preparePutMapping(INDEX).setType(TYPE).setSource(mappingUpdate).get(); fail(); - } catch (MergeMappingException e) { + } catch (IllegalArgumentException e) { for (String errorMessage : errorMessages) { - assertThat(e.getDetailedMessage(), containsString(errorMessage)); + assertThat(e.getMessage(), containsString(errorMessage)); } } compareMappingOnNodes(mappingsBeforeUpdateResponse); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index 5149ab10575..f0a8f5d079d 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -30,12 +30,14 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import 
org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MergeResult; +import org.elasticsearch.index.mapper.core.LongFieldMapper; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; import java.util.LinkedHashMap; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; @@ -107,6 +109,99 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { assertThat(mappingAfterUpdate, equalTo(mappingBeforeUpdate)); } + public void testConflictSameType() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("foo").field("type", "long").endObject() + .endObject().endObject().endObject(); + MapperService mapperService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping).mapperService(); + + XContentBuilder update = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("foo").field("type", "double").endObject() + .endObject().endObject().endObject(); + + try { + mapperService.merge("type", new CompressedXContent(update.string()), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); + } + + try { + mapperService.merge("type", new CompressedXContent(update.string()), false, false); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); + } + + assertTrue(mapperService.documentMapper("type").mapping().root().getMapper("foo") instanceof LongFieldMapper); + } + + public void testConflictNewType() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("foo").field("type", "long").endObject() + .endObject().endObject().endObject(); + MapperService mapperService = createIndex("test", Settings.settingsBuilder().build(), "type1", mapping).mapperService(); + + XContentBuilder update = XContentFactory.jsonBuilder().startObject().startObject("type2") + .startObject("properties").startObject("foo").field("type", "double").endObject() + .endObject().endObject().endObject(); + + try { + mapperService.merge("type2", new CompressedXContent(update.string()), false, false); + fail(); + } catch (IllegalArgumentException e) { + // expected + assertTrue(e.getMessage().contains("conflicts with existing mapping in other types")); + } + + try { + mapperService.merge("type2", new CompressedXContent(update.string()), false, false); + fail(); + } catch (IllegalArgumentException e) { + // expected + assertTrue(e.getMessage().contains("conflicts with existing mapping in other types")); + } + + assertTrue(mapperService.documentMapper("type1").mapping().root().getMapper("foo") instanceof LongFieldMapper); + assertNull(mapperService.documentMapper("type2")); + } + + // same as the testConflictNewType except that the mapping update is on an existing type + public void testConflictNewTypeUpdate() throws Exception { + XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("foo").field("type", "long").endObject() + .endObject().endObject().endObject(); + XContentBuilder mapping2 = 
XContentFactory.jsonBuilder().startObject().startObject("type2").endObject().endObject(); + MapperService mapperService = createIndex("test", Settings.settingsBuilder().build()).mapperService(); + + mapperService.merge("type1", new CompressedXContent(mapping1.string()), false, false); + mapperService.merge("type2", new CompressedXContent(mapping2.string()), false, false); + + XContentBuilder update = XContentFactory.jsonBuilder().startObject().startObject("type2") + .startObject("properties").startObject("foo").field("type", "double").endObject() + .endObject().endObject().endObject(); + + try { + mapperService.merge("type2", new CompressedXContent(update.string()), false, false); + fail(); + } catch (IllegalArgumentException e) { + // expected + assertTrue(e.getMessage().contains("conflicts with existing mapping in other types")); + } + + try { + mapperService.merge("type2", new CompressedXContent(update.string()), false, false); + fail(); + } catch (IllegalArgumentException e) { + // expected + assertTrue(e.getMessage().contains("conflicts with existing mapping in other types")); + } + + assertTrue(mapperService.documentMapper("type1").mapping().root().getMapper("foo") instanceof LongFieldMapper); + assertNotNull(mapperService.documentMapper("type2")); + assertNull(mapperService.documentMapper("type2").mapping().root().getMapper("foo")); + } + public void testIndexFieldParsingBackcompat() throws IOException { IndexService indexService = createIndex("test", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build()); XContentBuilder indexMapping = XContentFactory.jsonBuilder(); diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index b5825e24704..aebf00e0728 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -57,6 +57,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.*; @@ -183,7 +184,7 @@ public abstract class AbstractQueryTestCase> clientInvocationHandler); injector = new ModulesBuilder().add( new EnvironmentModule(new Environment(settings)), - new SettingsModule(settings), + new SettingsModule(settings, new SettingsFilter(settings)), new ThreadPoolModule(new ThreadPool(settings)), new IndicesModule() { @Override @@ -240,7 +241,7 @@ public abstract class AbstractQueryTestCase> ).createInjector(); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); ScriptService scriptService = injector.getInstance(ScriptService.class); - SimilarityService similarityService = new SimilarityService(idxSettings, Collections.EMPTY_MAP); + SimilarityService similarityService = new SimilarityService(idxSettings, Collections.emptyMap()); MapperRegistry mapperRegistry = injector.getInstance(MapperRegistry.class); MapperService mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry); indexFieldDataService = new IndexFieldDataService(idxSettings, 
injector.getInstance(IndicesFieldDataCache.class), injector.getInstance(CircuitBreakerService.class), mapperService); @@ -640,7 +641,6 @@ public abstract class AbstractQueryTestCase> secondQuery.boost(firstQuery.boost() + 1f + randomFloat()); } assertThat("different queries should not be equal", secondQuery, not(equalTo(firstQuery))); - assertThat("different queries should have different hashcode", secondQuery.hashCode(), not(equalTo(firstQuery.hashCode()))); } } @@ -896,7 +896,7 @@ public abstract class AbstractQueryTestCase> msg(expected, builder.string()), expected.replaceAll("\\s+",""), builder.string().replaceAll("\\s+","")); - } + } private static String msg(String left, String right) { int size = Math.min(left.length(), right.length()); diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java index 02c5aaeebec..bfbd57f9fda 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java @@ -33,9 +33,7 @@ import org.elasticsearch.test.geo.RandomShapeGenerator; import java.io.IOException; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.*; public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase { @@ -86,7 +84,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase op+1) { + if (rarely() && translogOperations > op + 1) { translog.commit(); } } @@ -918,7 +889,7 @@ public class TranslogTests extends ESTestCase { final TranslogReader reader = randomBoolean() ? writer : translog.openReader(writer.path(), Checkpoint.read(translog.location().resolve(Translog.CHECKPOINT_FILE_NAME))); for (int i = 0; i < numOps; i++) { ByteBuffer buffer = ByteBuffer.allocate(4); - reader.readBytes(buffer, reader.getFirstOperationOffset() + 4*i); + reader.readBytes(buffer, reader.getFirstOperationOffset() + 4 * i); buffer.flip(); final int value = buffer.getInt(); assertEquals(i, value); @@ -957,9 +928,9 @@ public class TranslogTests extends ESTestCase { for (int op = 0; op < translogOperations; op++) { locations.add(translog.add(new Translog.Index("test", "" + op, Integer.toString(op).getBytes(Charset.forName("UTF-8"))))); final boolean commit = commitOften ? 
frequently() : rarely(); - if (commit && op < translogOperations-1) { + if (commit && op < translogOperations - 1) { translog.commit(); - minUncommittedOp = op+1; + minUncommittedOp = op + 1; translogGeneration = translog.getGeneration(); } } @@ -993,7 +964,7 @@ public class TranslogTests extends ESTestCase { public void testRecoveryUncommitted() throws IOException { List locations = new ArrayList<>(); int translogOperations = randomIntBetween(10, 100); - final int prepareOp = randomIntBetween(0, translogOperations-1); + final int prepareOp = randomIntBetween(0, translogOperations - 1); Translog.TranslogGeneration translogGeneration = null; final boolean sync = randomBoolean(); for (int op = 0; op < translogOperations; op++) { @@ -1046,7 +1017,7 @@ public class TranslogTests extends ESTestCase { public void testRecoveryUncommittedFileExists() throws IOException { List locations = new ArrayList<>(); int translogOperations = randomIntBetween(10, 100); - final int prepareOp = randomIntBetween(0, translogOperations-1); + final int prepareOp = randomIntBetween(0, translogOperations - 1); Translog.TranslogGeneration translogGeneration = null; final boolean sync = randomBoolean(); for (int op = 0; op < translogOperations; op++) { @@ -1122,10 +1093,10 @@ public class TranslogTests extends ESTestCase { config.setTranslogGeneration(translogGeneration); Path ckp = config.getTranslogPath().resolve(Translog.CHECKPOINT_FILE_NAME); Checkpoint read = Checkpoint.read(ckp); - Checkpoint corrupted = new Checkpoint(0,0,0); + Checkpoint corrupted = new Checkpoint(0, 0, 0); Checkpoint.write(config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), corrupted, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW); try (Translog translog = new Translog(config)) { - fail("corrupted"); + fail("corrupted"); } catch (IllegalStateException ex) { assertEquals(ex.getMessage(), "Checkpoint file translog-2.ckp already exists but has corrupted content expected: Checkpoint{offset=2738, numOps=55, translogFileGeneration= 2} but got: Checkpoint{offset=0, numOps=0, translogFileGeneration= 0}"); } @@ -1163,7 +1134,7 @@ public class TranslogTests extends ESTestCase { List locations = new ArrayList<>(); List locations2 = new ArrayList<>(); int translogOperations = randomIntBetween(10, 100); - try(Translog translog2 = create(createTempDir())) { + try (Translog translog2 = create(createTempDir())) { for (int op = 0; op < translogOperations; op++) { locations.add(translog.add(new Translog.Index("test", "" + op, Integer.toString(op).getBytes(Charset.forName("UTF-8"))))); locations2.add(translog2.add(new Translog.Index("test", "" + op, Integer.toString(op).getBytes(Charset.forName("UTF-8"))))); @@ -1202,7 +1173,7 @@ public class TranslogTests extends ESTestCase { Translog.TranslogGeneration translogGeneration = translog.getGeneration(); translog.close(); - config.setTranslogGeneration(new Translog.TranslogGeneration(randomRealisticUnicodeOfCodepointLengthBetween(1, translogGeneration.translogUUID.length()),translogGeneration.translogFileGeneration)); + config.setTranslogGeneration(new Translog.TranslogGeneration(randomRealisticUnicodeOfCodepointLengthBetween(1, translogGeneration.translogUUID.length()), translogGeneration.translogFileGeneration)); try { new Translog(config); fail("translog doesn't belong to this UUID"); @@ -1220,4 +1191,92 @@ public class TranslogTests extends ESTestCase { assertNull(snapshot.next()); } } + + public void testFailOnClosedWrite() throws IOException { + translog.add(new 
Translog.Index("test", "1", Integer.toString(1).getBytes(Charset.forName("UTF-8")))); + translog.close(); + try { + translog.add(new Translog.Index("test", "1", Integer.toString(1).getBytes(Charset.forName("UTF-8")))); + fail("closed"); + } catch (AlreadyClosedException ex) { + // all is welll + } + } + + public void testCloseConcurrently() throws Throwable { + final int opsPerThread = randomIntBetween(10, 200); + int threadCount = 2 + randomInt(5); + + logger.info("testing with [{}] threads, each doing [{}] ops", threadCount, opsPerThread); + final BlockingQueue writtenOperations = new ArrayBlockingQueue<>(threadCount * opsPerThread); + + Thread[] threads = new Thread[threadCount]; + final Throwable[] threadExceptions = new Throwable[threadCount]; + final CountDownLatch downLatch = new CountDownLatch(1); + for (int i = 0; i < threadCount; i++) { + final int threadId = i; + threads[i] = new TranslogThread(translog, downLatch, opsPerThread, threadId, writtenOperations, threadExceptions); + threads[i].setDaemon(true); + threads[i].start(); + } + + downLatch.countDown(); + translog.close(); + + for (int i = 0; i < threadCount; i++) { + if (threadExceptions[i] != null) { + if ((threadExceptions[i] instanceof AlreadyClosedException) == false) { + throw threadExceptions[i]; + } + } + threads[i].join(60 * 1000); + } + } + + private static class TranslogThread extends Thread { + private final CountDownLatch downLatch; + private final int opsPerThread; + private final int threadId; + private final BlockingQueue writtenOperations; + private final Throwable[] threadExceptions; + private final Translog translog; + + public TranslogThread(Translog translog, CountDownLatch downLatch, int opsPerThread, int threadId, BlockingQueue writtenOperations, Throwable[] threadExceptions) { + this.translog = translog; + this.downLatch = downLatch; + this.opsPerThread = opsPerThread; + this.threadId = threadId; + this.writtenOperations = writtenOperations; + this.threadExceptions = threadExceptions; + } + + @Override + public void run() { + try { + downLatch.await(); + for (int opCount = 0; opCount < opsPerThread; opCount++) { + Translog.Operation op; + switch (randomFrom(Translog.Operation.Type.values())) { + case CREATE: + case INDEX: + op = new Translog.Index("test", threadId + "_" + opCount, + randomUnicodeOfLengthBetween(1, 20 * 1024).getBytes("UTF-8")); + break; + case DELETE: + op = new Translog.Delete(new Term("_uid", threadId + "_" + opCount), + opCount, 1 + randomInt(100000), + randomFrom(VersionType.values())); + break; + default: + throw new ElasticsearchException("not supported op type"); + } + + Translog.Location loc = translog.add(op); + writtenOperations.add(new LocationOperation(op, loc)); + } + } catch (Throwable t) { + threadExceptions[threadId] = t; + } + } + } } diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index d4051148029..522ebfb0f3b 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -72,12 +72,13 @@ public class IndicesServiceTests extends ESSingleNodeTestCase { IndicesService indicesService = getIndicesService(); IndexMetaData meta = IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas( 1).build(); - assertFalse("no shard location", indicesService.canDeleteShardContent(new ShardId("test", 0), meta)); + IndexSettings 
indexSettings = IndexSettingsModule.newIndexSettings("test", meta.getSettings()); + assertFalse("no shard location", indicesService.canDeleteShardContent(new ShardId("test", 0), indexSettings)); IndexService test = createIndex("test"); assertTrue(test.hasShard(0)); - assertFalse("shard is allocated", indicesService.canDeleteShardContent(new ShardId("test", 0), meta)); + assertFalse("shard is allocated", indicesService.canDeleteShardContent(new ShardId("test", 0), indexSettings)); test.removeShard(0, "boom"); - assertTrue("shard is removed", indicesService.canDeleteShardContent(new ShardId("test", 0), meta)); + assertTrue("shard is removed", indicesService.canDeleteShardContent(new ShardId("test", 0), indexSettings)); } public void testDeleteIndexStore() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java new file mode 100644 index 00000000000..c25b20699aa --- /dev/null +++ b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java @@ -0,0 +1,327 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.indices.cache.query; + +import java.io.IOException; + +import org.apache.lucene.document.Document; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ConstantScoreScorer; +import org.apache.lucene.search.ConstantScoreWeight; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryCachingPolicy; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.IOUtils; +import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.cache.query.QueryCacheStats; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.test.ESTestCase; + +public class IndicesQueryCacheTests extends ESTestCase { + + private static class DummyQuery extends Query { + + private final int id; + + DummyQuery(int id) { + this.id = id; + } + + @Override + public boolean equals(Object obj) { + return super.equals(obj) && id == ((DummyQuery) obj).id; + } + + @Override + public int hashCode() { + return 31 * super.hashCode() + id; + } + + @Override + public String toString(String field) { + return "dummy"; + } + + @Override + public Weight createWeight(IndexSearcher searcher, boolean needsScores) + throws IOException { + return new ConstantScoreWeight(this) { + @Override + public Scorer scorer(LeafReaderContext context) throws IOException { + return new ConstantScoreScorer(this, score(), DocIdSetIterator.all(context.reader().maxDoc())); + } + }; + } + + } + + public void testBasics() throws IOException { + Directory dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig()); + w.addDocument(new Document()); + DirectoryReader r = DirectoryReader.open(w, false); + w.close(); + ShardId shard = new ShardId(new Index("index"), 0); + r = ElasticsearchDirectoryReader.wrap(r, shard); + IndexSearcher s = new IndexSearcher(r); + s.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); + + Settings settings = Settings.builder() + .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT, 10) + .build(); + IndicesQueryCache cache = new IndicesQueryCache(settings); + s.setQueryCache(cache); + + QueryCacheStats stats = cache.getStats(shard); + assertEquals(0L, stats.getCacheSize()); + assertEquals(0L, stats.getCacheCount()); + assertEquals(0L, stats.getHitCount()); + assertEquals(0L, stats.getMissCount()); + + assertEquals(1, s.count(new DummyQuery(0))); + + stats = cache.getStats(shard); + assertEquals(1L, stats.getCacheSize()); + assertEquals(1L, stats.getCacheCount()); + assertEquals(0L, stats.getHitCount()); + assertEquals(1L, stats.getMissCount()); + + for (int i = 1; i < 20; ++i) { + assertEquals(1, s.count(new DummyQuery(i))); + } + + stats = cache.getStats(shard); + assertEquals(10L, stats.getCacheSize()); + assertEquals(20L, stats.getCacheCount()); + assertEquals(0L, stats.getHitCount()); + assertEquals(20L, stats.getMissCount()); + + s.count(new DummyQuery(10)); + + stats = cache.getStats(shard); + assertEquals(10L, stats.getCacheSize()); + assertEquals(20L, stats.getCacheCount()); + assertEquals(1L, stats.getHitCount()); + assertEquals(20L, stats.getMissCount()); + + IOUtils.close(r, dir); + + 
// got emptied, but no changes to other metrics + stats = cache.getStats(shard); + assertEquals(0L, stats.getCacheSize()); + assertEquals(20L, stats.getCacheCount()); + assertEquals(1L, stats.getHitCount()); + assertEquals(20L, stats.getMissCount()); + + cache.onClose(shard); + + // forgot everything + stats = cache.getStats(shard); + assertEquals(0L, stats.getCacheSize()); + assertEquals(0L, stats.getCacheCount()); + assertEquals(0L, stats.getHitCount()); + assertEquals(0L, stats.getMissCount()); + + cache.close(); // this triggers some assertions + } + + public void testTwoShards() throws IOException { + Directory dir1 = newDirectory(); + IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig()); + w1.addDocument(new Document()); + DirectoryReader r1 = DirectoryReader.open(w1, false); + w1.close(); + ShardId shard1 = new ShardId(new Index("index"), 0); + r1 = ElasticsearchDirectoryReader.wrap(r1, shard1); + IndexSearcher s1 = new IndexSearcher(r1); + s1.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); + + Directory dir2 = newDirectory(); + IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig()); + w2.addDocument(new Document()); + DirectoryReader r2 = DirectoryReader.open(w2, false); + w2.close(); + ShardId shard2 = new ShardId(new Index("index"), 1); + r2 = ElasticsearchDirectoryReader.wrap(r2, shard2); + IndexSearcher s2 = new IndexSearcher(r2); + s2.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); + + Settings settings = Settings.builder() + .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT, 10) + .build(); + IndicesQueryCache cache = new IndicesQueryCache(settings); + s1.setQueryCache(cache); + s2.setQueryCache(cache); + + assertEquals(1, s1.count(new DummyQuery(0))); + + QueryCacheStats stats1 = cache.getStats(shard1); + assertEquals(1L, stats1.getCacheSize()); + assertEquals(1L, stats1.getCacheCount()); + assertEquals(0L, stats1.getHitCount()); + assertEquals(1L, stats1.getMissCount()); + + QueryCacheStats stats2 = cache.getStats(shard2); + assertEquals(0L, stats2.getCacheSize()); + assertEquals(0L, stats2.getCacheCount()); + assertEquals(0L, stats2.getHitCount()); + assertEquals(0L, stats2.getMissCount()); + + assertEquals(1, s2.count(new DummyQuery(0))); + + stats1 = cache.getStats(shard1); + assertEquals(1L, stats1.getCacheSize()); + assertEquals(1L, stats1.getCacheCount()); + assertEquals(0L, stats1.getHitCount()); + assertEquals(1L, stats1.getMissCount()); + + stats2 = cache.getStats(shard2); + assertEquals(1L, stats2.getCacheSize()); + assertEquals(1L, stats2.getCacheCount()); + assertEquals(0L, stats2.getHitCount()); + assertEquals(1L, stats2.getMissCount()); + + for (int i = 0; i < 20; ++i) { + assertEquals(1, s2.count(new DummyQuery(i))); + } + + stats1 = cache.getStats(shard1); + assertEquals(0L, stats1.getCacheSize()); // evicted + assertEquals(1L, stats1.getCacheCount()); + assertEquals(0L, stats1.getHitCount()); + assertEquals(1L, stats1.getMissCount()); + + stats2 = cache.getStats(shard2); + assertEquals(10L, stats2.getCacheSize()); + assertEquals(20L, stats2.getCacheCount()); + assertEquals(1L, stats2.getHitCount()); + assertEquals(20L, stats2.getMissCount()); + + IOUtils.close(r1, dir1); + + // no changes + stats1 = cache.getStats(shard1); + assertEquals(0L, stats1.getCacheSize()); + assertEquals(1L, stats1.getCacheCount()); + assertEquals(0L, stats1.getHitCount()); + assertEquals(1L, stats1.getMissCount()); + + stats2 = cache.getStats(shard2); + assertEquals(10L, stats2.getCacheSize()); + assertEquals(20L, stats2.getCacheCount()); + 
assertEquals(1L, stats2.getHitCount()); + assertEquals(20L, stats2.getMissCount()); + + cache.onClose(shard1); + + // forgot everything about shard1 + stats1 = cache.getStats(shard1); + assertEquals(0L, stats1.getCacheSize()); + assertEquals(0L, stats1.getCacheCount()); + assertEquals(0L, stats1.getHitCount()); + assertEquals(0L, stats1.getMissCount()); + + stats2 = cache.getStats(shard2); + assertEquals(10L, stats2.getCacheSize()); + assertEquals(20L, stats2.getCacheCount()); + assertEquals(1L, stats2.getHitCount()); + assertEquals(20L, stats2.getMissCount()); + + IOUtils.close(r2, dir2); + cache.onClose(shard2); + + // forgot everything about shard2 + stats1 = cache.getStats(shard1); + assertEquals(0L, stats1.getCacheSize()); + assertEquals(0L, stats1.getCacheCount()); + assertEquals(0L, stats1.getHitCount()); + assertEquals(0L, stats1.getMissCount()); + + stats2 = cache.getStats(shard2); + assertEquals(0L, stats2.getCacheSize()); + assertEquals(0L, stats2.getCacheCount()); + assertEquals(0L, stats2.getHitCount()); + assertEquals(0L, stats2.getMissCount()); + + cache.close(); // this triggers some assertions + } + + // Make sure the cache behaves correctly when a segment that is associated + // with an empty cache gets closed. In that particular case, the eviction + // callback is called with a number of evicted entries equal to 0 + // see https://github.com/elastic/elasticsearch/issues/15043 + public void testStatsOnEviction() throws IOException { + Directory dir1 = newDirectory(); + IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig()); + w1.addDocument(new Document()); + DirectoryReader r1 = DirectoryReader.open(w1, false); + w1.close(); + ShardId shard1 = new ShardId(new Index("index"), 0); + r1 = ElasticsearchDirectoryReader.wrap(r1, shard1); + IndexSearcher s1 = new IndexSearcher(r1); + s1.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); + + Directory dir2 = newDirectory(); + IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig()); + w2.addDocument(new Document()); + DirectoryReader r2 = DirectoryReader.open(w2, false); + w2.close(); + ShardId shard2 = new ShardId(new Index("index"), 1); + r2 = ElasticsearchDirectoryReader.wrap(r2, shard2); + IndexSearcher s2 = new IndexSearcher(r2); + s2.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); + + Settings settings = Settings.builder() + .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT, 10) + .build(); + IndicesQueryCache cache = new IndicesQueryCache(settings); + s1.setQueryCache(cache); + s2.setQueryCache(cache); + + assertEquals(1, s1.count(new DummyQuery(0))); + + for (int i = 1; i <= 20; ++i) { + assertEquals(1, s2.count(new DummyQuery(i))); + } + + QueryCacheStats stats1 = cache.getStats(shard1); + assertEquals(0L, stats1.getCacheSize()); + assertEquals(1L, stats1.getCacheCount()); + + // this used to fail because we were evicting an empty cache on + // the segment from r1 + IOUtils.close(r1, dir1); + cache.onClose(shard1); + + IOUtils.close(r2, dir2); + cache.onClose(shard2); + + cache.close(); // this triggers some assertions + } + +} diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java index 305688b3a6e..ed4b95c03d8 100644 --- a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -22,8 +22,8 @@ package org.elasticsearch.indices.mapping; import 
org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.Priority; @@ -31,7 +31,6 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.hamcrest.Matchers; @@ -43,22 +42,14 @@ import java.util.Map; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicBoolean; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_METADATA; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY; +import static org.elasticsearch.cluster.metadata.IndexMetaData.*; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasEntry; -import static org.hamcrest.Matchers.hasKey; -import static org.hamcrest.Matchers.not; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.hamcrest.Matchers.*; @ClusterScope(randomDynamicTemplates = false) public class UpdateMappingIntegrationIT extends ESIntegTestCase { + public void testDynamicUpdates() throws Exception { client().admin().indices().prepareCreate("test") .setSettings( @@ -148,8 +139,8 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { client().admin().indices().preparePutMapping("test").setType("type") .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"integer\"}}}}").execute().actionGet(); fail("Expected MergeMappingException"); - } catch (MergeMappingException e) { - assertThat(e.getMessage(), containsString("mapper [body] of different type")); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("mapper [body] cannot be changed from type [string] to [int]")); } } @@ -162,7 +153,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"string\", \"norms\": { \"enabled\": true }}}}}").execute() .actionGet(); fail("Expected MergeMappingException"); - } catch (MergeMappingException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("mapper [body] has different [omit_norms]")); } } diff --git a/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerTests.java 
b/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerTests.java index 4dddc4b788f..6d8dda3afb9 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerTests.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerTests.java @@ -22,54 +22,51 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.test.ESSingleNodeTestCase; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.not; -public class IndexingMemoryControllerTests extends ESTestCase { +public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { static class MockController extends IndexingMemoryController { final static ByteSizeValue INACTIVE = new ByteSizeValue(-1); - final Map indexingBuffers = new HashMap<>(); - final Map translogBuffers = new HashMap<>(); + final Map indexingBuffers = new HashMap<>(); + final Map translogBuffers = new HashMap<>(); - final Map lastIndexTimeNanos = new HashMap<>(); - final Set activeShards = new HashSet<>(); + final Map lastIndexTimeNanos = new HashMap<>(); + final Set activeShards = new HashSet<>(); long currentTimeSec = TimeValue.timeValueNanos(System.nanoTime()).seconds(); public MockController(Settings settings) { super(Settings.builder() - .put(SHARD_INACTIVE_INTERVAL_TIME_SETTING, "200h") // disable it - .put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "1ms") // nearly immediate - .put(settings) - .build(), - null, null, 100 * 1024 * 1024); // fix jvm mem size to 100mb + .put(SHARD_INACTIVE_INTERVAL_TIME_SETTING, "200h") // disable it + .put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "1ms") // nearly immediate + .put(settings) + .build(), + null, null, 100 * 1024 * 1024); // fix jvm mem size to 100mb } - public void deleteShard(ShardId id) { + public void deleteShard(IndexShard id) { indexingBuffers.remove(id); translogBuffers.remove(id); } - public void assertBuffers(ShardId id, ByteSizeValue indexing, ByteSizeValue translog) { + public void assertBuffers(IndexShard id, ByteSizeValue indexing, ByteSizeValue translog) { assertThat(indexingBuffers.get(id), equalTo(indexing)); assertThat(translogBuffers.get(id), equalTo(translog)); } - public void assertInActive(ShardId id) { + public void assertInactive(IndexShard id) { assertThat(indexingBuffers.get(id), equalTo(INACTIVE)); assertThat(translogBuffers.get(id), equalTo(INACTIVE)); } @@ -80,36 +77,31 @@ public class IndexingMemoryControllerTests extends ESTestCase { } @Override - protected List availableShards() { + protected List availableShards() { return new ArrayList<>(indexingBuffers.keySet()); } @Override - protected boolean shardAvailable(ShardId shardId) { - return indexingBuffers.containsKey(shardId); + protected boolean shardAvailable(IndexShard 
shard) { + return indexingBuffers.containsKey(shard); } @Override - protected Boolean getShardActive(ShardId shardId) { - return activeShards.contains(shardId); + protected void updateShardBuffers(IndexShard shard, ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) { + indexingBuffers.put(shard, shardIndexingBufferSize); + translogBuffers.put(shard, shardTranslogBufferSize); } @Override - protected void updateShardBuffers(ShardId shardId, ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) { - indexingBuffers.put(shardId, shardIndexingBufferSize); - translogBuffers.put(shardId, shardTranslogBufferSize); - } - - @Override - protected Boolean checkIdle(ShardId shardId) { + protected boolean checkIdle(IndexShard shard) { final TimeValue inactiveTime = settings.getAsTime(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, TimeValue.timeValueMinutes(5)); - Long ns = lastIndexTimeNanos.get(shardId); + Long ns = lastIndexTimeNanos.get(shard); if (ns == null) { - return null; + return true; } else if (currentTimeInNanos() - ns >= inactiveTime.nanos()) { - indexingBuffers.put(shardId, INACTIVE); - translogBuffers.put(shardId, INACTIVE); - activeShards.remove(shardId); + indexingBuffers.put(shard, INACTIVE); + translogBuffers.put(shard, INACTIVE); + activeShards.remove(shard); return true; } else { return false; @@ -120,118 +112,126 @@ public class IndexingMemoryControllerTests extends ESTestCase { currentTimeSec += sec; } - public void simulateIndexing(ShardId shardId) { - lastIndexTimeNanos.put(shardId, currentTimeInNanos()); - if (indexingBuffers.containsKey(shardId) == false) { + public void simulateIndexing(IndexShard shard) { + lastIndexTimeNanos.put(shard, currentTimeInNanos()); + if (indexingBuffers.containsKey(shard) == false) { // First time we are seeing this shard; start it off with inactive buffers as IndexShard does: - indexingBuffers.put(shardId, IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER); - translogBuffers.put(shardId, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER); + indexingBuffers.put(shard, IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER); + translogBuffers.put(shard, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER); } - activeShards.add(shardId); + activeShards.add(shard); forceCheck(); } } public void testShardAdditionAndRemoval() { + createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 3).put(SETTING_NUMBER_OF_REPLICAS, 0).build()); + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexService test = indicesService.indexService("test"); + MockController controller = new MockController(Settings.builder() - .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb") - .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb").build()); - final ShardId shard1 = new ShardId("test", 1); - controller.simulateIndexing(shard1); - controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K + .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb") + .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb").build()); + IndexShard shard0 = test.getShard(0); + controller.simulateIndexing(shard0); + controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K // add another shard - final ShardId shard2 = new ShardId("test", 2); - controller.simulateIndexing(shard2); 
+ IndexShard shard1 = test.getShard(1); + controller.simulateIndexing(shard1); + controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB)); controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB)); - controller.assertBuffers(shard2, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB)); // remove first shard - controller.deleteShard(shard1); + controller.deleteShard(shard0); controller.forceCheck(); - controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K + controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K // remove second shard - controller.deleteShard(shard2); + controller.deleteShard(shard1); controller.forceCheck(); // add a new one - final ShardId shard3 = new ShardId("test", 3); - controller.simulateIndexing(shard3); - controller.assertBuffers(shard3, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K + IndexShard shard2 = test.getShard(2); + controller.simulateIndexing(shard2); + controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K } public void testActiveInactive() { - MockController controller = new MockController(Settings.builder() - .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb") - .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb") - .put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "5s") - .build()); + createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0).build()); + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexService test = indicesService.indexService("test"); - final ShardId shard1 = new ShardId("test", 1); + MockController controller = new MockController(Settings.builder() + .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb") + .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb") + .put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "5s") + .build()); + + IndexShard shard0 = test.getShard(0); + controller.simulateIndexing(shard0); + IndexShard shard1 = test.getShard(1); controller.simulateIndexing(shard1); - final ShardId shard2 = new ShardId("test", 2); - controller.simulateIndexing(shard2); + controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB)); controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB)); - controller.assertBuffers(shard2, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB)); // index into both shards, move the clock and see that they are still active + controller.simulateIndexing(shard0); controller.simulateIndexing(shard1); - controller.simulateIndexing(shard2); controller.incrementTimeSec(10); controller.forceCheck(); // both shards now inactive - controller.assertInActive(shard1); - controller.assertInActive(shard2); + controller.assertInactive(shard0); + controller.assertInactive(shard1); // index into one shard only, see it becomes active - controller.simulateIndexing(shard1); - controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); - 
controller.assertInActive(shard2); + controller.simulateIndexing(shard0); + controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); + controller.assertInactive(shard1); controller.incrementTimeSec(3); // increment but not enough to become inactive controller.forceCheck(); - controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); - controller.assertInActive(shard2); + controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); + controller.assertInactive(shard1); controller.incrementTimeSec(3); // increment some more controller.forceCheck(); - controller.assertInActive(shard1); - controller.assertInActive(shard2); + controller.assertInactive(shard0); + controller.assertInactive(shard1); // index some and shard becomes immediately active - controller.simulateIndexing(shard2); - controller.assertInActive(shard1); - controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); + controller.simulateIndexing(shard1); + controller.assertInactive(shard0); + controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); } public void testMinShardBufferSizes() { MockController controller = new MockController(Settings.builder() - .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb") - .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb") - .put(IndexingMemoryController.MIN_SHARD_INDEX_BUFFER_SIZE_SETTING, "6mb") - .put(IndexingMemoryController.MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "40kb").build()); + .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb") + .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb") + .put(IndexingMemoryController.MIN_SHARD_INDEX_BUFFER_SIZE_SETTING, "6mb") + .put(IndexingMemoryController.MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "40kb").build()); assertTwoActiveShards(controller, new ByteSizeValue(6, ByteSizeUnit.MB), new ByteSizeValue(40, ByteSizeUnit.KB)); } public void testMaxShardBufferSizes() { MockController controller = new MockController(Settings.builder() - .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb") - .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb") - .put(IndexingMemoryController.MAX_SHARD_INDEX_BUFFER_SIZE_SETTING, "3mb") - .put(IndexingMemoryController.MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "10kb").build()); + .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb") + .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb") + .put(IndexingMemoryController.MAX_SHARD_INDEX_BUFFER_SIZE_SETTING, "3mb") + .put(IndexingMemoryController.MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "10kb").build()); assertTwoActiveShards(controller, new ByteSizeValue(3, ByteSizeUnit.MB), new ByteSizeValue(10, ByteSizeUnit.KB)); } public void testRelativeBufferSizes() { MockController controller = new MockController(Settings.builder() - .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "50%") - .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.5%") - .build()); + .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "50%") + .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.5%") + .build()); assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(50, ByteSizeUnit.MB))); assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB))); @@ 
-240,10 +240,10 @@ public class IndexingMemoryControllerTests extends ESTestCase { public void testMinBufferSizes() { MockController controller = new MockController(Settings.builder() - .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "0.001%") - .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.001%") - .put(IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, "6mb") - .put(IndexingMemoryController.MIN_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build()); + .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "0.001%") + .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.001%") + .put(IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, "6mb") + .put(IndexingMemoryController.MIN_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build()); assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(6, ByteSizeUnit.MB))); assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB))); @@ -251,23 +251,24 @@ public class IndexingMemoryControllerTests extends ESTestCase { public void testMaxBufferSizes() { MockController controller = new MockController(Settings.builder() - .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "90%") - .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "90%") - .put(IndexingMemoryController.MAX_INDEX_BUFFER_SIZE_SETTING, "6mb") - .put(IndexingMemoryController.MAX_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build()); + .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "90%") + .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "90%") + .put(IndexingMemoryController.MAX_INDEX_BUFFER_SIZE_SETTING, "6mb") + .put(IndexingMemoryController.MAX_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build()); assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(6, ByteSizeUnit.MB))); assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB))); } protected void assertTwoActiveShards(MockController controller, ByteSizeValue indexBufferSize, ByteSizeValue translogBufferSize) { - final ShardId shard1 = new ShardId("test", 1); + createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0).build()); + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexService test = indicesService.indexService("test"); + IndexShard shard0 = test.getShard(0); + controller.simulateIndexing(shard0); + IndexShard shard1 = test.getShard(1); controller.simulateIndexing(shard1); - final ShardId shard2 = new ShardId("test", 2); - controller.simulateIndexing(shard2); + controller.assertBuffers(shard0, indexBufferSize, translogBufferSize); controller.assertBuffers(shard1, indexBufferSize, translogBufferSize); - controller.assertBuffers(shard2, indexBufferSize, translogBufferSize); - } - } diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java index ab3f825cecc..96611aeca8a 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java @@ -161,6 +161,7 @@ public class RareClusterStateIT extends ESIntegTestCase { } @TestLogging("cluster.service:TRACE") + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/14932") public void testDeleteCreateInOneBulk() throws Exception { internalCluster().startNodesAsync(2, Settings.builder() .put(DiscoveryModule.DISCOVERY_TYPE_KEY, "zen") diff 
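The buffer sizes asserted in the IndexingMemoryControllerTests changes above (a 10mb indexing budget becoming 5mb per shard with two active shards, a 100kb translog budget becoming 50kb each, and a lone shard's translog buffer capped at 64kb) follow from dividing the overall budget evenly across active shards and clamping the result to per-shard minimum and maximum sizes. A hypothetical sketch of that arithmetic, not the controller's actual implementation:

// Hypothetical helper, not the controller's actual code: split an overall budget
// evenly across active shards and clamp to per-shard minimum/maximum sizes.
final class BufferMath {

    static long perShardBytes(long totalBytes, int activeShards, long minBytes, long maxBytes) {
        long share = totalBytes / Math.max(1, activeShards);
        return Math.max(minBytes, Math.min(maxBytes, share));
    }

    public static void main(String[] args) {
        final long kb = 1024, mb = 1024 * kb;
        // 10mb indexing budget, two active shards -> 5mb each
        System.out.println(perShardBytes(10 * mb, 2, 0, Long.MAX_VALUE) / mb); // 5
        // 100kb translog budget, two active shards -> 50kb each
        System.out.println(perShardBytes(100 * kb, 2, 0, 64 * kb) / kb);       // 50
        // with a single active shard the 64kb per-shard translog cap kicks in
        System.out.println(perShardBytes(100 * kb, 1, 0, 64 * kb) / kb);       // 64
    }
}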
--git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index 12417d4c18f..cd3977e5305 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -27,13 +27,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.IndexRoutingTable; -import org.elasticsearch.cluster.routing.IndexShardRoutingTable; -import org.elasticsearch.cluster.routing.RoutingNode; -import org.elasticsearch.cluster.routing.RoutingTable; -import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.cluster.routing.*; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; @@ -56,11 +50,7 @@ import org.elasticsearch.test.disruption.BlockClusterStateProcessing; import org.elasticsearch.test.disruption.SingleNodeDisruption; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.ConnectTransportException; -import org.elasticsearch.transport.TransportException; -import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.transport.TransportRequestOptions; -import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.*; import java.io.IOException; import java.nio.file.Files; @@ -407,7 +397,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { // disable relocations when we do this, to make sure the shards are not relocated from node2 // due to rebalancing, and delete its content client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE)).get(); - internalCluster().getInstance(ClusterService.class, nonMasterNode).submitStateUpdateTask("test", Priority.IMMEDIATE, new ClusterStateUpdateTask() { + internalCluster().getInstance(ClusterService.class, nonMasterNode).submitStateUpdateTask("test", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public ClusterState execute(ClusterState currentState) throws Exception { IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder("test"); diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index b9da71d75aa..ca2025ced1b 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -118,7 +118,8 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { assertHitCount(searchResponse, 1); assertThat(searchResponse.getHits().getAt(0).field("field1").value().toString(), equalTo("value1")); - 
assertThat(searchResponse.getHits().getAt(0).field("field2").value().toString(), equalTo("value 2")); // this will still be loaded because of the source feature + // field2 is not stored. + assertThat(searchResponse.getHits().getAt(0).field("field2"), nullValue()); client().prepareIndex("text_index", "type1", "1").setSource("field1", "value1", "field2", "value 2").setRefresh(true).execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java index b236f9e2795..000365f6a20 100644 --- a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.plugins; import org.elasticsearch.Version; -import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo; +import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -259,14 +259,14 @@ public class PluginInfoTests extends ESTestCase { } public void testPluginListSorted() { - PluginsInfo pluginsInfo = new PluginsInfo(5); - pluginsInfo.add(new PluginInfo("c", "foo", true, "dummy", true, "dummyclass", true)); - pluginsInfo.add(new PluginInfo("b", "foo", true, "dummy", true, "dummyclass", true)); - pluginsInfo.add(new PluginInfo("e", "foo", true, "dummy", true, "dummyclass", true)); - pluginsInfo.add(new PluginInfo("a", "foo", true, "dummy", true, "dummyclass", true)); - pluginsInfo.add(new PluginInfo("d", "foo", true, "dummy", true, "dummyclass", true)); + PluginsAndModules pluginsInfo = new PluginsAndModules(); + pluginsInfo.addPlugin(new PluginInfo("c", "foo", true, "dummy", true, "dummyclass", true)); + pluginsInfo.addPlugin(new PluginInfo("b", "foo", true, "dummy", true, "dummyclass", true)); + pluginsInfo.addPlugin(new PluginInfo("e", "foo", true, "dummy", true, "dummyclass", true)); + pluginsInfo.addPlugin(new PluginInfo("a", "foo", true, "dummy", true, "dummyclass", true)); + pluginsInfo.addPlugin(new PluginInfo("d", "foo", true, "dummy", true, "dummyclass", true)); - final List infos = pluginsInfo.getInfos(); + final List infos = pluginsInfo.getPluginInfos(); List names = infos.stream().map((input) -> input.getName()).collect(Collectors.toList()); assertThat(names, contains("a", "b", "c", "d", "e")); } diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index fd528f48f33..660f1015c3d 100644 --- a/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -26,6 +26,8 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexModule; import org.elasticsearch.test.ESTestCase; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.Arrays; public class PluginsServiceTests extends ESTestCase { @@ -81,7 +83,7 @@ public class PluginsServiceTests extends ESTestCase { } static PluginsService newPluginsService(Settings settings, Class... 
classpathPlugins) { - return new PluginsService(settings, new Environment(settings).pluginsFile(), Arrays.asList(classpathPlugins)); + return new PluginsService(settings, null, new Environment(settings).pluginsFile(), Arrays.asList(classpathPlugins)); } public void testAdditionalSettings() { @@ -123,4 +125,15 @@ public class PluginsServiceTests extends ESTestCase { assertEquals("boom", ex.getCause().getCause().getMessage()); } } + + public void testExistingPluginMissingDescriptor() throws Exception { + Path pluginsDir = createTempDir(); + Files.createDirectory(pluginsDir.resolve("plugin-missing-descriptor")); + try { + PluginsService.getPluginBundles(pluginsDir); + fail(); + } catch (IllegalStateException e) { + assertTrue(e.getMessage(), e.getMessage().contains("Could not load plugin descriptor for existing plugin")); + } + } } diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index 5468e095de4..b8bac5090c5 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -225,7 +225,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { } public void testRecoverWhileRelocating() throws Exception { - final int numShards = between(2, 10); + final int numShards = between(2, 5); final int numReplicas = 0; logger.info("--> creating test index ..."); int allowNodes = 2; diff --git a/core/src/test/java/org/elasticsearch/script/ClassPermissionTests.java b/core/src/test/java/org/elasticsearch/script/ClassPermissionTests.java new file mode 100644 index 00000000000..05a65363ff5 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/script/ClassPermissionTests.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.script; + +import org.elasticsearch.test.ESTestCase; + +import java.security.AllPermission; +import java.security.PermissionCollection; + +/** Very simple sanity checks for {@link ClassPermission} */ +public class ClassPermissionTests extends ESTestCase { + + public void testEquals() { + assertEquals(new ClassPermission("pkg.MyClass"), new ClassPermission("pkg.MyClass")); + assertFalse(new ClassPermission("pkg.MyClass").equals(new AllPermission())); + } + + public void testImplies() { + assertTrue(new ClassPermission("pkg.MyClass").implies(new ClassPermission("pkg.MyClass"))); + assertFalse(new ClassPermission("pkg.MyClass").implies(new ClassPermission("pkg.MyOtherClass"))); + assertFalse(new ClassPermission("pkg.MyClass").implies(null)); + assertFalse(new ClassPermission("pkg.MyClass").implies(new AllPermission())); + } + + public void testStandard() { + assertTrue(new ClassPermission("<>").implies(new ClassPermission("java.lang.Math"))); + assertFalse(new ClassPermission("<>").implies(new ClassPermission("pkg.MyClass"))); + } + + public void testPermissionCollection() { + ClassPermission math = new ClassPermission("java.lang.Math"); + PermissionCollection collection = math.newPermissionCollection(); + collection.add(math); + assertTrue(collection.implies(new ClassPermission("java.lang.Math"))); + assertFalse(collection.implies(new ClassPermission("pkg.MyClass"))); + } + + public void testPermissionCollectionStandard() { + ClassPermission standard = new ClassPermission("<>"); + PermissionCollection collection = standard.newPermissionCollection(); + collection.add(standard); + assertTrue(collection.implies(new ClassPermission("java.lang.Math"))); + assertFalse(collection.implies(new ClassPermission("pkg.MyClass"))); + } + + /** not recommended but we test anyway */ + public void testWildcards() { + assertTrue(new ClassPermission("*").implies(new ClassPermission("pkg.MyClass"))); + assertTrue(new ClassPermission("pkg.*").implies(new ClassPermission("pkg.MyClass"))); + assertTrue(new ClassPermission("pkg.*").implies(new ClassPermission("pkg.sub.MyClass"))); + assertFalse(new ClassPermission("pkg.My*").implies(new ClassPermission("pkg.MyClass"))); + assertFalse(new ClassPermission("pkg*").implies(new ClassPermission("pkg.MyClass"))); + } + + public void testPermissionCollectionWildcards() { + ClassPermission lang = new ClassPermission("java.lang.*"); + PermissionCollection collection = lang.newPermissionCollection(); + collection.add(lang); + assertTrue(collection.implies(new ClassPermission("java.lang.Math"))); + assertFalse(collection.implies(new ClassPermission("pkg.MyClass"))); + } +} diff --git a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java index f386e829c64..02fad319846 100644 --- a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; @@ -55,7 +56,7 @@ public class NativeScriptTests extends ESTestCase { Injector injector = new ModulesBuilder().add( new 
EnvironmentModule(new Environment(settings)), new ThreadPoolModule(new ThreadPool(settings)), - new SettingsModule(settings), + new SettingsModule(settings, new SettingsFilter(settings)), scriptModule).createInjector(); ScriptService scriptService = injector.getInstance(ScriptService.class); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsBackwardCompatibilityIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsBackwardCompatibilityIT.java index d6afb35547f..9273621dee5 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsBackwardCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsBackwardCompatibilityIT.java @@ -18,91 +18,18 @@ */ package org.elasticsearch.search.aggregations.bucket; -import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; -import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder; -import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; -import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder; import org.elasticsearch.test.ESBackcompatTestCase; +import org.elasticsearch.test.search.aggregations.bucket.SharedSignificantTermsTestMethods; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; import java.util.concurrent.ExecutionException; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; -import static org.hamcrest.Matchers.equalTo; - -/** - */ public class SignificantTermsBackwardCompatibilityIT extends ESBackcompatTestCase { - static final String INDEX_NAME = "testidx"; - static final String DOC_TYPE = "doc"; - static final String TEXT_FIELD = "text"; - static final String CLASS_FIELD = "class"; - /** * Test for streaming significant terms buckets to old es versions. */ - public void testBucketStreaming() throws IOException, ExecutionException, InterruptedException { - logger.debug("testBucketStreaming: indexing documents"); - String type = randomBoolean() ? 
"string" : "long"; - String settings = "{\"index.number_of_shards\": 5, \"index.number_of_replicas\": 0}"; - index01Docs(type, settings); - ensureGreen(); - logClusterState(); - checkSignificantTermsAggregationCorrect(); - logger.debug("testBucketStreaming: done testing significant terms while upgrading"); - } - - private void index01Docs(String type, String settings) throws ExecutionException, InterruptedException { - String mappings = "{\"doc\": {\"properties\":{\"" + TEXT_FIELD + "\": {\"type\":\"" + type + "\"},\"" + CLASS_FIELD - + "\": {\"type\":\"string\"}}}}"; - assertAcked(prepareCreate(INDEX_NAME).setSettings(settings).addMapping("doc", mappings)); - String[] gb = {"0", "1"}; - List indexRequestBuilderList = new ArrayList<>(); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1") - .setSource(TEXT_FIELD, "1", CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "2") - .setSource(TEXT_FIELD, "1", CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "3") - .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "4") - .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "5") - .setSource(TEXT_FIELD, gb, CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "6") - .setSource(TEXT_FIELD, gb, CLASS_FIELD, "0")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "7") - .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); - indexRandom(true, indexRequestBuilderList); - } - - private void checkSignificantTermsAggregationCorrect() { - - SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) - .addAggregation(new TermsBuilder("class").field(CLASS_FIELD).subAggregation( - new SignificantTermsBuilder("sig_terms") - .field(TEXT_FIELD))) - .execute() - .actionGet(); - assertSearchResponse(response); - StringTerms classes = response.getAggregations().get("class"); - assertThat(classes.getBuckets().size(), equalTo(2)); - for (Terms.Bucket classBucket : classes.getBuckets()) { - Map aggs = classBucket.getAggregations().asMap(); - assertTrue(aggs.containsKey("sig_terms")); - SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms"); - assertThat(agg.getBuckets().size(), equalTo(1)); - String term = agg.iterator().next().getKeyAsString(); - String classTerm = classBucket.getKeyAsString(); - assertTrue(term.equals(classTerm)); - } + public void testAggregateAndCheckFromSeveralShards() throws IOException, ExecutionException, InterruptedException { + SharedSignificantTermsTestMethods.aggregateAndCheckFromSeveralShards(this); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index dc54aa2d9f6..1f77ca5bb64 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -39,54 +38,39 @@ import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuil import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; import org.elasticsearch.search.aggregations.bucket.script.NativeSignificanceScoreScriptNoParams; import org.elasticsearch.search.aggregations.bucket.script.NativeSignificanceScoreScriptWithParams; -import org.elasticsearch.search.aggregations.bucket.significant.SignificantStringTerms; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorFactory; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ScriptHeuristic; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicBuilder; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.*; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.search.aggregations.bucket.SharedSignificantTermsTestMethods; +import org.junit.Test; import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.concurrent.ExecutionException; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.*; /** * */ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { + static final String INDEX_NAME = "testidx"; static final String DOC_TYPE = "doc"; static final String TEXT_FIELD = "text"; static final String CLASS_FIELD = "class"; - @Override protected Collection> nodePlugins() { return 
pluginList(CustomSignificanceHeuristicPlugin.class); @@ -99,7 +83,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { public void testPlugin() throws Exception { String type = randomBoolean() ? "string" : "long"; String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}"; - index01Docs(type, settings); + SharedSignificantTermsTestMethods.index01Docs(type, settings, this); SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) .addAggregation(new TermsBuilder("class") .field(CLASS_FIELD) @@ -252,7 +236,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { public void testXContentResponse() throws Exception { String type = randomBoolean() ? "string" : "long"; String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}"; - index01Docs(type, settings); + SharedSignificantTermsTestMethods.index01Docs(type, settings, this); SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) .addAggregation(new TermsBuilder("class").field(CLASS_FIELD).subAggregation(new SignificantTermsBuilder("sig_terms").field(TEXT_FIELD))) .execute() @@ -327,7 +311,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { public void testBackgroundVsSeparateSet() throws Exception { String type = randomBoolean() ? "string" : "long"; String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}"; - index01Docs(type, settings); + SharedSignificantTermsTestMethods.index01Docs(type, settings, this); testBackgroundVsSeparateSet(new MutualInformation.MutualInformationBuilder(true, true), new MutualInformation.MutualInformationBuilder(true, false)); testBackgroundVsSeparateSet(new ChiSquare.ChiSquareBuilder(true, true), new ChiSquare.ChiSquareBuilder(true, false)); testBackgroundVsSeparateSet(new GND.GNDBuilder(true), new GND.GNDBuilder(false)); @@ -388,28 +372,6 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { assertThat(score11Background, equalTo(score11SeparateSets)); } - private void index01Docs(String type, String settings) throws ExecutionException, InterruptedException { - String mappings = "{\"doc\": {\"properties\":{\"text\": {\"type\":\"" + type + "\"}}}}"; - assertAcked(prepareCreate(INDEX_NAME).setSettings(settings).addMapping("doc", mappings)); - String[] gb = {"0", "1"}; - List indexRequestBuilderList = new ArrayList<>(); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1") - .setSource(TEXT_FIELD, "1", CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "2") - .setSource(TEXT_FIELD, "1", CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "3") - .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "4") - .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "5") - .setSource(TEXT_FIELD, gb, CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "6") - .setSource(TEXT_FIELD, gb, CLASS_FIELD, "0")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "7") - .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); - indexRandom(true, false, indexRequestBuilderList); - } - public void testScoresEqualForPositiveAndNegative() throws Exception { indexEqualTestData(); 
testScoresEqualForPositiveAndNegative(new MutualInformation.MutualInformationBuilder(true, true)); @@ -528,4 +490,9 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilderList); } + + public void testReduceFromSeveralShards() throws IOException, ExecutionException, InterruptedException { + SharedSignificantTermsTestMethods.aggregateAndCheckFromSeveralShards(this); + } + } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java index b10dfd31b35..d20dff0ae05 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java @@ -103,13 +103,13 @@ public class SignificanceHeuristicTests extends ESTestCase { if (randomBoolean()) { buckets.add(new SignificantLongTerms.Bucket(1, 2, 3, 4, 123, InternalAggregations.EMPTY, null)); sTerms[0] = new SignificantLongTerms(10, 20, "some_name", null, 1, 1, heuristic, buckets, - Collections.EMPTY_LIST, null); + Collections.emptyList(), null); sTerms[1] = new SignificantLongTerms(); } else { BytesRef term = new BytesRef("someterm"); buckets.add(new SignificantStringTerms.Bucket(term, 1, 2, 3, 4, InternalAggregations.EMPTY)); - sTerms[0] = new SignificantStringTerms(10, 20, "some_name", 1, 1, heuristic, buckets, Collections.EMPTY_LIST, + sTerms[0] = new SignificantStringTerms(10, 20, "some_name", 1, 1, heuristic, buckets, Collections.emptyList(), null); sTerms[1] = new SignificantStringTerms(); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java index f2acc7c83a8..390e0cf5473 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -154,7 +154,7 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase { .endObject())); } assertAcked(prepareCreate(HIGH_CARD_IDX_NAME).setSettings(Settings.builder().put("number_of_shards", 2)) - .addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point", MULTI_VALUED_FIELD_NAME, "type=geo_point", NUMBER_FIELD_NAME, "type=long", "tag", "type=string,index=not_analyzed")); + .addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point", MULTI_VALUED_FIELD_NAME, "type=geo_point", NUMBER_FIELD_NAME, "type=long,store=true", "tag", "type=string,index=not_analyzed")); for (int i = 0; i < 2000; i++) { singleVal = singleValues[i % numUniqueGeoPoints]; @@ -196,8 +196,8 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase { SearchHitField hitField = searchHit.field(NUMBER_FIELD_NAME); assertThat("Hit " + i + " has wrong number of values", hitField.getValues().size(), equalTo(1)); - Integer value = hitField.getValue(); - assertThat("Hit " + i + " has wrong value", value, equalTo(i)); + Long value = hitField.getValue(); + assertThat("Hit " + i + " has wrong value", value.intValue(), equalTo(i)); } assertThat(totalHits, equalTo(2000l)); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 
f9c8c53bed3..65e71fe9c05 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -532,8 +532,8 @@ public class TopHitsIT extends ESIntegTestCase { topHits("hits").setSize(1) .highlighter(new HighlightBuilder().field("text")) .setExplain(true) - .addFieldDataField("field1") .addField("text") + .addFieldDataField("field1") .addScriptField("script", new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap())) .setFetchSource("text", null) .setVersion(true) @@ -569,8 +569,7 @@ public class TopHitsIT extends ESIntegTestCase { SearchHitField field = hit.field("field1"); assertThat(field.getValue().toString(), equalTo("5")); - field = hit.field("text"); - assertThat(field.getValue().toString(), equalTo("some text to entertain")); + assertThat(hit.getSource().get("text").toString(), equalTo("some text to entertain")); field = hit.field("script"); assertThat(field.getValue().toString(), equalTo("5")); diff --git a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index b52eec448c5..891ebf7e2d1 100644 --- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.*; @@ -79,7 +80,7 @@ public class SearchSourceBuilderTests extends ESTestCase { .put("path.home", createTempDir()) .build(); injector = new ModulesBuilder().add( - new SettingsModule(settings), + new SettingsModule(settings, new SettingsFilter(settings)), new ThreadPoolModule(new ThreadPool(settings)), new IndicesModule() { @Override diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index e360649e919..4be2b36fbe6 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexModule; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.query.HasChildQueryBuilder; import org.elasticsearch.index.query.IdsQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -1176,7 +1175,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { .startObject("_parent").field("type", "parent").endObject() .endObject().endObject()).get(); fail(); - } catch (MergeMappingException e) { + } catch (IllegalArgumentException e) { assertThat(e.toString(), containsString("Merge failed with failures {[The _parent field's type option can't be changed: [null]->[parent]")); } } diff --git 
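Several of the assertion changes above (SimpleIndexTemplateIT, TopHitsIT, AbstractGeoTestCase) hinge on the difference between stored fields and _source: hit.field(...) only returns a value for fields mapped with store=true, otherwise it is null and the value has to be read from the hit's source (or the mapping has to be changed, as the "type=long,store=true" edit does). A short sketch of that distinction with the Java client calls used in these tests; the index, type, and field names here are invented:

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.SearchHit;

// Sketch only: "idx", "doc" and the field names are made up; the mapping is assumed
// to store "stored_field" (store=true) but not "text".
class StoredVsSourceExample {

    static void show(Client client) {
        SearchResponse resp = client.prepareSearch("idx")
                .setTypes("doc")
                .addField("stored_field")      // returned through hit.field() because it is stored
                .setFetchSource("text", null)  // non-stored values have to come from _source
                .get();

        SearchHit hit = resp.getHits().getAt(0);
        Object stored = hit.field("stored_field").getValue(); // works for stored fields
        Object fromSource = hit.getSource().get("text");      // hit.field("text") would be null here
    }
}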
a/core/src/test/java/org/elasticsearch/search/compress/SearchSourceCompressTests.java b/core/src/test/java/org/elasticsearch/search/compress/SearchSourceCompressTests.java deleted file mode 100644 index d98574bc379..00000000000 --- a/core/src/test/java/org/elasticsearch/search/compress/SearchSourceCompressTests.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.compress; - -import org.elasticsearch.Version; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.Priority; -import org.elasticsearch.common.compress.Compressor; -import org.elasticsearch.common.compress.CompressorFactory; -import org.elasticsearch.common.compress.lzf.LZFTestCompressor; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.test.ESSingleNodeTestCase; - -import java.io.IOException; - -import static org.hamcrest.Matchers.equalTo; - -public class SearchSourceCompressTests extends ESSingleNodeTestCase { - public void testSourceCompressionLZF() throws IOException { - final Compressor defaultCompressor = CompressorFactory.defaultCompressor(); - try { - CompressorFactory.setDefaultCompressor(new LZFTestCompressor()); - verifySource(true); - verifySource(false); - verifySource(null); - } finally { - CompressorFactory.setDefaultCompressor(defaultCompressor); - } - } - - private void verifySource(Boolean compress) throws IOException { - try { - client().admin().indices().prepareDelete("test").execute().actionGet(); - } catch (Exception e) { - // ignore - } - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - createIndex("test", settings); - client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); - - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("_source").field("compress", compress).endObject() - .endObject().endObject().string(); - - client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet(); - - for (int i = 1; i < 100; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource(buildSource(i)).execute().actionGet(); - } - client().prepareIndex("test", "type1", Integer.toString(10000)).setSource(buildSource(10000)).execute().actionGet(); - - client().admin().indices().prepareRefresh().execute().actionGet(); - - for (int i = 1; i < 100; i++) 
{ - GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).execute().actionGet(); - assertThat(getResponse.getSourceAsBytes(), equalTo(buildSource(i).bytes().toBytes())); - } - GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(10000)).execute().actionGet(); - assertThat(getResponse.getSourceAsBytes(), equalTo(buildSource(10000).bytes().toBytes())); - - for (int i = 1; i < 100; i++) { - SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.idsQuery("type1").addIds(Integer.toString(i))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); - assertThat(searchResponse.getHits().getAt(0).source(), equalTo(buildSource(i).bytes().toBytes())); - } - } - - private XContentBuilder buildSource(int count) throws IOException { - XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); - StringBuilder sb = new StringBuilder(); - for (int j = 0; j < count; j++) { - sb.append("value").append(j).append(' '); - } - builder.field("field", sb.toString()); - return builder.endObject(); - } -} diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java new file mode 100644 index 00000000000..cefc232fddb --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java @@ -0,0 +1,332 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.highlight; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.index.query.IdsQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.search.highlight.HighlightBuilder.Field; +import org.elasticsearch.test.ESTestCase; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; + +public class HighlightBuilderTests extends ESTestCase { + + private static final int NUMBER_OF_TESTBUILDERS = 20; + private static NamedWriteableRegistry namedWriteableRegistry; + + /** + * setup for the whole base test class + */ + @BeforeClass + public static void init() { + if (namedWriteableRegistry == null) { + namedWriteableRegistry = new NamedWriteableRegistry(); + namedWriteableRegistry.registerPrototype(QueryBuilder.class, new MatchAllQueryBuilder()); + namedWriteableRegistry.registerPrototype(QueryBuilder.class, new IdsQueryBuilder()); + namedWriteableRegistry.registerPrototype(QueryBuilder.class, new TermQueryBuilder("field", "value")); + } + } + + @AfterClass + public static void afterClass() throws Exception { + namedWriteableRegistry = null; + } + + /** + * Test serialization and deserialization of the highlighter builder + */ + public void testSerialization() throws IOException { + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + HighlightBuilder original = randomHighlighterBuilder(); + HighlightBuilder deserialized = serializedCopy(original); + assertEquals(deserialized, original); + assertEquals(deserialized.hashCode(), original.hashCode()); + assertNotSame(deserialized, original); + } + } + + /** + * Test equality and hashCode properties + */ + public void testEqualsAndHashcode() throws IOException { + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + HighlightBuilder firstBuilder = randomHighlighterBuilder(); + assertFalse("highlighter is equal to null", firstBuilder.equals(null)); + assertFalse("highlighter is equal to incompatible type", firstBuilder.equals("")); + assertTrue("highlighter is not equal to self", firstBuilder.equals(firstBuilder)); + assertThat("same highlighter's hashcode returns different values if called multiple times", firstBuilder.hashCode(), + equalTo(firstBuilder.hashCode())); + assertThat("different highlighters should not be equal", mutate(firstBuilder), not(equalTo(firstBuilder))); + + HighlightBuilder secondBuilder = serializedCopy(firstBuilder); + assertTrue("highlighter is not equal to self", secondBuilder.equals(secondBuilder)); + assertTrue("highlighter is not equal to its copy", firstBuilder.equals(secondBuilder)); + assertTrue("equals is not symmetric", secondBuilder.equals(firstBuilder)); + assertThat("highlighter copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(firstBuilder.hashCode())); + + HighlightBuilder thirdBuilder = serializedCopy(secondBuilder); + assertTrue("highlighter is not equal to self", thirdBuilder.equals(thirdBuilder)); + 
assertTrue("highlighter is not equal to its copy", secondBuilder.equals(thirdBuilder)); + assertThat("highlighter copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(thirdBuilder.hashCode())); + assertTrue("equals is not transitive", firstBuilder.equals(thirdBuilder)); + assertThat("highlighter copy's hashcode is different from original hashcode", firstBuilder.hashCode(), equalTo(thirdBuilder.hashCode())); + assertTrue("equals is not symmetric", thirdBuilder.equals(secondBuilder)); + assertTrue("equals is not symmetric", thirdBuilder.equals(firstBuilder)); + } + } + + /** + * create random shape that is put under test + */ + private static HighlightBuilder randomHighlighterBuilder() { + HighlightBuilder testHighlighter = new HighlightBuilder(); + setRandomCommonOptions(testHighlighter); + testHighlighter.useExplicitFieldOrder(randomBoolean()); + if (randomBoolean()) { + testHighlighter.encoder(randomFrom(Arrays.asList(new String[]{"default", "html"}))); + } + int numberOfFields = randomIntBetween(1,5); + for (int i = 0; i < numberOfFields; i++) { + Field field = new Field(randomAsciiOfLengthBetween(1, 10)); + setRandomCommonOptions(field); + if (randomBoolean()) { + field.fragmentOffset(randomIntBetween(1, 100)); + } + if (randomBoolean()) { + field.matchedFields(randomStringArray(0, 4)); + } + testHighlighter.field(field); + } + return testHighlighter; + } + + private static void setRandomCommonOptions(AbstractHighlighterBuilder highlightBuilder) { + if (randomBoolean()) { + highlightBuilder.preTags(randomStringArray(0, 3)); + } + if (randomBoolean()) { + highlightBuilder.postTags(randomStringArray(0, 3)); + } + if (randomBoolean()) { + highlightBuilder.fragmentSize(randomIntBetween(0, 100)); + } + if (randomBoolean()) { + highlightBuilder.numOfFragments(randomIntBetween(0, 10)); + } + if (randomBoolean()) { + highlightBuilder.highlighterType(randomAsciiOfLengthBetween(1, 10)); + } + if (randomBoolean()) { + highlightBuilder.fragmenter(randomAsciiOfLengthBetween(1, 10)); + } + if (randomBoolean()) { + QueryBuilder highlightQuery; + switch (randomInt(2)) { + case 0: + highlightQuery = new MatchAllQueryBuilder(); + break; + case 1: + highlightQuery = new IdsQueryBuilder(); + break; + default: + case 2: + highlightQuery = new TermQueryBuilder(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); + break; + } + highlightQuery.boost((float) randomDoubleBetween(0, 10, false)); + highlightBuilder.highlightQuery(highlightQuery); + } + if (randomBoolean()) { + highlightBuilder.order(randomAsciiOfLengthBetween(1, 10)); + } + if (randomBoolean()) { + highlightBuilder.highlightFilter(randomBoolean()); + } + if (randomBoolean()) { + highlightBuilder.forceSource(randomBoolean()); + } + if (randomBoolean()) { + highlightBuilder.boundaryMaxScan(randomIntBetween(0, 10)); + } + if (randomBoolean()) { + highlightBuilder.boundaryChars(randomAsciiOfLengthBetween(1, 10).toCharArray()); + } + if (randomBoolean()) { + highlightBuilder.noMatchSize(randomIntBetween(0, 10)); + } + if (randomBoolean()) { + highlightBuilder.phraseLimit(randomIntBetween(0, 10)); + } + if (randomBoolean()) { + int items = randomIntBetween(0, 5); + Map options = new HashMap(items); + for (int i = 0; i < items; i++) { + Object value = null; + switch (randomInt(2)) { + case 0: + value = randomAsciiOfLengthBetween(1, 10); + break; + case 1: + value = new Integer(randomInt(1000)); + break; + case 2: + value = new Boolean(randomBoolean()); + break; + } + 
options.put(randomAsciiOfLengthBetween(1, 10), value); + } + } + if (randomBoolean()) { + highlightBuilder.requireFieldMatch(randomBoolean()); + } + } + + @SuppressWarnings("unchecked") + private static void mutateCommonOptions(AbstractHighlighterBuilder highlightBuilder) { + switch (randomIntBetween(1, 16)) { + case 1: + highlightBuilder.preTags(randomStringArray(4, 6)); + break; + case 2: + highlightBuilder.postTags(randomStringArray(4, 6)); + break; + case 3: + highlightBuilder.fragmentSize(randomIntBetween(101, 200)); + break; + case 4: + highlightBuilder.numOfFragments(randomIntBetween(11, 20)); + break; + case 5: + highlightBuilder.highlighterType(randomAsciiOfLengthBetween(11, 20)); + break; + case 6: + highlightBuilder.fragmenter(randomAsciiOfLengthBetween(11, 20)); + break; + case 7: + highlightBuilder.highlightQuery(new TermQueryBuilder(randomAsciiOfLengthBetween(11, 20), randomAsciiOfLengthBetween(11, 20))); + break; + case 8: + highlightBuilder.order(randomAsciiOfLengthBetween(11, 20)); + break; + case 9: + highlightBuilder.highlightFilter(toggleOrSet(highlightBuilder.highlightFilter())); + case 10: + highlightBuilder.forceSource(toggleOrSet(highlightBuilder.forceSource())); + break; + case 11: + highlightBuilder.boundaryMaxScan(randomIntBetween(11, 20)); + break; + case 12: + highlightBuilder.boundaryChars(randomAsciiOfLengthBetween(11, 20).toCharArray()); + break; + case 13: + highlightBuilder.noMatchSize(randomIntBetween(11, 20)); + break; + case 14: + highlightBuilder.phraseLimit(randomIntBetween(11, 20)); + break; + case 15: + int items = 6; + Map options = new HashMap(items); + for (int i = 0; i < items; i++) { + options.put(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); + } + highlightBuilder.options(options); + break; + case 16: + highlightBuilder.requireFieldMatch(toggleOrSet(highlightBuilder.requireFieldMatch())); + break; + } + } + + private static Boolean toggleOrSet(Boolean flag) { + if (flag == null) { + return randomBoolean(); + } else { + return !flag.booleanValue(); + } + } + + private static String[] randomStringArray(int minSize, int maxSize) { + int size = randomIntBetween(minSize, maxSize); + String[] randomStrings = new String[size]; + for (int f = 0; f < size; f++) { + randomStrings[f] = randomAsciiOfLengthBetween(1, 10); + } + return randomStrings; + } + + /** + * mutate the given highlighter builder so the returned one is different in one aspect + */ + private static HighlightBuilder mutate(HighlightBuilder original) throws IOException { + HighlightBuilder mutation = serializedCopy(original); + if (randomBoolean()) { + mutateCommonOptions(mutation); + } else { + switch (randomIntBetween(0, 2)) { + // change settings that only exists on top level + case 0: + mutation.useExplicitFieldOrder(!original.useExplicitFieldOrder()); break; + case 1: + mutation.encoder(original.encoder() + randomAsciiOfLength(2)); break; + case 2: + if (randomBoolean()) { + // add another field + mutation.field(new Field(randomAsciiOfLength(10))); + } else { + // change existing fields + List originalFields = original.fields(); + Field fieldToChange = originalFields.get(randomInt(originalFields.size() - 1)); + if (randomBoolean()) { + fieldToChange.fragmentOffset(randomIntBetween(101, 200)); + } else { + fieldToChange.matchedFields(randomStringArray(5, 10)); + } + } + } + } + return mutation; + } + + private static HighlightBuilder serializedCopy(HighlightBuilder original) throws IOException { + try (BytesStreamOutput output = new BytesStreamOutput()) { + 
original.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + return HighlightBuilder.PROTOTYPE.readFrom(in); + } + } + } +} diff --git a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java index 8a81c49f71c..27cc3d3cfb8 100644 --- a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -333,9 +333,9 @@ public class SimpleSearchIT extends ESIntegTestCase { } public void testQueryNumericFieldWithRegex() throws Exception { - createIndex("idx"); - indexRandom(true, client().prepareIndex("idx", "type").setSource("num", 34)); - + assertAcked(prepareCreate("idx").addMapping("type", "num", "type=integer")); + ensureGreen("idx"); + try { client().prepareSearch("idx").setQuery(QueryBuilders.regexpQuery("num", "34")).get(); fail("SearchPhaseExecutionException should have been thrown"); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index af336b0cc01..ae6ec51ac36 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -562,6 +562,65 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { assertSuggestions("foo", geoNeighbourPrefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); } + public void testGeoField() throws Exception { + + XContentBuilder mapping = jsonBuilder(); + mapping.startObject(); + mapping.startObject(TYPE); + mapping.startObject("properties"); + mapping.startObject("pin"); + mapping.field("type", "geo_point"); + mapping.endObject(); + mapping.startObject(FIELD); + mapping.field("type", "completion"); + mapping.field("analyzer", "simple"); + + mapping.startArray("contexts"); + mapping.startObject(); + mapping.field("name", "st"); + mapping.field("type", "geo"); + mapping.field("path", "pin"); + mapping.field("precision", 5); + mapping.endObject(); + mapping.endArray(); + + mapping.endObject(); + mapping.endObject(); + mapping.endObject(); + mapping.endObject(); + + assertAcked(prepareCreate(INDEX).addMapping(TYPE, mapping)); + ensureYellow(); + + XContentBuilder source1 = jsonBuilder() + .startObject() + .latlon("pin", 52.529172, 13.407333) + .startObject(FIELD) + .array("input", "Hotel Amsterdam in Berlin") + .endObject() + .endObject(); + client().prepareIndex(INDEX, TYPE, "1").setSource(source1).execute().actionGet(); + + XContentBuilder source2 = jsonBuilder() + .startObject() + .latlon("pin", 52.363389, 4.888695) + .startObject(FIELD) + .array("input", "Hotel Berlin in Amsterdam") + .endObject() + .endObject(); + client().prepareIndex(INDEX, TYPE, "2").setSource(source2).execute().actionGet(); + + refresh(); + + String suggestionName = randomAsciiOfLength(10); + CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(suggestionName).field(FIELD).text("h").size(10) + .geoContexts("st", GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.52, 13.4)).build()); + SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(context).get(); + + assertEquals(suggestResponse.getSuggest().size(), 1); + assertEquals("Hotel Amsterdam in Berlin", 
suggestResponse.getSuggest().getSuggestion(suggestionName).iterator().next().getOptions().iterator().next().getText().string()); + } + public void assertSuggestions(String suggestionName, SuggestBuilder.SuggestionBuilder suggestBuilder, String... suggestions) { SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestBuilder ).execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index ffddcfc1619..51ae038ca0d 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -20,12 +20,7 @@ package org.elasticsearch.snapshots; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.ClusterStateUpdateTask; -import org.elasticsearch.cluster.SnapshotsInProgress; +import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.service.PendingClusterTask; @@ -208,7 +203,7 @@ public abstract class AbstractSnapshotIntegTestCase extends ESIntegTestCase { private void addBlock() { // We should block after this task - add blocking cluster state update task - clusterService.submitStateUpdateTask("test_block", passThroughPriority, new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("test_block", new ClusterStateUpdateTask(passThroughPriority) { @Override public ClusterState execute(ClusterState currentState) throws Exception { while(System.currentTimeMillis() < stopWaitingAt) { diff --git a/core/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java b/core/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java new file mode 100644 index 00000000000..1df965968a2 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java @@ -0,0 +1,103 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.test.search.aggregations.bucket; + +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; +import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; +import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESTestCase; +import org.junit.Assert; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; + +import static org.elasticsearch.test.ESIntegTestCase.client; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.hamcrest.Matchers.equalTo; + +public class SharedSignificantTermsTestMethods { + public static final String INDEX_NAME = "testidx"; + public static final String DOC_TYPE = "doc"; + public static final String TEXT_FIELD = "text"; + public static final String CLASS_FIELD = "class"; + + public static void aggregateAndCheckFromSeveralShards(ESIntegTestCase testCase) throws ExecutionException, InterruptedException { + String type = ESTestCase.randomBoolean() ? "string" : "long"; + String settings = "{\"index.number_of_shards\": 5, \"index.number_of_replicas\": 0}"; + index01Docs(type, settings, testCase); + testCase.ensureGreen(); + testCase.logClusterState(); + checkSignificantTermsAggregationCorrect(testCase); + } + + private static void checkSignificantTermsAggregationCorrect(ESIntegTestCase testCase) { + + SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) + .addAggregation(new TermsBuilder("class").field(CLASS_FIELD).subAggregation( + new SignificantTermsBuilder("sig_terms") + .field(TEXT_FIELD))) + .execute() + .actionGet(); + assertSearchResponse(response); + StringTerms classes = response.getAggregations().get("class"); + Assert.assertThat(classes.getBuckets().size(), equalTo(2)); + for (Terms.Bucket classBucket : classes.getBuckets()) { + Map aggs = classBucket.getAggregations().asMap(); + Assert.assertTrue(aggs.containsKey("sig_terms")); + SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms"); + Assert.assertThat(agg.getBuckets().size(), equalTo(1)); + SignificantTerms.Bucket sigBucket = agg.iterator().next(); + String term = sigBucket.getKeyAsString(); + String classTerm = classBucket.getKeyAsString(); + Assert.assertTrue(term.equals(classTerm)); + } + } + + public static void index01Docs(String type, String settings, ESIntegTestCase testCase) throws ExecutionException, InterruptedException { + String mappings = "{\"doc\": {\"properties\":{\"text\": {\"type\":\"" + type + "\"}}}}"; + assertAcked(testCase.prepareCreate(INDEX_NAME).setSettings(settings).addMapping("doc", mappings)); + String[] gb = {"0", "1"}; + List indexRequestBuilderList = new ArrayList<>(); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1") + .setSource(TEXT_FIELD, "1", CLASS_FIELD, "1")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "2") + .setSource(TEXT_FIELD, "1", CLASS_FIELD, "1")); + 
indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "3") + .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "4") + .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "5") + .setSource(TEXT_FIELD, gb, CLASS_FIELD, "1")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "6") + .setSource(TEXT_FIELD, gb, CLASS_FIELD, "0")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "7") + .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); + testCase.indexRandom(true, false, indexRequestBuilderList); + } +} diff --git a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java index c8a566a7763..3f140b388fd 100644 --- a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java +++ b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java @@ -50,7 +50,11 @@ import static org.hamcrest.Matchers.is; */ public class NettySizeHeaderFrameDecoderTests extends ESTestCase { - private final Settings settings = settingsBuilder().put("name", "foo").put("transport.host", "127.0.0.1").build(); + private final Settings settings = settingsBuilder() + .put("name", "foo") + .put("transport.host", "127.0.0.1") + .put("transport.tcp.port", "0") + .build(); private ThreadPool threadPool; private NettyTransport nettyTransport; diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java index afb4d1d75fc..7a939a5a1bc 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java @@ -49,9 +49,7 @@ public class NettyScheduledPingTests extends ESTestCase { public void testScheduledPing() throws Exception { ThreadPool threadPool = new ThreadPool(getClass().getName()); - int startPort = 11000 + randomIntBetween(0, 255); - int endPort = startPort + 10; - Settings settings = Settings.builder().put(NettyTransport.PING_SCHEDULE, "5ms").put("transport.tcp.port", startPort + "-" + endPort).build(); + Settings settings = Settings.builder().put(NettyTransport.PING_SCHEDULE, "5ms").put("transport.tcp.port", 0).build(); final NettyTransport nettyA = new NettyTransport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, Version.CURRENT, new NamedWriteableRegistry()); MockTransportService serviceA = new MockTransportService(settings, nettyA, threadPool); diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java index 02819525faf..1c8869772e2 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.transport.netty; -import com.carrotsearch.hppc.IntHashSet; - import org.elasticsearch.Version; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.common.component.Lifecycle; @@ -27,176 +25,115 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import 
org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.junit.rule.RepeatOnExceptionRule; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.BindTransportException; import org.elasticsearch.transport.TransportService; import org.junit.Before; -import org.junit.Rule; - -import java.io.IOException; -import java.io.OutputStream; -import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.net.ServerSocket; -import java.net.Socket; -import java.nio.charset.StandardCharsets; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.is; public class NettyTransportMultiPortTests extends ESTestCase { - private static final int MAX_RETRIES = 10; private String host; - @Rule - public RepeatOnExceptionRule repeatOnBindExceptionRule = new RepeatOnExceptionRule(logger, MAX_RETRIES, BindTransportException.class); - @Before public void setup() { - if (randomBoolean()) { - host = "localhost"; + if (NetworkUtils.SUPPORTS_V6 && randomBoolean()) { + host = "::1"; } else { - if (NetworkUtils.SUPPORTS_V6 && randomBoolean()) { - host = "::1"; - } else { - host = "127.0.0.1"; - } + host = "127.0.0.1"; } } public void testThatNettyCanBindToMultiplePorts() throws Exception { - int[] ports = getRandomPorts(3); - Settings settings = settingsBuilder() .put("network.host", host) - .put("transport.tcp.port", ports[0]) - .put("transport.profiles.default.port", ports[1]) - .put("transport.profiles.client1.port", ports[2]) + .put("transport.tcp.port", 22) // will not actually bind to this + .put("transport.profiles.default.port", 0) + .put("transport.profiles.client1.port", 0) .build(); ThreadPool threadPool = new ThreadPool("tst"); - try (NettyTransport ignored = startNettyTransport(settings, threadPool)) { - assertConnectionRefused(ports[0]); - assertPortIsBound(ports[1]); - assertPortIsBound(ports[2]); + try (NettyTransport transport = startNettyTransport(settings, threadPool)) { + assertEquals(1, transport.profileBoundAddresses().size()); + assertEquals(1, transport.boundAddress().boundAddresses().length); } finally { terminate(threadPool); } } public void testThatDefaultProfileInheritsFromStandardSettings() throws Exception { - int[] ports = getRandomPorts(2); - Settings settings = settingsBuilder() .put("network.host", host) - .put("transport.tcp.port", ports[0]) - .put("transport.profiles.client1.port", ports[1]) + .put("transport.tcp.port", 0) + .put("transport.profiles.client1.port", 0) .build(); ThreadPool threadPool = new ThreadPool("tst"); - try (NettyTransport ignored = startNettyTransport(settings, threadPool)) { - assertPortIsBound(ports[0]); - assertPortIsBound(ports[1]); + try (NettyTransport transport = startNettyTransport(settings, threadPool)) { + assertEquals(1, transport.profileBoundAddresses().size()); + assertEquals(1, transport.boundAddress().boundAddresses().length); } finally { terminate(threadPool); } } public void testThatProfileWithoutPortSettingsFails() throws Exception { - int[] ports = getRandomPorts(1); Settings settings = settingsBuilder() .put("network.host", host) - 
.put("transport.tcp.port", ports[0]) + .put("transport.tcp.port", 0) .put("transport.profiles.client1.whatever", "foo") .build(); ThreadPool threadPool = new ThreadPool("tst"); - try (NettyTransport ignored = startNettyTransport(settings, threadPool)) { - assertPortIsBound(ports[0]); + try (NettyTransport transport = startNettyTransport(settings, threadPool)) { + assertEquals(0, transport.profileBoundAddresses().size()); + assertEquals(1, transport.boundAddress().boundAddresses().length); } finally { terminate(threadPool); } } public void testThatDefaultProfilePortOverridesGeneralConfiguration() throws Exception { - int[] ports = getRandomPorts(3); - Settings settings = settingsBuilder() .put("network.host", host) - .put("transport.tcp.port", ports[0]) - .put("transport.netty.port", ports[1]) - .put("transport.profiles.default.port", ports[2]) + .put("transport.tcp.port", 22) // will not actually bind to this + .put("transport.netty.port", 23) // will not actually bind to this + .put("transport.profiles.default.port", 0) .build(); ThreadPool threadPool = new ThreadPool("tst"); - try (NettyTransport ignored = startNettyTransport(settings, threadPool)) { - assertConnectionRefused(ports[0]); - assertConnectionRefused(ports[1]); - assertPortIsBound(ports[2]); + try (NettyTransport transport = startNettyTransport(settings, threadPool)) { + assertEquals(0, transport.profileBoundAddresses().size()); + assertEquals(1, transport.boundAddress().boundAddresses().length); } finally { terminate(threadPool); } } public void testThatProfileWithoutValidNameIsIgnored() throws Exception { - int[] ports = getRandomPorts(3); - Settings settings = settingsBuilder() .put("network.host", host) - .put("transport.tcp.port", ports[0]) + .put("transport.tcp.port", 0) // mimics someone trying to define a profile for .local which is the profile for a node request to itself - .put("transport.profiles." + TransportService.DIRECT_RESPONSE_PROFILE + ".port", ports[1]) - .put("transport.profiles..port", ports[2]) + .put("transport.profiles." + TransportService.DIRECT_RESPONSE_PROFILE + ".port", 22) // will not actually bind to this + .put("transport.profiles..port", 23) // will not actually bind to this .build(); ThreadPool threadPool = new ThreadPool("tst"); - try (NettyTransport ignored = startNettyTransport(settings, threadPool)) { - assertPortIsBound(ports[0]); - assertConnectionRefused(ports[1]); - assertConnectionRefused(ports[2]); + try (NettyTransport transport = startNettyTransport(settings, threadPool)) { + assertEquals(0, transport.profileBoundAddresses().size()); + assertEquals(1, transport.boundAddress().boundAddresses().length); } finally { terminate(threadPool); } } - private int[] getRandomPorts(int numberOfPorts) { - IntHashSet ports = new IntHashSet(); - - int nextPort = randomIntBetween(49152, 65535); - for (int i = 0; i < numberOfPorts; i++) { - boolean foundPortInRange = false; - while (!foundPortInRange) { - if (!ports.contains(nextPort)) { - logger.debug("looking to see if port [{}]is available", nextPort); - try (ServerSocket serverSocket = new ServerSocket()) { - // Set SO_REUSEADDR as we may bind here and not be able - // to reuse the address immediately without it. 
- serverSocket.setReuseAddress(NetworkUtils.defaultReuseAddress()); - serverSocket.bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), nextPort)); - - // bind was a success - logger.debug("port [{}] available.", nextPort); - foundPortInRange = true; - ports.add(nextPort); - } catch (IOException e) { - // Do nothing - logger.debug("port [{}] not available.", e, nextPort); - } - } - nextPort = randomIntBetween(49152, 65535); - } - } - return ports.toArray(); - } - private NettyTransport startNettyTransport(Settings settings, ThreadPool threadPool) { BigArrays bigArrays = new MockBigArrays(new PageCacheRecycler(settings, threadPool), new NoneCircuitBreakerService()); @@ -206,36 +143,4 @@ public class NettyTransportMultiPortTests extends ESTestCase { assertThat(nettyTransport.lifecycleState(), is(Lifecycle.State.STARTED)); return nettyTransport; } - - private void assertConnectionRefused(int port) throws Exception { - try { - trySocketConnection(new InetSocketTransportAddress(InetAddress.getByName(host), port).address()); - fail("Expected to get exception when connecting to port " + port); - } catch (IOException e) { - // expected - logger.info("Got expected connection message {}", e.getMessage()); - } - } - - private void assertPortIsBound(int port) throws Exception { - assertPortIsBound(host, port); - } - - private void assertPortIsBound(String host, int port) throws Exception { - logger.info("Trying to connect to [{}]:[{}]", host, port); - trySocketConnection(new InetSocketTransportAddress(InetAddress.getByName(host), port).address()); - } - - private void trySocketConnection(InetSocketAddress address) throws Exception { - try (Socket socket = new Socket()) { - logger.info("Connecting to {}", address); - socket.connect(address, 500); - - assertThat(socket.isConnected(), is(true)); - try (OutputStream os = socket.getOutputStream()) { - os.write("foo".getBytes(StandardCharsets.UTF_8)); - os.flush(); - } - } - } } diff --git a/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java b/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java index c18597ff8d4..89702118b49 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java @@ -38,9 +38,7 @@ import static org.hamcrest.Matchers.containsString; public class SimpleNettyTransportTests extends AbstractSimpleTransportTestCase { @Override protected MockTransportService build(Settings settings, Version version, NamedWriteableRegistry namedWriteableRegistry) { - int startPort = 11000 + randomIntBetween(0, 255); - int endPort = startPort + 10; - settings = Settings.builder().put(settings).put("transport.tcp.port", startPort + "-" + endPort).build(); + settings = Settings.builder().put(settings).put("transport.tcp.port", "0").build(); MockTransportService transportService = new MockTransportService(settings, new NettyTransport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, version, namedWriteableRegistry), threadPool); transportService.start(); return transportService; diff --git a/dev-tools/create_bwc_index.py b/dev-tools/create_bwc_index.py index 5d663ca69f3..83a35941577 100644 --- a/dev-tools/create_bwc_index.py +++ b/dev-tools/create_bwc_index.py @@ -149,6 +149,16 @@ def start_node(version, release_dir, data_dir, repo_dir, tcp_port=DEFAULT_TRANSP cmd.append('-f') # version before 1.0 start in background automatically 
return subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) +def install_plugin(version, release_dir, plugin_name): + run_plugin(version, release_dir, 'install', [plugin_name]) + +def remove_plugin(version, release_dir, plugin_name): + run_plugin(version, release_dir, 'remove', [plugin_name]) + +def run_plugin(version, release_dir, plugin_cmd, args): + cmd = [os.path.join(release_dir, 'bin/plugin'), plugin_cmd] + args + subprocess.check_call(cmd) + def create_client(http_port=DEFAULT_HTTP_TCP_PORT, timeout=30): logging.info('Waiting for node to startup') for _ in range(0, timeout): diff --git a/dev-tools/create_bwc_index_with_plugin_mappings.py b/dev-tools/create_bwc_index_with_plugin_mappings.py new file mode 100644 index 00000000000..c30de412d1d --- /dev/null +++ b/dev-tools/create_bwc_index_with_plugin_mappings.py @@ -0,0 +1,124 @@ +import create_bwc_index +import logging +import os +import random +import shutil +import subprocess +import sys +import tempfile + +def fetch_version(version): + logging.info('fetching ES version %s' % version) + if subprocess.call([sys.executable, os.path.join(os.path.split(sys.argv[0])[0], 'get-bwc-version.py'), version]) != 0: + raise RuntimeError('failed to download ES version %s' % version) + +def create_index(plugin, mapping, docs): + ''' + Creates a static back compat index (.zip) with mappings using fields defined in plugins. + ''' + + logging.basicConfig(format='[%(levelname)s] [%(asctime)s] %(message)s', level=logging.INFO, + datefmt='%Y-%m-%d %I:%M:%S %p') + logging.getLogger('elasticsearch').setLevel(logging.ERROR) + logging.getLogger('urllib3').setLevel(logging.WARN) + + tmp_dir = tempfile.mkdtemp() + plugin_installed = False + node = None + try: + data_dir = os.path.join(tmp_dir, 'data') + repo_dir = os.path.join(tmp_dir, 'repo') + logging.info('Temp data dir: %s' % data_dir) + logging.info('Temp repo dir: %s' % repo_dir) + + version = '2.0.0' + classifier = '%s-%s' %(plugin, version) + index_name = 'index-%s' % classifier + + # Download old ES releases if necessary: + release_dir = os.path.join('backwards', 'elasticsearch-%s' % version) + if not os.path.exists(release_dir): + fetch_version(version) + + create_bwc_index.install_plugin(version, release_dir, plugin) + plugin_installed = True + node = create_bwc_index.start_node(version, release_dir, data_dir, repo_dir, cluster_name=index_name) + client = create_bwc_index.create_client() + put_plugin_mappings(client, index_name, mapping, docs) + create_bwc_index.shutdown_node(node) + + print('%s server output:\n%s' % (version, node.stdout.read().decode('utf-8'))) + node = None + create_bwc_index.compress_index(classifier, tmp_dir, 'plugins/%s/src/test/resources/indices/bwc' %plugin) + finally: + if node is not None: + create_bwc_index.shutdown_node(node) + if plugin_installed: + create_bwc_index.remove_plugin(version, release_dir, plugin) + shutil.rmtree(tmp_dir) + +def put_plugin_mappings(client, index_name, mapping, docs): + client.indices.delete(index=index_name, ignore=404) + logging.info('Create single shard test index') + + client.indices.create(index=index_name, body={ + 'settings': { + 'number_of_shards': 1, + 'number_of_replicas': 0 + }, + 'mappings': { + 'type': mapping + } + }) + health = client.cluster.health(wait_for_status='green', wait_for_relocating_shards=0) + assert health['timed_out'] == False, 'cluster health timed out %s' % health + + logging.info('Indexing documents') + for i in range(len(docs)): + client.index(index=index_name, doc_type="type", id=str(i), 
body=docs[i]) + logging.info('Flushing index') + client.indices.flush(index=index_name) + + logging.info('Running basic checks') + count = client.count(index=index_name)['count'] + assert count == len(docs), "expected %d docs, got %d" %(len(docs), count) + +def main(): + docs = [ + { + "foo": "abc" + }, + { + "foo": "abcdef" + }, + { + "foo": "a" + } + ] + + murmur3_mapping = { + 'properties': { + 'foo': { + 'type': 'string', + 'fields': { + 'hash': { + 'type': 'murmur3' + } + } + } + } + } + + create_index("mapper-murmur3", murmur3_mapping, docs) + + size_mapping = { + '_size': { + 'enabled': True + } + } + + create_index("mapper-size", size_mapping, docs) + +if __name__ == '__main__': + main() + diff --git a/dev-tools/prepare_release_candidate.py b/dev-tools/prepare_release_candidate.py index d3a3e178961..31b07043389 100644 --- a/dev-tools/prepare_release_candidate.py +++ b/dev-tools/prepare_release_candidate.py @@ -356,7 +356,7 @@ if __name__ == "__main__": debs3_list_cmd = 'deb-s3 list -b %s --prefix %s' % (bucket, debs3_prefix) debs3_verify_cmd = 'deb-s3 verify -b %s --prefix %s' % (bucket, debs3_prefix) rpms3_prefix = 'elasticsearch/staging/%s-%s/repos/%s/centos' % (release_version, shortHash, package_repo_version) - rpms3_upload_cmd = 'rpm-s3 -v -b %s -p %s --sign --visibility public-read -k 0 %s' % (bucket, rpms3_prefix, rpm) + rpms3_upload_cmd = 'rpm-s3 -v -b %s -p %s --sign --visibility public-read -k 100 %s' % (bucket, rpms3_prefix, rpm) if deploy_s3: run(s3cmd_sync_to_staging_bucket_cmd) diff --git a/distribution/build.gradle b/distribution/build.gradle index 6ceb940e4a6..56a602c1aa2 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -20,6 +20,7 @@ import org.apache.tools.ant.filters.FixCrLfFilter import org.apache.tools.ant.taskdefs.condition.Os import org.elasticsearch.gradle.precommit.DependencyLicensesTask +import org.elasticsearch.gradle.precommit.UpdateShasTask import org.elasticsearch.gradle.test.RunTask import org.elasticsearch.gradle.EmptyDirTask import org.elasticsearch.gradle.MavenFilteringHack @@ -38,20 +39,61 @@ buildscript { } } -allprojects { - project.ext { - // this is common configuration for distributions, but we also add it here for the license check to use - dependencyFiles = project(':core').configurations.runtime.copyRecursive().exclude(module: 'slf4j-api') +// this is common configuration for distributions, but we also add it here for the license check to use +ext.dependencyFiles = project(':core').configurations.runtime.copyRecursive().exclude(module: 'slf4j-api') + + +/***************************************************************************** + * Modules * + *****************************************************************************/ + +task buildModules(type: Copy) { + into 'build/modules' +} + +ext.restTestExpansions = [ + 'expected.modules.count': 0, +] +// we create the buildModules task above so the distribution subprojects can +// depend on it, but we don't actually configure it until projects are evaluated +// so it can depend on the bundling of plugins (ie modules must have been configured) +project.gradle.projectsEvaluated { + project.rootProject.subprojects.findAll { it.path.startsWith(':modules:') }.each { Project module -> + buildModules { + dependsOn module.bundlePlugin + into(module.name) { + from { zipTree(module.bundlePlugin.outputs.files.singleFile) } + } + } + configure(subprojects.findAll { it.name != 'integ-test-zip' }) { Project distribution -> + distribution.integTest.mustRunAfter(module.integTest) + } 
+ restTestExpansions['expected.modules.count'] += 1 } } +// make sure we have a clean task since we aren't a java project, but we have tasks that +// put stuff in the build dir +task clean(type: Delete) { + delete 'build' +} + subprojects { /***************************************************************************** * Rest test config * *****************************************************************************/ apply plugin: 'elasticsearch.rest-test' - integTest { - includePackaged true + project.integTest { + dependsOn(project.assemble) + includePackaged project.name == 'integ-test-zip' + cluster { + distribution = project.name + } + } + + processTestResources { + inputs.properties(project(':distribution').restTestExpansions) + MavenFilteringHack.filter(it, project(':distribution').restTestExpansions) } /***************************************************************************** @@ -80,7 +122,12 @@ subprojects { libFiles = copySpec { into 'lib' from project(':core').jar - from dependencyFiles + from project(':distribution').dependencyFiles + } + + modulesFiles = copySpec { + into 'modules' + from project(':distribution').buildModules } configFiles = copySpec { @@ -102,7 +149,7 @@ subprojects { /***************************************************************************** * Zip and tgz configuration * *****************************************************************************/ -configure(subprojects.findAll { it.name == 'zip' || it.name == 'tar' }) { +configure(subprojects.findAll { ['zip', 'tar', 'integ-test-zip'].contains(it.name) }) { project.ext.archivesFiles = copySpec { into("elasticsearch-${version}") { with libFiles @@ -120,6 +167,9 @@ configure(subprojects.findAll { it.name == 'zip' || it.name == 'tar' }) { from('../src/main/resources') { include 'bin/*.exe' } + if (project.name != 'integ-test-zip') { + with modulesFiles + } } } } @@ -142,7 +192,7 @@ configure(subprojects.findAll { it.name == 'zip' || it.name == 'tar' }) { * directly from the filesystem. It doesn't want to process them through * MavenFilteringHack or any other copy-style action. */ -configure(subprojects.findAll { it.name == 'deb' || it.name == 'rpm' }) { +configure(subprojects.findAll { ['deb', 'rpm'].contains(it.name) }) { integTest.enabled = Os.isFamily(Os.FAMILY_WINDOWS) == false File packagingFiles = new File(buildDir, 'packaging') project.ext.packagingFiles = packagingFiles @@ -232,6 +282,7 @@ configure(subprojects.findAll { it.name == 'deb' || it.name == 'rpm' }) { user 'root' permissionGroup 'root' with libFiles + with modulesFiles with copySpec { with commonFiles if (project.name == 'deb') { @@ -293,15 +344,18 @@ configure(subprojects.findAll { it.name == 'deb' || it.name == 'rpm' }) { // TODO: dependency checks should really be when building the jar itself, which would remove the need // for this hackery and instead we can do this inside the BuildPlugin -task check(group: 'Verification', description: 'Runs all checks.') {} // dummy task! -DependencyLicensesTask.configure(project) { +task dependencyLicenses(type: DependencyLicensesTask) { dependsOn = [dependencyFiles] dependencies = dependencyFiles mapping from: /lucene-.*/, to: 'lucene' mapping from: /jackson-.*/, to: 'jackson' } +task check(group: 'Verification', description: 'Runs all checks.', dependsOn: dependencyLicenses) {} // dummy task! +task updateShas(type: UpdateShasTask) { + parentTask = dependencyLicenses +} -RunTask.configure(project) +task run(type: RunTask) {} /** * Build some variables that are replaced in the packages. 
This includes both diff --git a/distribution/deb/build.gradle b/distribution/deb/build.gradle index 9689f5997e8..d9bd8447ab9 100644 --- a/distribution/deb/build.gradle +++ b/distribution/deb/build.gradle @@ -18,7 +18,8 @@ */ task buildDeb(type: Deb) { - dependsOn dependencyFiles, preparePackagingFiles + dependsOn preparePackagingFiles + baseName 'elasticsearch' // this is what pom generation uses for artifactId // Follow elasticsearch's deb file naming convention archiveName "${packageName}-${project.version}.deb" packageGroup 'web' @@ -34,7 +35,13 @@ task buildDeb(type: Deb) { } artifacts { + 'default' buildDeb archives buildDeb } -integTest.dependsOn buildDeb +integTest { + /* We use real deb tools to extract the deb file for testing so we have to + skip the test if they aren't around. */ + enabled = new File('/usr/bin/dpkg-deb').exists() || // Standard location + new File('/usr/local/bin/dpkg-deb').exists() // Homebrew location +} diff --git a/distribution/deb/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yaml b/distribution/deb/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yaml new file mode 100644 index 00000000000..a7f265dd3ad --- /dev/null +++ b/distribution/deb/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yaml @@ -0,0 +1,13 @@ +# Integration tests for distributions with modules +# +"Correct Modules Count": + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + nodes.info: {} + + - length: { nodes.$master.plugins: ${expected.modules.count} } diff --git a/core/src/test/java/org/elasticsearch/common/compress/lzf/LZFXContentTests.java b/distribution/integ-test-zip/build.gradle similarity index 74% rename from core/src/test/java/org/elasticsearch/common/compress/lzf/LZFXContentTests.java rename to distribution/integ-test-zip/build.gradle index 05135f0ed68..23191ff03a4 100644 --- a/core/src/test/java/org/elasticsearch/common/compress/lzf/LZFXContentTests.java +++ b/distribution/integ-test-zip/build.gradle @@ -17,14 +17,15 @@ * under the License. 
*/ -package org.elasticsearch.common.compress.lzf; - -import org.elasticsearch.common.compress.AbstractCompressedXContentTestCase; - -public class LZFXContentTests extends AbstractCompressedXContentTestCase { - - public LZFXContentTests() { - super(new LZFTestCompressor()); - } - +task buildZip(type: Zip) { + baseName = 'elasticsearch' + with archivesFiles } + +artifacts { + 'default' buildZip + archives buildZip +} + +integTest.dependsOn buildZip + diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/Rest3IT.java b/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java similarity index 81% rename from test-framework/src/main/java/org/elasticsearch/test/rest/Rest3IT.java rename to distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java index 7cbc974de0d..fd12fd2e519 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/Rest3IT.java +++ b/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java @@ -19,20 +19,20 @@ package org.elasticsearch.test.rest; -import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -/** Rest API tests subset 3 */ -public class Rest3IT extends ESRestTestCase { - public Rest3IT(@Name("yaml") RestTestCandidate testCandidate) { +/** Rest integration test. runs against external cluster in 'mvn verify' */ +public class RestIT extends ESRestTestCase { + public RestIT(RestTestCandidate testCandidate) { super(testCandidate); } + // we run them all sequentially: start simple! @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return createParameters(3, 8); + return createParameters(0, 1); } } diff --git a/distribution/licenses/compress-lzf-1.0.2.jar.sha1 b/distribution/licenses/compress-lzf-1.0.2.jar.sha1 deleted file mode 100644 index 14e4a7f1476..00000000000 --- a/distribution/licenses/compress-lzf-1.0.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -62896e6fca184c79cc01a14d143f3ae2b4f4b4ae diff --git a/distribution/licenses/compress-lzf-LICENSE b/distribution/licenses/compress-lzf-LICENSE deleted file mode 100644 index 8d6813f770f..00000000000 --- a/distribution/licenses/compress-lzf-LICENSE +++ /dev/null @@ -1,8 +0,0 @@ -This copy of Compress-LZF library is licensed under the -Apache (Software) License, version 2.0 ("the License"). -See the License for details about distribution rights, and the -specific rights regarding derivate works. - -You may obtain a copy of the License at: - -http://www.apache.org/licenses/LICENSE-2.0 diff --git a/distribution/licenses/compress-lzf-NOTICE b/distribution/licenses/compress-lzf-NOTICE deleted file mode 100644 index 382a800c80d..00000000000 --- a/distribution/licenses/compress-lzf-NOTICE +++ /dev/null @@ -1,24 +0,0 @@ -# Compress LZF - -This library contains efficient implementation of LZF compression format, -as well as additional helper classes that build on JDK-provided gzip (deflat) -codec. - -## Licensing - -Library is licensed under Apache License 2.0, as per accompanying LICENSE file. - -## Credit - -Library has been written by Tatu Saloranta (tatu.saloranta@iki.fi). -It was started at Ning, inc., as an official Open Source process used by -platform backend, but after initial versions has been developed outside of -Ning by supporting community. 
- -Other contributors include: - -* Jon Hartlaub (first versions of streaming reader/writer; unit tests) -* Cedrik Lime: parallel LZF implementation - -Various community members have contributed bug reports, and suggested minor -fixes; these can be found from file "VERSION.txt" in SCM. diff --git a/distribution/licenses/snakeyaml-1.15.jar.sha1 b/distribution/licenses/snakeyaml-1.15.jar.sha1 deleted file mode 100644 index 62d6943ca03..00000000000 --- a/distribution/licenses/snakeyaml-1.15.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3b132bea69e8ee099f416044970997bde80f4ea6 diff --git a/distribution/licenses/snakeyaml-NOTICE.txt b/distribution/licenses/snakeyaml-NOTICE.txt deleted file mode 100644 index 8d1c8b69c3f..00000000000 --- a/distribution/licenses/snakeyaml-NOTICE.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/distribution/rpm/build.gradle b/distribution/rpm/build.gradle index ab2e61ad463..2ab78fe7e41 100644 --- a/distribution/rpm/build.gradle +++ b/distribution/rpm/build.gradle @@ -19,8 +19,9 @@ task buildRpm(type: Rpm) { dependsOn dependencyFiles, preparePackagingFiles + baseName 'elasticsearch' // this is what pom generation uses for artifactId // Follow elasticsearch's rpm file naming convention - archiveName = "${packageName}-${project.version}.rpm" + archiveName "${packageName}-${project.version}.rpm" packageGroup 'Application/Internet' prefix '/usr' packager 'Elasticsearch' @@ -31,7 +32,14 @@ task buildRpm(type: Rpm) { } artifacts { + 'default' buildRpm archives buildRpm } -integTest.dependsOn buildRpm +integTest { + /* We use real rpm tools to extract the rpm file for testing so we have to + skip the test if they aren't around. */ + enabled = new File('/bin/rpm').exists() || // Standard location + new File('/usr/bin/rpm').exists() || // Debian location + new File('/usr/local/bin/rpm').exists() // Homebrew location +} diff --git a/distribution/rpm/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yaml b/distribution/rpm/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yaml new file mode 100644 index 00000000000..a7f265dd3ad --- /dev/null +++ b/distribution/rpm/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yaml @@ -0,0 +1,13 @@ +# Integration tests for distributions with modules +# +"Correct Modules Count": + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + nodes.info: {} + + - length: { nodes.$master.plugins: ${expected.modules.count} } diff --git a/distribution/tar/build.gradle b/distribution/tar/build.gradle index 5cf24e72f82..7230ab50799 100644 --- a/distribution/tar/build.gradle +++ b/distribution/tar/build.gradle @@ -17,7 +17,7 @@ * under the License. 
*/ -task buildTar(type: Tar, dependsOn: dependencyFiles) { +task buildTar(type: Tar) { baseName = 'elasticsearch' extension = 'tar.gz' with archivesFiles @@ -28,8 +28,3 @@ artifacts { 'default' buildTar archives buildTar } - -integTest { - dependsOn buildTar - clusterConfig.distribution = 'tar' -} diff --git a/distribution/tar/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yaml b/distribution/tar/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yaml new file mode 100644 index 00000000000..a7f265dd3ad --- /dev/null +++ b/distribution/tar/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yaml @@ -0,0 +1,13 @@ +# Integration tests for distributions with modules +# +"Correct Modules Count": + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + nodes.info: {} + + - length: { nodes.$master.plugins: ${expected.modules.count} } diff --git a/distribution/zip/build.gradle b/distribution/zip/build.gradle index d636e66f152..23191ff03a4 100644 --- a/distribution/zip/build.gradle +++ b/distribution/zip/build.gradle @@ -17,7 +17,7 @@ * under the License. */ -task buildZip(type: Zip, dependsOn: dependencyFiles) { +task buildZip(type: Zip) { baseName = 'elasticsearch' with archivesFiles } diff --git a/distribution/zip/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yaml b/distribution/zip/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yaml new file mode 100644 index 00000000000..a7f265dd3ad --- /dev/null +++ b/distribution/zip/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yaml @@ -0,0 +1,13 @@ +# Integration tests for distributions with modules +# +"Correct Modules Count": + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + nodes.info: {} + + - length: { nodes.$master.plugins: ${expected.modules.count} } diff --git a/docs/community-clients/index.asciidoc b/docs/community-clients/index.asciidoc index 1d06aeddf98..51789221be6 100644 --- a/docs/community-clients/index.asciidoc +++ b/docs/community-clients/index.asciidoc @@ -144,9 +144,6 @@ Also see the {client}/php-api/current/index.html[official Elasticsearch PHP clie Also see the {client}/python-api/current/index.html[official Elasticsearch Python client]. -* http://github.com/elasticsearch/elasticsearch-dsl-py[elasticsearch-dsl-py] - chainable query and filter construction built on top of official client. - * http://github.com/rhec/pyelasticsearch[pyelasticsearch]: Python client. diff --git a/docs/java-api/index.asciidoc b/docs/java-api/index.asciidoc index 37f56df0f9f..16403d5c147 100644 --- a/docs/java-api/index.asciidoc +++ b/docs/java-api/index.asciidoc @@ -34,6 +34,62 @@ For example, you can define the latest version in your `pom.xml` file: -------------------------------------------------- +== Dealing with JAR dependency conflicts + +If you want to use Elasticsearch in your Java application, you may have to deal with version conflicts with third party +dependencies like Guava and Joda. For instance, perhaps Elasticsearch uses Joda 2.8, while your code uses Joda 2.1. + +You have two choices: + +* The simplest solution is to upgrade. Newer module versions are likely to have fixed old bugs. +The further behind you fall, the harder it will be to upgrade later. Of course, it is possible that you are using a +third party dependency that in turn depends on an outdated version of a package, which prevents you from upgrading. 
+
+* The second option is to relocate the troublesome dependencies and to shade them either with your own application
+or with Elasticsearch and any plugins needed by the Elasticsearch client.
+
+The https://www.elastic.co/blog/to-shade-or-not-to-shade["To shade or not to shade" blog post] describes
+all the steps for doing so.
+
+== Embedding jar with dependencies
+
+If you want to create a single jar containing your application and all dependencies, you should not
+use `maven-assembly-plugin` for that because it cannot deal with the `META-INF/services` structure which is
+required by Lucene jars.
+
+Instead, you can use `maven-shade-plugin` and configure it as follows:
+
+[source,xml]
+--------------------------------------------------
+<plugin>
+    <groupId>org.apache.maven.plugins</groupId>
+    <artifactId>maven-shade-plugin</artifactId>
+    <version>2.4.1</version>
+    <executions>
+        <execution>
+            <phase>package</phase>
+            <goals><goal>shade</goal></goals>
+            <configuration>
+                <transformers>
+                    <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                </transformers>
+            </configuration>
+        </execution>
+    </executions>
+</plugin>
+--------------------------------------------------
+
+Note that if you have a `main` class you want to automatically call when running `java -jar yourjar.jar`, just add
+it to the `transformers`:
+
+[source,xml]
+--------------------------------------------------
+<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+    <mainClass>org.elasticsearch.demo.Generate</mainClass>
+</transformer>
+--------------------------------------------------
+
+
 == Deploying in JBoss EAP6 module
 Elasticsearch and Lucene classes need to be in the same JBoss module.
diff --git a/docs/plugins/api.asciidoc b/docs/plugins/api.asciidoc index 6b9afe0b243..7ac0cdee834 100644 --- a/docs/plugins/api.asciidoc +++ b/docs/plugins/api.asciidoc @@ -56,5 +56,6 @@ These community plugins appear to have been abandoned: * https://github.com/endgameinc/elasticsearch-term-plugin[Terms Component Plugin] (by Endgame Inc.) +* https://github.com/etsy/es-restlog[REST Request Logging Plugin] (by Etsy/Shikhar Bhushan) include::delete-by-query.asciidoc[] diff --git a/docs/plugins/discovery.asciidoc b/docs/plugins/discovery.asciidoc index 1fab9427d1a..cfc98e45dee 100644 --- a/docs/plugins/discovery.asciidoc +++ b/docs/plugins/discovery.asciidoc @@ -33,6 +33,7 @@ A number of discovery plugins have been contributed by our community: * https://github.com/grantr/elasticsearch-srv-discovery[DNS SRV Discovery Plugin] (by Grant Rodgers) * https://github.com/shikhar/eskka[eskka Discovery Plugin] (by Shikhar Bhushan) * https://github.com/grmblfrz/elasticsearch-zookeeper[ZooKeeper Discovery Plugin] (by Sonian Inc.) +* https://github.com/fabric8io/elasticsearch-cloud-kubernetes[Kubernetes Discovery Plugin] (by Jimmi Dyson, http://fabric8.io[fabric8]) include::discovery-ec2.asciidoc[] diff --git a/docs/plugins/repository-s3.asciidoc b/docs/plugins/repository-s3.asciidoc index 57c1e38c928..16505885446 100644 --- a/docs/plugins/repository-s3.asciidoc +++ b/docs/plugins/repository-s3.asciidoc @@ -198,7 +198,7 @@ The following settings are supported: request. Beyond this threshold, the S3 repository will use the http://docs.aws.amazon.com/AmazonS3/latest/dev/uploadobjusingmpu.html[AWS Multipart Upload API] to split the chunk into several parts, each of `buffer_size` length, and - to upload each part in its own request. Note that positioning a buffer + to upload each part in its own request. Note that setting a buffer size lower than `5mb` is not allowed since it will prevents the use of the Multipart API and may result in upload errors. Defaults to `5mb`. @@ -343,6 +343,14 @@ to your URL provider. Note that this setting will be used for all S3 repositorie Different `endpoint`, `region` and `protocol` settings can be set on a per-repository basis See <> for details.
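A minimal sketch of registering such a repository with per-repository settings through the Python client; the repository, bucket, and endpoint values are invented placeholders, not values taken from this change:

[source,python]
--------------------------------------------------
from elasticsearch import Elasticsearch

client = Elasticsearch(["localhost:9200"])

# Register an S3 snapshot repository with per-repository overrides.
# Repository, bucket, and endpoint names below are placeholders.
client.snapshot.create_repository(
    repository="my_s3_repository",
    body={
        "type": "s3",
        "settings": {
            "bucket": "my-snapshot-bucket",
            "endpoint": "s3-eu-central-1.amazonaws.com",
            "protocol": "https",
            "buffer_size": "5mb",
        },
    },
)
--------------------------------------------------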
+[[repository-s3-aws-vpc]] +[float] +==== AWS VPC Bandwidth Settings + +AWS instances resolve S3 endpoints to a public IP. If the elasticsearch instances reside in a private subnet in an AWS VPC then all traffic to S3 will go through that VPC's NAT instance. If your VPC's NAT instance is a smaller instance size (e.g. a t1.micro) or is handling a high volume of network traffic your bandwidth to S3 may be limited by that NAT instance's networking bandwidth limitations. + +Instances residing in a public subnet in an AWS VPC will connect to S3 via the VPC's internet gateway and not be bandwidth limited by the VPC's NAT instance. + [[repository-s3-testing]] ==== Testing AWS @@ -387,4 +395,3 @@ To run test: ---- mvn -Dtests.aws=true -Dtests.config=/path/to/config/file/elasticsearch.yml clean test ---- - diff --git a/docs/python/index.asciidoc b/docs/python/index.asciidoc index 66b79d7fa2b..64756adc444 100644 --- a/docs/python/index.asciidoc +++ b/docs/python/index.asciidoc @@ -7,7 +7,29 @@ ground for all Elasticsearch-related code in Python; because of this it tries to be opinion-free and very extendable. The full documentation is available at http://elasticsearch-py.rtfd.org/ -It can be installed with: +.Elasticsearch DSL +************************************************************************************ +For a more high level client library with more limited scope, have a look at +http://elasticsearch-dsl.rtfd.org/[elasticsearch-dsl] - a more pythonic library +sitting on top of `elasticsearch-py`. + +It provides a more convenient and idiomatic way to write and manipulate +http://elasticsearch-dsl.readthedocs.org/en/latest/search_dsl.html[queries]. It +stays close to the Elasticsearch JSON DSL, mirroring its terminology and +structure while exposing the whole range of the DSL from Python either directly +using defined classes or a queryset-like expressions. + +It also provides an optional +http://elasticsearch-dsl.readthedocs.org/en/latest/persistence.html#doctype[persistence +layer] for working with documents as Python objects in an ORM-like fashion: +defining mappings, retrieving and saving documents, wrapping the document data +in user-defined classes. +************************************************************************************ + + +=== Installation + +It can be installed with pip: [source,sh] ------------------------------------ @@ -16,13 +38,24 @@ pip install elasticsearch === Versioning -There are two branches for development - `master` and `0.4`. Master branch is -used to track all the changes for Elasticsearch 1.0 and beyond whereas 0.4 -tracks Elasticsearch 0.90. +There are two branches for development - `master` and `1.x`. Master branch is +used to track all the changes for Elasticsearch 2.0 and beyond whereas 1.x +tracks Elasticsearch 1.*. Releases with major version 1 (1.X.Y) are to be used with Elasticsearch 1.* and later, 0.4 releases are meant to work with Elasticsearch 0.90.*. +The recommended way to set your requirements in your `setup.py` or +`requirements.txt` is: + +------------------------------------ + # Elasticsearch 2.x + elasticsearch>=2.0.0,<3.0.0 + + # Elasticsearch 1.x + elasticsearch>=1.0.0,<2.0.0 +------------------------------------ + === Example use Simple use-case: @@ -71,6 +104,10 @@ The client's features include: * pluggable architecture +The client also contains a convenient set of +http://elasticsearch-py.readthedocs.org/en/master/helpers.html[helpers] for +some of the more engaging tasks like bulk indexing and reindexing. 
+ === License diff --git a/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc index 8c6d767b6f9..b5d1b5cde10 100644 --- a/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc @@ -12,7 +12,7 @@ combined unigram+bigram approach. Bigrams are generated for characters in `han`, `hiragana`, `katakana` and `hangul`, but bigrams can be disabled for particular scripts with the -`ignore_scripts` parameter. All non-CJK input is passed through unmodified. +`ignored_scripts` parameter. All non-CJK input is passed through unmodified. [source,js] -------------------------------------------------- @@ -28,7 +28,7 @@ Bigrams are generated for characters in `han`, `hiragana`, `katakana` and "filter" : { "han_bigrams_filter" : { "type" : "cjk_bigram", - "ignore_scripts": [ + "ignored_scripts": [ "hiragana", "katakana", "hangul" diff --git a/docs/reference/mapping/fields/index-field.asciidoc b/docs/reference/mapping/fields/index-field.asciidoc index 0a1928f15ff..bc506db1836 100644 --- a/docs/reference/mapping/fields/index-field.asciidoc +++ b/docs/reference/mapping/fields/index-field.asciidoc @@ -1,10 +1,17 @@ [[mapping-index-field]] === `_index` field -When performing queries across multiple indexes, it is sometimes desirable -to add query clauses that are associated with documents of only certain -indexes. The `_index` field allows matching on the index a document was -indexed into. Its value is accessible in queries, aggregations, scripts, and when sorting: +When performing queries across multiple indexes, it is sometimes desirable to +add query clauses that are associated with documents of only certain indexes. +The `_index` field allows matching on the index a document was indexed into. +Its value is accessible in `term`, or `terms` queries, aggregations, +scripts, and when sorting: + +NOTE: The `_index` is exposed as a virtual field -- it is not added to the +Lucene index as a real field. This means that you can use the `_index` field +in a `term` or `terms` query (or any query that is rewritten to a `term` +query, such as the `match`, `query_string` or `simple_query_string` query), +but it does not support `prefix`, `wildcard`, `regexp`, or `fuzzy` queries. [source,js] -------------------------- diff --git a/docs/reference/mapping/fields/ttl-field.asciidoc b/docs/reference/mapping/fields/ttl-field.asciidoc index 07ce8a86b9e..d81582c9078 100644 --- a/docs/reference/mapping/fields/ttl-field.asciidoc +++ b/docs/reference/mapping/fields/ttl-field.asciidoc @@ -62,7 +62,7 @@ PUT my_index "my_type": { "_ttl": { "enabled": true, - "defaut": "5m" + "default": "5m" } } } diff --git a/docs/reference/mapping/params/doc-values.asciidoc b/docs/reference/mapping/params/doc-values.asciidoc index ea8dfd08bb7..a0108b899b9 100644 --- a/docs/reference/mapping/params/doc-values.asciidoc +++ b/docs/reference/mapping/params/doc-values.asciidoc @@ -9,7 +9,7 @@ of documents that contain the term. Sorting, aggregations, and access to field values in scripts requires a different data access pattern. Instead of lookup up the term and finding documents, we need to be able to look up the document and find the terms that -is has in a field. +it has in a field. Doc values are the on-disk data structure, built at document index time, which makes this data access pattern possible. 
They store the same values as the diff --git a/docs/reference/migration/index.asciidoc b/docs/reference/migration/index.asciidoc index d200cab3f0b..56c000c3d9a 100644 --- a/docs/reference/migration/index.asciidoc +++ b/docs/reference/migration/index.asciidoc @@ -18,10 +18,10 @@ See <> for more info. -- include::migrate_3_0.asciidoc[] -include::migrate_2_1.asciidoc[] - include::migrate_2_2.asciidoc[] +include::migrate_2_1.asciidoc[] + include::migrate_2_0.asciidoc[] include::migrate_1_6.asciidoc[] diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 08bcb380985..e6c3e49b43d 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -90,6 +90,9 @@ The search exists api has been removed in favour of using the search api with The deprecated `/_optimize` endpoint has been removed. The `/_forcemerge` endpoint should be used in lieu of optimize. +The `GET` HTTP verb for `/_forcemerge` is no longer supported, please use the +`POST` HTTP verb. + ==== Deprecated queries removed The following deprecated queries have been removed: @@ -468,3 +471,7 @@ response is output by default. Finally, the API for org.elasticsearch.monitor.os.OsStats has changed. The `getLoadAverage` method has been removed. The value for this can now be obtained from `OsStats.Cpu#getLoadAverage`. Additionally, the recent CPU usage can be obtained from `OsStats.Cpu#getPercent`. + +=== Fields option +Only stored fields are retrievable with this option. +The fields option won't be able to load non stored fields from _source anymore. diff --git a/docs/reference/modules/scripting.asciidoc b/docs/reference/modules/scripting.asciidoc index 50be5fdce48..aea7846202e 100644 --- a/docs/reference/modules/scripting.asciidoc +++ b/docs/reference/modules/scripting.asciidoc @@ -121,10 +121,12 @@ curl -XPOST localhost:9200/_search -d '{ "functions": [ { "script_score": { - "lang": "groovy", - "file": "calculate-score", - "params": { - "my_modifier": 8 + "script": { + "lang": "groovy", + "file": "calculate-score", + "params": { + "my_modifier": 8 + } } } } @@ -180,10 +182,12 @@ curl -XPOST localhost:9200/_search -d '{ "functions": [ { "script_score": { - "id": "indexedCalculateScore", - "lang" : "groovy", - "params": { - "my_modifier": 8 + "script": { + "id": "indexedCalculateScore", + "lang" : "groovy", + "params": { + "my_modifier": 8 + } } } } @@ -421,8 +425,10 @@ curl -XPOST localhost:9200/_search -d '{ "functions": [ { "script_score": { - "id": "my_script", - "lang" : "native" + "script": { + "id": "my_script", + "lang" : "native" + } } } ] diff --git a/docs/reference/query-dsl/common-terms-query.asciidoc b/docs/reference/query-dsl/common-terms-query.asciidoc index aeecf39e6f7..a956c33c1ee 100644 --- a/docs/reference/query-dsl/common-terms-query.asciidoc +++ b/docs/reference/query-dsl/common-terms-query.asciidoc @@ -73,7 +73,7 @@ In this example, words that have a document frequency greater than 0.1% { "common": { "body": { - "query": "this is bonsai cool", + "query": "this is bonsai cool", "cutoff_frequency": 0.001 } } @@ -93,7 +93,7 @@ all terms required: { "common": { "body": { - "query": "nelly the elephant as a cartoon", + "query": "nelly the elephant as a cartoon", "cutoff_frequency": 0.001, "low_freq_operator": "and" } @@ -113,8 +113,8 @@ which is roughly equivalent to: { "term": { "body": "cartoon"}} ], "should": [ - { "term": { "body": "the"}}, - { "term": { "body": "as"}}, + { "term": { "body": "the"}} + { "term": { 
"body": "as"}} { "term": { "body": "a"}} ] } @@ -131,8 +131,8 @@ must be present, for instance: { "common": { "body": { - "query": "nelly the elephant as a cartoon", - "cutoff_frequency": 0.001, + "query": "nelly the elephant as a cartoon", + "cutoff_frequency": 0.001, "minimum_should_match": 2 } } @@ -156,8 +156,8 @@ which is roughly equivalent to: } }, "should": [ - { "term": { "body": "the"}}, - { "term": { "body": "as"}}, + { "term": { "body": "the"}} + { "term": { "body": "as"}} { "term": { "body": "a"}} ] } @@ -169,7 +169,7 @@ minimum_should_match A different <> can be applied for low and high frequency terms with the additional -`low_freq` and `high_freq` parameters Here is an example when providing +`low_freq` and `high_freq` parameters. Here is an example when providing additional parameters (note the change in structure): [source,js] @@ -177,8 +177,8 @@ additional parameters (note the change in structure): { "common": { "body": { - "query": "nelly the elephant not as a cartoon", - "cutoff_frequency": 0.001, + "query": "nelly the elephant not as a cartoon", + "cutoff_frequency": 0.001, "minimum_should_match": { "low_freq" : 2, "high_freq" : 3 @@ -230,8 +230,8 @@ for high frequency terms is when there are only high frequency terms: { "common": { "body": { - "query": "how not to be", - "cutoff_frequency": 0.001, + "query": "how not to be", + "cutoff_frequency": 0.001, "minimum_should_match": { "low_freq" : 2, "high_freq" : 3 diff --git a/docs/reference/search/request/post-filter.asciidoc b/docs/reference/search/request/post-filter.asciidoc index 7c352e9fd50..7bd95400312 100644 --- a/docs/reference/search/request/post-filter.asciidoc +++ b/docs/reference/search/request/post-filter.asciidoc @@ -78,7 +78,7 @@ curl -XGET localhost:9200/shirts/_search -d ' }, "aggs": { "colors": { - "terms": { "field": "color" }, <2> + "terms": { "field": "color" } <2> }, "color_red": { "filter": { diff --git a/docs/reference/search/request/sort.asciidoc b/docs/reference/search/request/sort.asciidoc index f73181f19ed..8d0b6708979 100644 --- a/docs/reference/search/request/sort.asciidoc +++ b/docs/reference/search/request/sort.asciidoc @@ -84,13 +84,12 @@ field support has the following parameters on top of the already existing sort options: `nested_path`:: - Defines the on what nested object to sort. The actual - sort field must be a direct field inside this nested object. The default - is to use the most immediate inherited nested object from the sort - field. + Defines on which nested object to sort. The actual + sort field must be a direct field inside this nested object. + When sorting by nested field, this field is mandatory. `nested_filter`:: - A filter the inner objects inside the nested path + A filter that the inner objects inside the nested path should match with in order for its field values to be taken into account by sorting. Common case is to repeat the query / filter inside the nested filter or query. By default no `nested_filter` is active. @@ -98,7 +97,7 @@ existing sort options: ===== Nested sorting example In the below example `offer` is a field of type `nested`. -The `nested_path` needs to be specified other elasticsearch doesn't on what nested level sort values need to be captured. +The `nested_path` needs to be specified; otherwise, elasticsearch doesn't know on what nested level sort values need to be captured. 
[source,js] -------------------------------------------------- diff --git a/docs/reference/setup/rolling_upgrade.asciidoc b/docs/reference/setup/rolling_upgrade.asciidoc index 2ac2963a239..b3c00d337f8 100644 --- a/docs/reference/setup/rolling_upgrade.asciidoc +++ b/docs/reference/setup/rolling_upgrade.asciidoc @@ -60,7 +60,7 @@ default. It is a good idea to place these directories in a different location so that there is no chance of deleting them when upgrading Elasticsearch. These -custom paths can be <> with the `path.config` and +custom paths can be <> with the `path.conf` and `path.data` settings. The Debian and RPM packages place these directories in the @@ -80,7 +80,7 @@ To upgrade using a zip or compressed tarball: overwrite the `config` or `data` directories. * Either copy the files in the `config` directory from your old installation - to your new installation, or use the `--path.config` option on the command + to your new installation, or use the `--path.conf` option on the command line to point to an external config directory. * Either copy the files in the `data` directory from your old installation diff --git a/docs/reference/setup/upgrade.asciidoc b/docs/reference/setup/upgrade.asciidoc index 15cd90f98dd..894f82a6db5 100644 --- a/docs/reference/setup/upgrade.asciidoc +++ b/docs/reference/setup/upgrade.asciidoc @@ -21,12 +21,10 @@ consult this table: [cols="1> -|< 0.90.7 |0.90.x |<> -|>= 0.90.7 |0.90.x |<> -|1.0.0 - 1.3.1 |1.x |<> (if <> set to `false`) -|>= 1.3.2 |1.x |<> +|0.90.x |2.x |<> |1.x |2.x |<> +|2.x |2.y |<> (where `y > x `) +|2.x |3.x |<> |======================================================================= TIP: Take plugins into consideration as well when upgrading. Most plugins will have to be upgraded alongside Elasticsearch, although some plugins accessed primarily through the browser (`_site` plugins) may continue to work given that API changes are compatible. diff --git a/docs/resiliency/index.asciidoc b/docs/resiliency/index.asciidoc index c109d513b1b..3929bc7c8a8 100644 --- a/docs/resiliency/index.asciidoc +++ b/docs/resiliency/index.asciidoc @@ -56,7 +56,7 @@ If you encounter an issue, https://github.com/elasticsearch/elasticsearch/issues We are committed to tracking down and fixing all the issues that are posted. [float] -=== Use two phase commit for Cluster State publishing (STATUS: ONGOING) +=== Use two phase commit for Cluster State publishing (STATUS: ONGOING, v3.0.0) A master node in Elasticsearch continuously https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-discovery-zen.html#fault-detection[monitors the cluster nodes] and removes any node from the cluster that doesn't respond to its pings in a timely @@ -103,38 +103,6 @@ Further issues remain with the retry mechanism: See {GIT}9967[#9967]. (STATUS: ONGOING) -[float] -=== Wait on incoming joins before electing local node as master (STATUS: ONGOING) - -During master election each node pings in order to discover other nodes and validate the liveness of existing -nodes. Based on this information the node either discovers an existing master or, if enough nodes are found -(see https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-discovery-zen.html#master-election[`discovery.zen.minimum_master_nodes`]) a new master will be elected. Currently, the node that is -elected as master will update the cluster state to indicate the result of the election. Other nodes will submit -a join request to the newly elected master node. 
Instead of immediately processing the election result, the elected master -node should wait for the incoming joins from other nodes, thus validating that the result of the election is properly applied. As soon as enough -nodes have sent their joins request (based on the `minimum_master_nodes` settings) the cluster state is updated. -{GIT}12161[#12161] - - -[float] -=== Write index metadata on data nodes where shards allocated (STATUS: ONGOING) - -Today, index metadata is written only on nodes that are master-eligible, not on -data-only nodes. This is not a problem when running with multiple master nodes, -as recommended, as the loss of all but one master node is still recoverable. -However, users running with a single master node are at risk of losing -their index metadata if the master fails. Instead, this metadata should -also be written on any node where a shard is allocated. {GIT}8823[#8823] - -[float] -=== Better file distribution with multiple data paths (STATUS: ONGOING) - -Today, a node configured with multiple data paths distributes writes across -all paths by writing one file to each path in turn. This can mean that the -failure of a single disk corrupts many shards at once. Instead, by allocating -an entire shard to a single data path, the extent of the damage can be limited -to just the shards on that disk. {GIT}9498[#9498] - [float] === OOM resiliency (STATUS: ONGOING) @@ -142,21 +110,10 @@ The family of circuit breakers has greatly reduced the occurrence of OOM exceptions, but it is still possible to cause a node to run out of heap space. The following issues have been identified: -* Set a hard limit on `from`/`size` parameters {GIT}9311[#9311]. (STATUS: ONGOING) +* Set a hard limit on `from`/`size` parameters {GIT}9311[#9311]. (STATUS: DONE, v2.1.0) * Prevent combinatorial explosion in aggregations from causing OOM {GIT}8081[#8081]. (STATUS: ONGOING) * Add the byte size of each hit to the request circuit breaker {GIT}9310[#9310]. (STATUS: ONGOING) -[float] -=== Mapping changes should be applied synchronously (STATUS: ONGOING) - -When introducing new fields using dynamic mapping, it is possible that the same -field can be added to different shards with different data types. Each shard -will operate with its local data type but, if the shard is relocated, the -data type from the cluster state will be applied to the new shard, which -can result in a corrupt shard. To prevent this, new fields should not -be added to a shard's mapping until confirmed by the master. -{GIT}8688[#8688] (STATUS: DONE) - [float] === Loss of documents during network partition (STATUS: ONGOING) @@ -166,26 +123,6 @@ If the node hosts a primary shard at the moment of partition, and ends up being A test to replicate this condition was added in {GIT}7493[#7493]. -[float] -=== Lucene checksums phase 3 (STATUS:ONGOING) - -Almost all files in Elasticsearch now have checksums which are validated before use. A few changes remain: - -* {GIT}7586[#7586] adds checksums for cluster and index state files. (STATUS: DONE, Fixed in v1.5.0) -* {GIT}9183[#9183] supports validating the checksums on all files when starting a node. (STATUS: DONE, Fixed in v2.0.0) -* {JIRA}5894[LUCENE-5894] lays the groundwork for extending more efficient checksum validation to all files during optimized bulk merges. (STATUS: DONE, Fixed in v2.0.0) -* {GIT}8403[#8403] to add validation of checksums on Lucene `segments_N` files. 
(STATUS: NOT STARTED) - -[float] -=== Add per-segment and per-commit ID to help replication (STATUS: ONGOING) - -{JIRA}5895[LUCENE-5895] adds a unique ID for each segment and each commit point. File-based replication (as performed by snapshot/restore) can use this ID to know whether the segment/commit on the source and destination machines are the same. Fixed in Lucene 5.0. - -[float] -=== Report shard-level statuses on write operations (STATUS: ONGOING) - -Make write calls return the number of total/successful/missing shards in the same way that we do in search, which ensures transparency in the consistency of write operations. {GIT}7994[#7994]. (STATUS: DONE, v2.0.0) - [float] === Jepsen Test Failures (STATUS: ONGOING) @@ -196,24 +133,96 @@ We have increased our test coverage to include scenarios tested by Jepsen. We ma This status page is a start, but we can do a better job of explicitly documenting the processes at work in Elasticsearch, and what happens in the case of each type of failure. The plan is to have a test case that validates each behavior under simulated conditions. Every test will document the expected results, the associated test code and an explicit PASS or FAIL status for each simulated case. - -[float] -=== Take filter cache key size into account (STATUS: ONGOING) - -Commonly used filters are cached in Elasticsearch. That cache is limited in size (10% of node's memory by default) and is being evicted based on a least recently used policy. The amount of memory used by the cache depends on two primary components - the values it stores and the keys associated with them. Calculating the memory footprint of the values is easy enough but the keys accounting is trickier to achieve as they are, by default, raw Lucene objects. This is largely not a problem as the keys are dominated by the values. However, recent optimizations in Lucene have changed the balance causing the filter cache to grow beyond it's size. - -While we are working on a longer term solution ({GIT}9176[#9176]), we introduced a minimum weight of 1k for each cache entry. This puts an effective limit on the number of entries in the cache. See {GIT}8304[#8304] (STATUS: DONE, fixed in v1.4.0) - [float] === Do not allow stale shards to automatically be promoted to primary (STATUS: ONGOING) -In some scenarios, after loss of all valid copies, a stale replica shard can be assigned as a primary. This can lead to +In some scenarios, after the loss of all valid copies, a stale replica shard can be assigned as a primary. This can lead to a loss of acknowledged writes if the valid copies are not lost but are rather temporarily isolated. Work is underway ({GIT}14671[#14671]) to prevent the automatic promotion of a stale primary and only allow such promotion to occur when a system operator manually intervenes. == Completed +[float] +=== Wait on incoming joins before electing local node as master (STATUS: DONE, v2.0.0) + +During master election each node pings in order to discover other nodes and validate the liveness of existing +nodes. Based on this information the node either discovers an existing master or, if enough nodes are found +(see https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-discovery-zen.html#master-election[`discovery.zen.minimum_master_nodes`]) a new master will be elected. Currently, the node that is +elected as master will update the cluster state to indicate the result of the election. Other nodes will submit +a join request to the newly elected master node. 
Instead of immediately processing the election result, the elected master +node should wait for the incoming joins from other nodes, thus validating that the result of the election is properly applied. As soon as enough +nodes have sent their joins request (based on the `minimum_master_nodes` settings) the cluster state is updated. +{GIT}12161[#12161] + +[float] +=== Mapping changes should be applied synchronously (STATUS: DONE, v2.0.0) + +When introducing new fields using dynamic mapping, it is possible that the same +field can be added to different shards with different data types. Each shard +will operate with its local data type but, if the shard is relocated, the +data type from the cluster state will be applied to the new shard, which +can result in a corrupt shard. To prevent this, new fields should not +be added to a shard's mapping until confirmed by the master. +{GIT}8688[#8688] (STATUS: DONE) + +[float] +=== Add per-segment and per-commit ID to help replication (STATUS: DONE, v2.0.0) + +{JIRA}5895[LUCENE-5895] adds a unique ID for each segment and each commit point. File-based replication (as performed by snapshot/restore) can use this ID to know whether the segment/commit on the source and destination machines are the same. Fixed in Lucene 5.0. + +[float] +=== Write index metadata on data nodes where shards allocated (STATUS: DONE, v2.0.0) + +Today, index metadata is written only on nodes that are master-eligible, not on +data-only nodes. This is not a problem when running with multiple master nodes, +as recommended, as the loss of all but one master node is still recoverable. +However, users running with a single master node are at risk of losing +their index metadata if the master fails. Instead, this metadata should +also be written on any node where a shard is allocated. {GIT}8823[#8823], {GIT}9952[#9952] + +[float] +=== Better file distribution with multiple data paths (STATUS: DONE, v2.0.0) + +Today, a node configured with multiple data paths distributes writes across +all paths by writing one file to each path in turn. This can mean that the +failure of a single disk corrupts many shards at once. Instead, by allocating +an entire shard to a single data path, the extent of the damage can be limited +to just the shards on that disk. {GIT}9498[#9498] + +[float] +=== Lucene checksums phase 3 (STATUS: DONE, v2.0.0) + +Almost all files in Elasticsearch now have checksums which are validated before use. A few changes remain: + +* {GIT}7586[#7586] adds checksums for cluster and index state files. (STATUS: DONE, Fixed in v1.5.0) +* {GIT}9183[#9183] supports validating the checksums on all files when starting a node. (STATUS: DONE, Fixed in v2.0.0) +* {JIRA}5894[LUCENE-5894] lays the groundwork for extending more efficient checksum validation to all files during optimized bulk merges. (STATUS: DONE, Fixed in v2.0.0) +* {GIT}8403[#8403] to add validation of checksums on Lucene `segments_N` files. (STATUS: DONE, v2.0.0) + +[float] +=== Report shard-level statuses on write operations (STATUS: DONE, v2.0.0) + +Make write calls return the number of total/successful/missing shards in the same way that we do in search, which ensures transparency in the consistency of write operations. {GIT}7994[#7994]. (STATUS: DONE, v2.0.0) + +[float] +=== Take filter cache key size into account (STATUS: DONE, v2.0.0) + +Commonly used filters are cached in Elasticsearch. That cache is limited in size +(10% of node's memory by default) and is being evicted based on a least recently +used policy. 
The amount of memory used by the cache depends on two primary +components - the values it stores and the keys associated with them. Calculating +the memory footprint of the values is easy enough but the keys accounting is +trickier to achieve as they are, by default, raw Lucene objects. This is largely +not a problem as the keys are dominated by the values. However, recent +optimizations in Lucene have changed the balance causing the filter cache to +grow beyond it's size. + +As a temporary solution, we introduced a minimum weight of 1k for each cache entry. +This puts an effective limit on the number of entries in the cache. See {GIT}8304[#8304] (STATUS: DONE, fixed in v1.4.0) + +The issue has been completely solved by the move to Lucene's query cache. See {GIT}10897[#10897] + [float] === Ensure shard state ID is incremental (STATUS: DONE, v1.5.1) @@ -491,4 +500,3 @@ At Elasticsearch, we live the philosophy that we can miss a bug once, but never === Lucene Loses Data On File Descriptors Failure (STATUS: DONE, v0.90.0) When a process runs out of file descriptors, Lucene can causes an index to be completely deleted. This issue was fixed in Lucene ({JIRA}4870[version 4.2.1]) and fixed in an early version of Elasticsearch. See issue {GIT}2812[#2812]. - diff --git a/gradle.properties b/gradle.properties new file mode 100644 index 00000000000..6b1823d86a6 --- /dev/null +++ b/gradle.properties @@ -0,0 +1 @@ +org.gradle.daemon=false diff --git a/modules/build.gradle b/modules/build.gradle new file mode 100644 index 00000000000..41f7a8873b4 --- /dev/null +++ b/modules/build.gradle @@ -0,0 +1,46 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +subprojects { + apply plugin: 'elasticsearch.esplugin' + + esplugin { + // for local ES plugins, the name of the plugin is the same as the directory + name project.name + } + + if (project.file('src/main/packaging').exists()) { + throw new InvalidModelException("Modules cannot contain packaging files") + } + if (project.file('src/main/bin').exists()) { + throw new InvalidModelException("Modules cannot contain bin files") + } + if (project.file('src/main/config').exists()) { + throw new InvalidModelException("Modules cannot contain config files") + } + + project.afterEvaluate { + if (esplugin.isolated == false) { + throw new InvalidModelException("Modules cannot disable isolation") + } + if (esplugin.jvm == false) { + throw new InvalidModelException("Modules must be jvm plugins") + } + } +} diff --git a/plugins/lang-expression/build.gradle b/modules/lang-expression/build.gradle similarity index 100% rename from plugins/lang-expression/build.gradle rename to modules/lang-expression/build.gradle diff --git a/plugins/lang-expression/licenses/antlr4-runtime-4.5.1-1.jar.sha1 b/modules/lang-expression/licenses/antlr4-runtime-4.5.1-1.jar.sha1 similarity index 100% rename from plugins/lang-expression/licenses/antlr4-runtime-4.5.1-1.jar.sha1 rename to modules/lang-expression/licenses/antlr4-runtime-4.5.1-1.jar.sha1 diff --git a/plugins/lang-expression/licenses/antlr4-runtime-LICENSE.txt b/modules/lang-expression/licenses/antlr4-runtime-LICENSE.txt similarity index 100% rename from plugins/lang-expression/licenses/antlr4-runtime-LICENSE.txt rename to modules/lang-expression/licenses/antlr4-runtime-LICENSE.txt diff --git a/plugins/lang-expression/licenses/antlr4-runtime-NOTICE.txt b/modules/lang-expression/licenses/antlr4-runtime-NOTICE.txt similarity index 100% rename from plugins/lang-expression/licenses/antlr4-runtime-NOTICE.txt rename to modules/lang-expression/licenses/antlr4-runtime-NOTICE.txt diff --git a/plugins/lang-expression/licenses/asm-5.0.4.jar.sha1 b/modules/lang-expression/licenses/asm-5.0.4.jar.sha1 similarity index 100% rename from plugins/lang-expression/licenses/asm-5.0.4.jar.sha1 rename to modules/lang-expression/licenses/asm-5.0.4.jar.sha1 diff --git a/plugins/lang-expression/licenses/asm-LICENSE.txt b/modules/lang-expression/licenses/asm-LICENSE.txt similarity index 100% rename from plugins/lang-expression/licenses/asm-LICENSE.txt rename to modules/lang-expression/licenses/asm-LICENSE.txt diff --git a/plugins/lang-expression/licenses/asm-NOTICE.txt b/modules/lang-expression/licenses/asm-NOTICE.txt similarity index 100% rename from plugins/lang-expression/licenses/asm-NOTICE.txt rename to modules/lang-expression/licenses/asm-NOTICE.txt diff --git a/plugins/lang-expression/licenses/asm-commons-5.0.4.jar.sha1 b/modules/lang-expression/licenses/asm-commons-5.0.4.jar.sha1 similarity index 100% rename from plugins/lang-expression/licenses/asm-commons-5.0.4.jar.sha1 rename to modules/lang-expression/licenses/asm-commons-5.0.4.jar.sha1 diff --git a/plugins/lang-expression/licenses/asm-commons-LICENSE.txt b/modules/lang-expression/licenses/asm-commons-LICENSE.txt similarity index 100% rename from plugins/lang-expression/licenses/asm-commons-LICENSE.txt rename to modules/lang-expression/licenses/asm-commons-LICENSE.txt diff --git a/plugins/lang-expression/licenses/asm-commons-NOTICE.txt b/modules/lang-expression/licenses/asm-commons-NOTICE.txt similarity index 100% rename from plugins/lang-expression/licenses/asm-commons-NOTICE.txt rename to 
modules/lang-expression/licenses/asm-commons-NOTICE.txt diff --git a/plugins/lang-expression/licenses/lucene-LICENSE.txt b/modules/lang-expression/licenses/lucene-LICENSE.txt similarity index 100% rename from plugins/lang-expression/licenses/lucene-LICENSE.txt rename to modules/lang-expression/licenses/lucene-LICENSE.txt diff --git a/plugins/lang-expression/licenses/lucene-NOTICE.txt b/modules/lang-expression/licenses/lucene-NOTICE.txt similarity index 100% rename from plugins/lang-expression/licenses/lucene-NOTICE.txt rename to modules/lang-expression/licenses/lucene-NOTICE.txt diff --git a/plugins/lang-expression/licenses/lucene-expressions-5.4.0-snapshot-1715952.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-5.4.0-snapshot-1715952.jar.sha1 similarity index 100% rename from plugins/lang-expression/licenses/lucene-expressions-5.4.0-snapshot-1715952.jar.sha1 rename to modules/lang-expression/licenses/lucene-expressions-5.4.0-snapshot-1715952.jar.sha1 diff --git a/plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodFunctionValues.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodFunctionValues.java similarity index 100% rename from plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodFunctionValues.java rename to modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodFunctionValues.java diff --git a/plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java similarity index 100% rename from plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java rename to modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java diff --git a/plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java similarity index 100% rename from plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java rename to modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java diff --git a/plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java similarity index 100% rename from plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java rename to modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java diff --git a/plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionExecutableScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionExecutableScript.java similarity index 100% rename from plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionExecutableScript.java rename to modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionExecutableScript.java diff --git a/plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java 
similarity index 58% rename from plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java rename to modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java index c1e2ed47dc4..48c7b4cb34d 100644 --- a/plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java @@ -19,10 +19,37 @@ package org.elasticsearch.script.expression; +import org.apache.lucene.expressions.js.JavascriptCompiler; +import org.elasticsearch.SpecialPermission; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptModule; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.text.ParseException; + public class ExpressionPlugin extends Plugin { + + // lucene expressions has crazy checks in its clinit for the functions map + // it violates rules of classloaders to detect accessibility + // TODO: clean that up + static { + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + AccessController.doPrivileged(new PrivilegedAction() { + @Override + public Void run() { + try { + JavascriptCompiler.compile("0"); + } catch (ParseException e) { + throw new RuntimeException(e); + } + return null; + } + }); + } @Override public String name() { diff --git a/plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java similarity index 90% rename from plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java rename to modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java index 72a1dd7d5c6..a7f93925119 100644 --- a/plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java @@ -36,6 +36,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper; +import org.elasticsearch.script.ClassPermission; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptEngineService; @@ -44,6 +45,7 @@ import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.lookup.SearchLookup; +import java.security.AccessControlContext; import java.security.AccessController; import java.security.PrivilegedAction; import java.text.ParseException; @@ -95,7 +97,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements @Override public Object compile(String script) { // classloader created here - SecurityManager sm = System.getSecurityManager(); + final SecurityManager sm = System.getSecurityManager(); if (sm != null) { sm.checkPermission(new SpecialPermission()); } @@ -103,8 +105,24 @@ public class ExpressionScriptEngineService extends AbstractComponent implements @Override public Expression run() { try { + // snapshot our context here, we check on behalf of the 
expression + AccessControlContext engineContext = AccessController.getContext(); + ClassLoader loader = getClass().getClassLoader(); + if (sm != null) { + loader = new ClassLoader(loader) { + @Override + protected Class loadClass(String name, boolean resolve) throws ClassNotFoundException { + try { + engineContext.checkPermission(new ClassPermission(name)); + } catch (SecurityException e) { + throw new ClassNotFoundException(name, e); + } + return super.loadClass(name, resolve); + } + }; + } // NOTE: validation is delayed to allow runtime vars, and we don't have access to per index stuff here - return JavascriptCompiler.compile(script); + return JavascriptCompiler.compile(script, JavascriptCompiler.DEFAULT_FUNCTIONS, loader); } catch (ParseException e) { throw new ScriptException("Failed to parse expression: " + script, e); } diff --git a/plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java similarity index 100% rename from plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java rename to modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java diff --git a/plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataFunctionValues.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataFunctionValues.java similarity index 100% rename from plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataFunctionValues.java rename to modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataFunctionValues.java diff --git a/plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java similarity index 100% rename from plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java rename to modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java diff --git a/plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/ReplaceableConstFunctionValues.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ReplaceableConstFunctionValues.java similarity index 100% rename from plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/ReplaceableConstFunctionValues.java rename to modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ReplaceableConstFunctionValues.java diff --git a/plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/ReplaceableConstValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ReplaceableConstValueSource.java similarity index 100% rename from plugins/lang-expression/src/main/java/org/elasticsearch/script/expression/ReplaceableConstValueSource.java rename to modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ReplaceableConstValueSource.java diff --git a/plugins/lang-groovy/src/main/plugin-metadata/plugin-security.policy b/modules/lang-expression/src/main/plugin-metadata/plugin-security.policy similarity index 56% rename from plugins/lang-groovy/src/main/plugin-metadata/plugin-security.policy rename to 
modules/lang-expression/src/main/plugin-metadata/plugin-security.policy index 55c2fab13f7..9f50be3dd05 100644 --- a/plugins/lang-groovy/src/main/plugin-metadata/plugin-security.policy +++ b/modules/lang-expression/src/main/plugin-metadata/plugin-security.policy @@ -20,10 +20,15 @@ grant { // needed to generate runtime classes permission java.lang.RuntimePermission "createClassLoader"; - // needed by groovy engine - permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect"; - // needed by GroovyScriptEngineService to close its classloader (why?) - permission java.lang.RuntimePermission "closeClassLoader"; - // Allow executing groovy scripts with codesource of /untrusted - permission groovy.security.GroovyCodeSourcePermission "/untrusted"; + // needed because of security problems in JavascriptCompiler + permission java.lang.RuntimePermission "getClassLoader"; + + // expression runtime + permission org.elasticsearch.script.ClassPermission "java.lang.String"; + permission org.elasticsearch.script.ClassPermission "org.apache.lucene.expressions.Expression"; + permission org.elasticsearch.script.ClassPermission "org.apache.lucene.queries.function.FunctionValues"; + // available functions + permission org.elasticsearch.script.ClassPermission "java.lang.Math"; + permission org.elasticsearch.script.ClassPermission "org.apache.lucene.util.MathUtil"; + permission org.elasticsearch.script.ClassPermission "org.apache.lucene.util.SloppyMath"; }; diff --git a/plugins/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java similarity index 100% rename from plugins/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java rename to modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java diff --git a/plugins/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java similarity index 100% rename from plugins/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java rename to modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java diff --git a/plugins/lang-expression/src/test/java/org/elasticsearch/script/expression/IndexedExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/IndexedExpressionTests.java similarity index 100% rename from plugins/lang-expression/src/test/java/org/elasticsearch/script/expression/IndexedExpressionTests.java rename to modules/lang-expression/src/test/java/org/elasticsearch/script/expression/IndexedExpressionTests.java diff --git a/plugins/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java similarity index 98% rename from plugins/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java rename to modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java index 4ac4b402c17..89a5be7ff1c 100644 --- a/plugins/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java @@ -97,6 +97,16 @@ public class 
MoreExpressionTests extends ESIntegTestCase { assertEquals(1, rsp.getHits().getTotalHits()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); } + + public void testFunction() throws Exception { + createIndex("test"); + ensureGreen("test"); + client().prepareIndex("test", "doc", "1").setSource("foo", 4).setRefresh(true).get(); + SearchResponse rsp = buildRequest("doc['foo'] + abs(1)").get(); + assertSearchResponse(rsp); + assertEquals(1, rsp.getHits().getTotalHits()); + assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); + } public void testBasicUsingDotValue() throws Exception { createIndex("test"); diff --git a/plugins/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yaml b/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yaml similarity index 62% rename from plugins/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yaml rename to modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yaml index 9c7819d925d..1550f2a7f81 100644 --- a/plugins/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yaml +++ b/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yaml @@ -10,5 +10,5 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: lang-expression } - - match: { nodes.$master.plugins.0.jvm: true } + - match: { nodes.$master.modules.0.name: lang-expression } + - match: { nodes.$master.modules.0.jvm: true } diff --git a/plugins/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/20_search.yaml b/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/20_search.yaml similarity index 100% rename from plugins/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/20_search.yaml rename to modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/20_search.yaml diff --git a/plugins/lang-groovy/build.gradle b/modules/lang-groovy/build.gradle similarity index 100% rename from plugins/lang-groovy/build.gradle rename to modules/lang-groovy/build.gradle diff --git a/plugins/lang-groovy/licenses/groovy-all-2.4.4-indy.jar.sha1 b/modules/lang-groovy/licenses/groovy-all-2.4.4-indy.jar.sha1 similarity index 100% rename from plugins/lang-groovy/licenses/groovy-all-2.4.4-indy.jar.sha1 rename to modules/lang-groovy/licenses/groovy-all-2.4.4-indy.jar.sha1 diff --git a/plugins/lang-groovy/licenses/groovy-all-LICENSE-ANTLR.txt b/modules/lang-groovy/licenses/groovy-all-LICENSE-ANTLR.txt similarity index 100% rename from plugins/lang-groovy/licenses/groovy-all-LICENSE-ANTLR.txt rename to modules/lang-groovy/licenses/groovy-all-LICENSE-ANTLR.txt diff --git a/plugins/lang-groovy/licenses/groovy-all-LICENSE-ASM.txt b/modules/lang-groovy/licenses/groovy-all-LICENSE-ASM.txt similarity index 100% rename from plugins/lang-groovy/licenses/groovy-all-LICENSE-ASM.txt rename to modules/lang-groovy/licenses/groovy-all-LICENSE-ASM.txt diff --git a/plugins/lang-groovy/licenses/groovy-all-LICENSE-CLI.txt b/modules/lang-groovy/licenses/groovy-all-LICENSE-CLI.txt similarity index 100% rename from plugins/lang-groovy/licenses/groovy-all-LICENSE-CLI.txt rename to modules/lang-groovy/licenses/groovy-all-LICENSE-CLI.txt diff --git a/plugins/lang-groovy/licenses/groovy-all-LICENSE-JSR223.txt b/modules/lang-groovy/licenses/groovy-all-LICENSE-JSR223.txt similarity index 100% rename from 
plugins/lang-groovy/licenses/groovy-all-LICENSE-JSR223.txt rename to modules/lang-groovy/licenses/groovy-all-LICENSE-JSR223.txt diff --git a/plugins/lang-groovy/licenses/groovy-all-LICENSE.txt b/modules/lang-groovy/licenses/groovy-all-LICENSE.txt similarity index 100% rename from plugins/lang-groovy/licenses/groovy-all-LICENSE.txt rename to modules/lang-groovy/licenses/groovy-all-LICENSE.txt diff --git a/plugins/lang-groovy/licenses/groovy-all-NOTICE.txt b/modules/lang-groovy/licenses/groovy-all-NOTICE.txt similarity index 100% rename from plugins/lang-groovy/licenses/groovy-all-NOTICE.txt rename to modules/lang-groovy/licenses/groovy-all-NOTICE.txt diff --git a/plugins/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyPlugin.java b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyPlugin.java similarity index 100% rename from plugins/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyPlugin.java rename to modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyPlugin.java diff --git a/plugins/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java similarity index 85% rename from plugins/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java rename to modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java index d1e7160282b..85f57694ce6 100644 --- a/plugins/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java +++ b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java @@ -51,6 +51,7 @@ import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; import java.math.BigDecimal; import java.nio.charset.StandardCharsets; +import java.security.AccessControlContext; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.HashMap; @@ -65,16 +66,6 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri * The name of the scripting engine/language. */ public static final String NAME = "groovy"; - /** - * The setting to enable or disable invokedynamic instruction support in Java 7+. - *
<p>
    - * Note: If this is disabled because invokedynamic is causing issues, then the Groovy - * indy jar needs to be replaced by the non-indy variant of it on the classpath (e.g., - * groovy-all-2.4.4-indy.jar should be replaced by groovy-all-2.4.4.jar). - *
<p>
    - * Defaults to {@code true}. - */ - public static final String GROOVY_INDY_ENABLED = "script.groovy.indy"; /** * The name of the Groovy compiler setting to use associated with activating invokedynamic support. */ @@ -96,22 +87,33 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri // Add BigDecimal -> Double transformer config.addCompilationCustomizers(new GroovyBigDecimalTransformer(CompilePhase.CONVERSION)); - // Implicitly requires Java 7u60 or later to get valid support - if (settings.getAsBoolean(GROOVY_INDY_ENABLED, true)) { - // maintain any default optimizations - config.getOptimizationOptions().put(GROOVY_INDY_SETTING_NAME, true); - } + // always enable invokeDynamic, not the crazy softreference-based stuff + config.getOptimizationOptions().put(GROOVY_INDY_SETTING_NAME, true); // Groovy class loader to isolate Groovy-land code // classloader created here - SecurityManager sm = System.getSecurityManager(); + final SecurityManager sm = System.getSecurityManager(); if (sm != null) { sm.checkPermission(new SpecialPermission()); } this.loader = AccessController.doPrivileged(new PrivilegedAction() { @Override public GroovyClassLoader run() { - return new GroovyClassLoader(getClass().getClassLoader(), config); + // snapshot our context (which has permissions for classes), since the script has none + final AccessControlContext engineContext = AccessController.getContext(); + return new GroovyClassLoader(new ClassLoader(getClass().getClassLoader()) { + @Override + protected Class loadClass(String name, boolean resolve) throws ClassNotFoundException { + if (sm != null) { + try { + engineContext.checkPermission(new ClassPermission(name)); + } catch (SecurityException e) { + throw new ClassNotFoundException(name, e); + } + } + return super.loadClass(name, resolve); + } + }, config); } }); } @@ -172,13 +174,15 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri } String fake = MessageDigests.toHexString(MessageDigests.sha1().digest(script.getBytes(StandardCharsets.UTF_8))); // same logic as GroovyClassLoader.parseClass() but with a different codesource string: - GroovyCodeSource gcs = AccessController.doPrivileged(new PrivilegedAction() { - public GroovyCodeSource run() { - return new GroovyCodeSource(script, fake, BootstrapInfo.UNTRUSTED_CODEBASE); + return AccessController.doPrivileged(new PrivilegedAction() { + public Class run() { + GroovyCodeSource gcs = new GroovyCodeSource(script, fake, BootstrapInfo.UNTRUSTED_CODEBASE); + gcs.setCachable(false); + // TODO: we could be more complicated and paranoid, and move this to separate block, to + // sandbox the compilation process itself better. + return loader.parseClass(gcs); } }); - gcs.setCachable(false); - return loader.parseClass(gcs); } catch (Throwable e) { if (logger.isTraceEnabled()) { logger.trace("exception compiling Groovy script:", e); @@ -293,7 +297,14 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri @Override public Object run() { try { - return script.run(); + // NOTE: we truncate the stack because IndyInterface has security issue (needs getClassLoader) + // we don't do a security check just as a tradeoff, it cannot really escalate to anything. 
+ return AccessController.doPrivileged(new PrivilegedAction() { + @Override + public Object run() { + return script.run(); + } + }); } catch (Throwable e) { if (logger.isTraceEnabled()) { logger.trace("failed to run " + compiledScript, e); diff --git a/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy b/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy new file mode 100644 index 00000000000..7de3e1a62aa --- /dev/null +++ b/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +grant { + // needed to generate runtime classes + permission java.lang.RuntimePermission "createClassLoader"; + // needed by IndyInterface + permission java.lang.RuntimePermission "getClassLoader"; + // needed by groovy engine + permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect"; + // needed by GroovyScriptEngineService to close its classloader (why?) + permission java.lang.RuntimePermission "closeClassLoader"; + // Allow executing groovy scripts with codesource of /untrusted + permission groovy.security.GroovyCodeSourcePermission "/untrusted"; + + // Standard set of classes + permission org.elasticsearch.script.ClassPermission "<<STANDARD>>"; + // groovy runtime (TODO: clean these up if possible) + permission org.elasticsearch.script.ClassPermission "groovy.grape.GrabAnnotationTransformation"; + permission org.elasticsearch.script.ClassPermission "groovy.json.JsonOutput"; + permission org.elasticsearch.script.ClassPermission "groovy.lang.Binding"; + permission org.elasticsearch.script.ClassPermission "groovy.lang.GroovyObject"; + permission org.elasticsearch.script.ClassPermission "groovy.lang.GString"; + permission org.elasticsearch.script.ClassPermission "groovy.lang.Script"; + permission org.elasticsearch.script.ClassPermission "groovy.util.GroovyCollections"; + permission org.elasticsearch.script.ClassPermission "org.codehaus.groovy.ast.builder.AstBuilderTransformation"; + permission org.elasticsearch.script.ClassPermission "org.codehaus.groovy.reflection.ClassInfo"; + permission org.elasticsearch.script.ClassPermission "org.codehaus.groovy.runtime.GStringImpl"; + permission org.elasticsearch.script.ClassPermission "org.codehaus.groovy.runtime.powerassert.ValueRecorder"; + permission org.elasticsearch.script.ClassPermission "org.codehaus.groovy.runtime.powerassert.AssertionRenderer"; + permission org.elasticsearch.script.ClassPermission "org.codehaus.groovy.runtime.ScriptBytecodeAdapter"; + permission org.elasticsearch.script.ClassPermission "org.codehaus.groovy.runtime.typehandling.DefaultTypeTransformation"; + permission org.elasticsearch.script.ClassPermission "org.codehaus.groovy.vmplugin.v7.IndyInterface"; + permission
org.elasticsearch.script.ClassPermission "sun.reflect.ConstructorAccessorImpl"; +}; diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketScriptTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketScriptTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketScriptTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketScriptTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/CardinalityTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/CardinalityTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/CardinalityTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/CardinalityTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java similarity index 
100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java similarity index 99% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java index f71a1f0014d..d79beb186cf 100644 --- a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java @@ -739,7 +739,7 @@ public class GeoDistanceTests extends ESIntegTestCase { for (int i = 0; i < 10; ++i) { final double originLat = randomLat(); final double originLon = randomLon(); - final String distance = DistanceUnit.KILOMETERS.toString(randomInt(10000)); + final String distance = DistanceUnit.KILOMETERS.toString(randomIntBetween(1, 10000)); for (GeoDistance geoDistance : Arrays.asList(GeoDistance.ARC, GeoDistance.SLOPPY_ARC)) { logger.info("Now testing GeoDistance={}, distance={}, origin=({}, {})", geoDistance, distance, originLat, originLon); GeoDistanceQueryBuilder qb = QueryBuilders.geoDistanceQuery("location").point(originLat, originLon).distance(distance).geoDistance(geoDistance); @@ -772,4 +772,4 @@ public class GeoDistanceTests extends ESIntegTestCase { } return matches; } -} \ No newline at end of file +} diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentileRanksTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentileRanksTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentileRanksTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentileRanksTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentilesTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentilesTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentilesTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentilesTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java similarity index 100% 
rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IPv4RangeTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IPv4RangeTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IPv4RangeTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IPv4RangeTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexedScriptTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexedScriptTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexedScriptTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexedScriptTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MaxTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MaxTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MaxTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MaxTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java 
b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RangeTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RangeTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RangeTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RangeTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptIndexSettingsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptIndexSettingsTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptIndexSettingsTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptIndexSettingsTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java similarity index 94% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java index 632f93d870f..8153d207b7c 100644 --- a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java @@ -108,12 +108,12 @@ public class SearchFieldsTests extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1")); - // field2 is not stored, check that it gets extracted from source + // field2 is not stored, check that it is not extracted from source. 
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("field2").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).fields().get("field2").value().toString(), equalTo("value2")); + assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(0)); + assertThat(searchResponse.getHits().getAt(0).fields().get("field2"), nullValue()); searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("field3").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); @@ -121,6 +121,34 @@ public class SearchFieldsTests extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("*3").execute().actionGet(); + assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().hits().length, equalTo(1)); + assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(1)); + assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); + + + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("*3").addField("field1").addField("field2").execute().actionGet(); + assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().hits().length, equalTo(1)); + assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(2)); + assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); + assertThat(searchResponse.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1")); + + + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("field*").execute().actionGet(); + assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().hits().length, equalTo(1)); + assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(2)); + assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); + assertThat(searchResponse.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1")); + + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("f*3").execute().actionGet(); + assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().hits().length, equalTo(1)); + assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(1)); + assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("*").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); @@ -439,8 +467,7 @@ public class SearchFieldsTests extends ESIntegTestCase { .get(); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); - assertThat(searchResponse.getHits().getAt(0).field("field1").isMetadataField(), equalTo(false)); - 
assertThat(searchResponse.getHits().getAt(0).field("field1").getValue().toString(), equalTo("value")); + assertThat(searchResponse.getHits().getAt(0).field("field1"), nullValue()); assertThat(searchResponse.getHits().getAt(0).field("_routing").isMetadataField(), equalTo(true)); assertThat(searchResponse.getHits().getAt(0).field("_routing").getValue().toString(), equalTo("1")); } @@ -647,8 +674,7 @@ public class SearchFieldsTests extends ESIntegTestCase { Map fields = response.getHits().getAt(0).getFields(); - assertThat(fields.get("field1").isMetadataField(), equalTo(false)); - assertThat(fields.get("field1").getValue().toString(), equalTo("value")); + assertThat(fields.get("field1"), nullValue()); assertThat(fields.get("_routing").isMetadataField(), equalTo(true)); assertThat(fields.get("_routing").getValue().toString(), equalTo("1")); assertThat(fields.get("_timestamp").isMetadataField(), equalTo(true)); diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java 
b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java similarity index 100% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java similarity index 60% rename from plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java index 258d957d77e..5f91631c021 100644 --- a/plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java @@ -20,16 +20,22 @@ package org.elasticsearch.script.groovy; import org.apache.lucene.util.Constants; +import org.codehaus.groovy.control.MultipleCompilationErrorsException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; +import groovy.lang.MissingPropertyException; + import java.nio.file.Path; +import java.security.PrivilegedActionException; import java.util.AbstractMap; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; /** @@ -48,7 +54,7 @@ public class GroovySecurityTests extends ESTestCase { @Override public void setUp() throws Exception { super.setUp(); - se = new GroovyScriptEngineService(Settings.Builder.EMPTY_SETTINGS); + se = new GroovyScriptEngineService(Settings.EMPTY); // otherwise will exit your VM and other bad stuff assumeTrue("test requires security manager to be enabled", System.getSecurityManager() != null); } @@ -62,8 +68,16 @@ public class GroovySecurityTests extends ESTestCase { public void testEvilGroovyScripts() throws Exception { // Plain test assertSuccess(""); - // field access + // field access (via map) assertSuccess("def foo = doc['foo'].value; if (foo == null) { return 5; }"); + // field access (via list) + assertSuccess("def foo = mylist[0]; if (foo == null) { return 5; }"); + // field access (via array) + assertSuccess("def foo = myarray[0]; if (foo == null) { return 5; }"); + // field access (via object) + assertSuccess("def foo = myobject.primitive.toString(); if (foo == null) { return 5; }"); + assertSuccess("def foo = myobject.object.toString(); if (foo == null) { 
return 5; }"); + assertSuccess("def foo = myobject.list[0].primitive.toString(); if (foo == null) { return 5; }"); // List assertSuccess("def list = [doc['foo'].value, 3, 4]; def v = list.get(1); list.add(10)"); // Ranges @@ -78,35 +92,35 @@ public class GroovySecurityTests extends ESTestCase { assertSuccess("def n = [1,2,3]; GroovyCollections.max(n)"); // Fail cases: - // AccessControlException[access denied ("java.io.FilePermission" "<>" "execute")] - assertFailure("pr = Runtime.getRuntime().exec(\"touch /tmp/gotcha\"); pr.waitFor()"); + assertFailure("pr = Runtime.getRuntime().exec(\"touch /tmp/gotcha\"); pr.waitFor()", MissingPropertyException.class); - // AccessControlException[access denied ("java.lang.RuntimePermission" "accessClassInPackage.sun.reflect")] - assertFailure("d = new DateTime(); d.getClass().getDeclaredMethod(\"year\").setAccessible(true)"); + // infamous: + assertFailure("java.lang.Math.class.forName(\"java.lang.Runtime\")", PrivilegedActionException.class); + // filtered directly by our classloader + assertFailure("getClass().getClassLoader().loadClass(\"java.lang.Runtime\").availableProcessors()", PrivilegedActionException.class); + // unfortunately, we have access to other classloaders (due to indy mechanism needing getClassLoader permission) + // but we can't do much with them directly at least. + assertFailure("myobject.getClass().getClassLoader().loadClass(\"java.lang.Runtime\").availableProcessors()", SecurityException.class); + assertFailure("d = new DateTime(); d.getClass().getDeclaredMethod(\"year\").setAccessible(true)", SecurityException.class); assertFailure("d = new DateTime(); d.\"${'get' + 'Class'}\"()." + - "\"${'getDeclared' + 'Method'}\"(\"year\").\"${'set' + 'Accessible'}\"(false)"); - assertFailure("Class.forName(\"org.joda.time.DateTime\").getDeclaredMethod(\"year\").setAccessible(true)"); + "\"${'getDeclared' + 'Method'}\"(\"year\").\"${'set' + 'Accessible'}\"(false)", SecurityException.class); + assertFailure("Class.forName(\"org.joda.time.DateTime\").getDeclaredMethod(\"year\").setAccessible(true)", MissingPropertyException.class); - // AccessControlException[access denied ("groovy.security.GroovyCodeSourcePermission" "/groovy/shell")] - assertFailure("Eval.me('2 + 2')"); - assertFailure("Eval.x(5, 'x + 2')"); + assertFailure("Eval.me('2 + 2')", MissingPropertyException.class); + assertFailure("Eval.x(5, 'x + 2')", MissingPropertyException.class); - // AccessControlException[access denied ("java.lang.RuntimePermission" "accessDeclaredMembers")] assertFailure("d = new Date(); java.lang.reflect.Field f = Date.class.getDeclaredField(\"fastTime\");" + - " f.setAccessible(true); f.get(\"fastTime\")"); + " f.setAccessible(true); f.get(\"fastTime\")", MultipleCompilationErrorsException.class); - // AccessControlException[access denied ("java.io.FilePermission" "<>" "execute")] - assertFailure("def methodName = 'ex'; Runtime.\"${'get' + 'Runtime'}\"().\"${methodName}ec\"(\"touch /tmp/gotcha2\")"); + assertFailure("def methodName = 'ex'; Runtime.\"${'get' + 'Runtime'}\"().\"${methodName}ec\"(\"touch /tmp/gotcha2\")", MissingPropertyException.class); - // AccessControlException[access denied ("java.lang.RuntimePermission" "modifyThreadGroup")] - assertFailure("t = new Thread({ println 3 });"); + assertFailure("t = new Thread({ println 3 });", MultipleCompilationErrorsException.class); // test a directory we normally have access to, but the groovy script does not. 
Path dir = createTempDir(); // TODO: figure out the necessary escaping for windows paths here :) if (!Constants.WINDOWS) { - // access denied ("java.io.FilePermission" ".../tempDir-00N" "read") - assertFailure("new File(\"" + dir + "\").exists()"); + assertFailure("new File(\"" + dir + "\").exists()", MultipleCompilationErrorsException.class); } } @@ -115,8 +129,18 @@ public class GroovySecurityTests extends ESTestCase { Map vars = new HashMap(); // we add a "mock document" containing a single field "foo" that returns 4 (abusing a jdk class with a getValue() method) vars.put("doc", Collections.singletonMap("foo", new AbstractMap.SimpleEntry(null, 4))); + vars.put("mylist", Arrays.asList("foo")); + vars.put("myarray", Arrays.asList("foo")); + vars.put("myobject", new MyObject()); + se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "js", se.compile(script)), vars).run(); } + + public static class MyObject { + public int getPrimitive() { return 0; } + public Object getObject() { return "value"; } + public List getList() { return Arrays.asList(new MyObject()); } + } /** asserts that a script runs without exception */ private void assertSuccess(String script) { @@ -124,14 +148,16 @@ public class GroovySecurityTests extends ESTestCase { } /** asserts that a script triggers securityexception */ - private void assertFailure(String script) { + private void assertFailure(String script, Class exceptionClass) { try { doTest(script); fail("did not get expected exception"); } catch (ScriptException expected) { Throwable cause = expected.getCause(); assertNotNull(cause); - assertTrue("unexpected exception: " + cause, cause instanceof SecurityException); + if (exceptionClass.isAssignableFrom(cause.getClass()) == false) { + throw new AssertionError("unexpected exception: " + cause, expected); + } } } } diff --git a/plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/combine_script.groovy b/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/combine_script.groovy similarity index 100% rename from plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/combine_script.groovy rename to modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/combine_script.groovy diff --git a/plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/init_script.groovy b/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/init_script.groovy similarity index 100% rename from plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/init_script.groovy rename to modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/init_script.groovy diff --git a/plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/map_script.groovy b/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/map_script.groovy similarity index 100% rename from plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/map_script.groovy rename to modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/map_script.groovy diff --git a/plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/reduce_script.groovy b/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/reduce_script.groovy similarity index 100% rename from 
plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/reduce_script.groovy rename to modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/reduce_script.groovy diff --git a/plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_no_params.groovy b/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_no_params.groovy similarity index 100% rename from plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_no_params.groovy rename to modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_no_params.groovy diff --git a/plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_with_params.groovy b/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_with_params.groovy similarity index 100% rename from plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_with_params.groovy rename to modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_with_params.groovy diff --git a/plugins/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/10_basic.yaml b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/10_basic.yaml similarity index 62% rename from plugins/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/10_basic.yaml rename to modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/10_basic.yaml index 123b02fc7fa..c276bab6495 100644 --- a/plugins/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/10_basic.yaml +++ b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/10_basic.yaml @@ -10,5 +10,5 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: lang-groovy } - - match: { nodes.$master.plugins.0.jvm: true } + - match: { nodes.$master.modules.0.name: lang-groovy } + - match: { nodes.$master.modules.0.jvm: true } diff --git a/plugins/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/15_update.yaml b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/15_update.yaml similarity index 100% rename from plugins/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/15_update.yaml rename to modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/15_update.yaml diff --git a/plugins/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/16_update2.yaml b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/16_update2.yaml similarity index 100% rename from plugins/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/16_update2.yaml rename to modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/16_update2.yaml diff --git a/plugins/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/20_versions.yaml b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/20_versions.yaml similarity index 100% rename from plugins/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/20_versions.yaml rename to modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/20_versions.yaml diff --git a/plugins/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/25_script_upsert.yaml 
b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/25_script_upsert.yaml similarity index 100% rename from plugins/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/25_script_upsert.yaml rename to modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/25_script_upsert.yaml diff --git a/plugins/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/90_missing.yaml b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/90_missing.yaml similarity index 100% rename from plugins/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/90_missing.yaml rename to modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/90_missing.yaml diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisTestUtils.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisTestUtils.java index e8bfb8840c5..36a43e344d9 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisTestUtils.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisTestUtils.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; @@ -45,7 +46,7 @@ public class AnalysisTestUtils { .build(); AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisICUPlugin().onModule(analysisModule); - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), + Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)), new EnvironmentModule(new Environment(settings)), analysisModule) .createInjector(); final AnalysisService analysisService = parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, indexSettings)); diff --git a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java index aa620d6612c..0942be5399a 100644 --- a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java +++ b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; @@ -203,7 +204,7 @@ public class KuromojiAnalysisTests extends ESTestCase { AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisKuromojiPlugin().onModule(analysisModule); - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), + Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)), new EnvironmentModule(new 
Environment(settings)), analysisModule) .createInjector(); diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java index 8699b8c85cc..0b6a4027685 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; @@ -57,7 +58,7 @@ public class SimplePhoneticAnalysisTests extends ESTestCase { Index index = new Index("test"); AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisPhoneticPlugin().onModule(analysisModule); - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), + Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)), new EnvironmentModule(new Environment(settings)), analysisModule) .createInjector(); return parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings)); diff --git a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java index f5d231968ce..cfc2b28ec6a 100644 --- a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java +++ b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; @@ -50,7 +51,7 @@ public class SimpleSmartChineseAnalysisTests extends ESTestCase { .build(); AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisSmartChinesePlugin().onModule(analysisModule); - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), + Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)), new EnvironmentModule(new Environment(settings)), analysisModule) .createInjector(); final AnalysisService analysisService = parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings)); diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java index 52dcb3a8d59..f3ce4326afb 100644 --- 
a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; @@ -55,7 +56,7 @@ public class PolishAnalysisTests extends ESTestCase { AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisStempelPlugin().onModule(analysisModule); - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), + Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)), new EnvironmentModule(new Environment(settings)), analysisModule) .createInjector(); diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java index d96da6d3c42..a68f958580e 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; @@ -99,7 +100,7 @@ public class SimplePolishTokenFilterTests extends ESTestCase { private AnalysisService createAnalysisService(Index index, Settings settings) throws IOException { AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisStempelPlugin().onModule(analysisModule); - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), + Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)), new EnvironmentModule(new Environment(settings)), analysisModule) .createInjector(); return parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings)); diff --git a/plugins/build.gradle b/plugins/build.gradle index fae8113ce96..bdcc604a296 100644 --- a/plugins/build.gradle +++ b/plugins/build.gradle @@ -17,8 +17,6 @@ * under the License. 
*/ -import org.elasticsearch.gradle.precommit.DependencyLicensesTask - subprojects { group = 'org.elasticsearch.plugin' @@ -29,9 +27,4 @@ subprojects { // for local ES plugins, the name of the plugin is the same as the directory name project.name } - - Task dependencyLicensesTask = DependencyLicensesTask.configure(project) { - dependencies = project.configurations.runtime - project.configurations.provided - } - project.precommit.dependsOn(dependencyLicensesTask) } diff --git a/plugins/delete-by-query/licenses/no_deps.txt b/plugins/delete-by-query/licenses/no_deps.txt deleted file mode 100644 index 8cce254d037..00000000000 --- a/plugins/delete-by-query/licenses/no_deps.txt +++ /dev/null @@ -1 +0,0 @@ -This plugin has no third party dependencies diff --git a/plugins/discovery-azure/build.gradle b/plugins/discovery-azure/build.gradle index d72c203d089..5042824eb07 100644 --- a/plugins/discovery-azure/build.gradle +++ b/plugins/discovery-azure/build.gradle @@ -23,18 +23,19 @@ esplugin { } versions << [ - 'azure': '0.7.0', + 'azure': '0.9.0', 'jersey': '1.13' ] dependencies { - compile "com.microsoft.azure:azure-management-compute:${versions.azure}" - compile "com.microsoft.azure:azure-management:${versions.azure}" + compile "com.microsoft.azure:azure-svc-mgmt-compute:${versions.azure}" compile "com.microsoft.azure:azure-core:${versions.azure}" compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "commons-logging:commons-logging:${versions.commonslogging}" compile "commons-codec:commons-codec:${versions.commonscodec}" + compile "commons-lang:commons-lang:2.6" + compile "commons-io:commons-io:2.4" compile 'javax.mail:mail:1.4.5' compile 'javax.activation:activation:1.1' compile 'javax.inject:javax.inject:1' diff --git a/plugins/discovery-azure/licenses/azure-core-0.7.0.jar.sha1 b/plugins/discovery-azure/licenses/azure-core-0.7.0.jar.sha1 deleted file mode 100644 index f7d0b7caabc..00000000000 --- a/plugins/discovery-azure/licenses/azure-core-0.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -feed802efe8a7a83d15962d11c6780c63997c528 diff --git a/plugins/discovery-azure/licenses/azure-core-0.9.0.jar.sha1 b/plugins/discovery-azure/licenses/azure-core-0.9.0.jar.sha1 new file mode 100644 index 00000000000..f9696307afe --- /dev/null +++ b/plugins/discovery-azure/licenses/azure-core-0.9.0.jar.sha1 @@ -0,0 +1 @@ +050719f91deceed1be1aaf87e85099a861295fa2 \ No newline at end of file diff --git a/plugins/discovery-azure/licenses/azure-management-0.7.0.jar.sha1 b/plugins/discovery-azure/licenses/azure-management-0.7.0.jar.sha1 deleted file mode 100644 index f69856a386e..00000000000 --- a/plugins/discovery-azure/licenses/azure-management-0.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0dfdd1c3a9bd783b087050e979f6ba34f06a68f3 diff --git a/plugins/discovery-azure/licenses/azure-management-compute-0.7.0.jar.sha1 b/plugins/discovery-azure/licenses/azure-management-compute-0.7.0.jar.sha1 deleted file mode 100644 index bcab189bc14..00000000000 --- a/plugins/discovery-azure/licenses/azure-management-compute-0.7.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b945fc3968a4e5a64bbde419c14d92a4a53fa7a1 diff --git a/plugins/discovery-azure/licenses/azure-svc-mgmt-compute-0.9.0.jar.sha1 b/plugins/discovery-azure/licenses/azure-svc-mgmt-compute-0.9.0.jar.sha1 new file mode 100644 index 00000000000..c971d7c5724 --- /dev/null +++ b/plugins/discovery-azure/licenses/azure-svc-mgmt-compute-0.9.0.jar.sha1 @@ -0,0 +1 @@ 
+887ca8ee5564e8ba2351e6b5db2a1293a8d04674 \ No newline at end of file diff --git a/plugins/discovery-azure/licenses/commons-io-2.4.jar.sha1 b/plugins/discovery-azure/licenses/commons-io-2.4.jar.sha1 new file mode 100644 index 00000000000..2f5b30d0edb --- /dev/null +++ b/plugins/discovery-azure/licenses/commons-io-2.4.jar.sha1 @@ -0,0 +1 @@ +b1b6ea3b7e4aa4f492509a4952029cd8e48019ad \ No newline at end of file diff --git a/distribution/licenses/snakeyaml-LICENSE.txt b/plugins/discovery-azure/licenses/commons-io-LICENSE.txt similarity index 89% rename from distribution/licenses/snakeyaml-LICENSE.txt rename to plugins/discovery-azure/licenses/commons-io-LICENSE.txt index d9a10c0d8e8..d6456956733 100644 --- a/distribution/licenses/snakeyaml-LICENSE.txt +++ b/plugins/discovery-azure/licenses/commons-io-LICENSE.txt @@ -1,3 +1,4 @@ + Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ @@ -174,3 +175,28 @@ of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/discovery-azure/licenses/commons-io-NOTICE.txt b/plugins/discovery-azure/licenses/commons-io-NOTICE.txt new file mode 100644 index 00000000000..a6b77d1eb60 --- /dev/null +++ b/plugins/discovery-azure/licenses/commons-io-NOTICE.txt @@ -0,0 +1,5 @@ +Apache Commons IO +Copyright 2002-2014 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). diff --git a/plugins/discovery-azure/licenses/commons-lang-2.6.jar.sha1 b/plugins/discovery-azure/licenses/commons-lang-2.6.jar.sha1 new file mode 100644 index 00000000000..4ee9249d2b7 --- /dev/null +++ b/plugins/discovery-azure/licenses/commons-lang-2.6.jar.sha1 @@ -0,0 +1 @@ +0ce1edb914c94ebc388f086c6827e8bdeec71ac2 \ No newline at end of file diff --git a/plugins/discovery-azure/licenses/commons-lang-LICENSE.txt b/plugins/discovery-azure/licenses/commons-lang-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/plugins/discovery-azure/licenses/commons-lang-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/discovery-azure/licenses/commons-lang-NOTICE.txt b/plugins/discovery-azure/licenses/commons-lang-NOTICE.txt new file mode 100644 index 00000000000..592023af76b --- /dev/null +++ b/plugins/discovery-azure/licenses/commons-lang-NOTICE.txt @@ -0,0 +1,8 @@ +Apache Commons Lang +Copyright 2001-2015 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +This product includes software from the Spring Framework, +under the Apache License 2.0 (see: StringUtils.containsWhitespace()) diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java index a95d1a73a75..ffa76c6b9b3 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java @@ -19,6 +19,7 @@ package org.elasticsearch.plugin.discovery.ec2; +import org.elasticsearch.SpecialPermission; import org.elasticsearch.cloud.aws.AwsEc2ServiceImpl; import org.elasticsearch.cloud.aws.Ec2Module; import org.elasticsearch.common.component.LifecycleComponent; @@ -31,6 +32,8 @@ import org.elasticsearch.discovery.ec2.AwsEc2UnicastHostsProvider; import org.elasticsearch.discovery.ec2.Ec2Discovery; import org.elasticsearch.plugins.Plugin; +import java.security.AccessController; +import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.Collection; @@ -38,6 +41,26 @@ import java.util.Collection; * */ public class Ec2DiscoveryPlugin extends Plugin { + + // ClientConfiguration clinit has some classloader problems + // TODO: fix that + static { + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + AccessController.doPrivileged(new PrivilegedAction<Void>() { + @Override + public Void run() { + try { + Class.forName("com.amazonaws.ClientConfiguration"); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + return null; + } + }); + } private final Settings settings; protected final ESLogger logger = Loggers.getLogger(Ec2DiscoveryPlugin.class); diff --git a/core/src/test/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamTests.java b/plugins/discovery-ec2/src/main/plugin-metadata/plugin-security.policy similarity index 72% rename from core/src/test/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamTests.java rename to plugins/discovery-ec2/src/main/plugin-metadata/plugin-security.policy index 89ee148f4a8..42bd707b7b9 100644 --- a/core/src/test/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamTests.java +++ b/plugins/discovery-ec2/src/main/plugin-metadata/plugin-security.policy @@ -17,14 +17,10 @@ * under the License. 
*/ -package org.elasticsearch.common.compress.lzf; - -import org.elasticsearch.common.compress.AbstractCompressedStreamTestCase; - -public class LZFCompressedStreamTests extends AbstractCompressedStreamTestCase { - - public LZFCompressedStreamTests() { - super(new LZFTestCompressor()); - } - -} +grant { + // needed because of problems in ClientConfiguration + // TODO: get this fixed in aws sdk + // NOTE: no tests fail without this, but we know the problem + // exists in AWS sdk, and tests here are not thorough + permission java.lang.RuntimePermission "getClassLoader"; +}; diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java index d8f6dd331b2..943b1cd0eae 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java @@ -79,14 +79,13 @@ public class GceComputeServiceImpl extends AbstractLifecycleComponentemptyList() : instanceList.getItems(); } catch (PrivilegedActionException e) { logger.warn("Problem fetching instance list for zone {}", zoneId); logger.debug("Full exception:", e); - return Collections.EMPTY_LIST; + // assist type inference + return Collections.emptyList(); } }).reduce(new ArrayList<>(), (a, b) -> { a.addAll(b); diff --git a/plugins/discovery-multicast/licenses/no_deps.txt b/plugins/discovery-multicast/licenses/no_deps.txt deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/plugins/jvm-example/licenses/no_deps.txt b/plugins/jvm-example/licenses/no_deps.txt deleted file mode 100644 index 8cce254d037..00000000000 --- a/plugins/jvm-example/licenses/no_deps.txt +++ /dev/null @@ -1 +0,0 @@ -This plugin has no third party dependencies diff --git a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java index 621d338128f..33a4e55801b 100644 --- a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java +++ b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java @@ -39,7 +39,10 @@ import org.mozilla.javascript.Script; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; +import java.security.AccessControlContext; +import java.security.AccessController; import java.security.CodeSource; +import java.security.PrivilegedAction; import java.security.cert.Certificate; import java.util.List; import java.util.Map; @@ -54,18 +57,47 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements private static WrapFactory wrapFactory = new CustomWrapFactory(); - private final int optimizationLevel; - private Scriptable globalScope; // one time initialization of rhino security manager integration private static final CodeSource DOMAIN; + private static final int OPTIMIZATION_LEVEL = 1; + static { try { DOMAIN = new CodeSource(new URL("file:" + BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[]) null); } catch (MalformedURLException e) { throw new RuntimeException(e); } + ContextFactory factory = new ContextFactory() { + @Override + protected void onContextCreated(Context cx) { + cx.setWrapFactory(wrapFactory); + cx.setOptimizationLevel(OPTIMIZATION_LEVEL); + } + }; + if 
(System.getSecurityManager() != null) { + factory.initApplicationClassLoader(AccessController.doPrivileged(new PrivilegedAction() { + @Override + public ClassLoader run() { + // snapshot our context (which has permissions for classes), since the script has none + final AccessControlContext engineContext = AccessController.getContext(); + return new ClassLoader(JavaScriptScriptEngineService.class.getClassLoader()) { + @Override + protected Class loadClass(String name, boolean resolve) throws ClassNotFoundException { + try { + engineContext.checkPermission(new ClassPermission(name)); + } catch (SecurityException e) { + throw new ClassNotFoundException(name, e); + } + return super.loadClass(name, resolve); + } + }; + } + })); + } + factory.seal(); + ContextFactory.initGlobal(factory); SecurityController.initGlobal(new PolicySecurityController() { @Override public GeneratedClassLoader createClassLoader(ClassLoader parent, Object securityDomain) { @@ -78,6 +110,7 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements if (securityDomain != DOMAIN) { throw new SecurityException("illegal securityDomain: " + securityDomain); } + return super.createClassLoader(parent, securityDomain); } }); @@ -90,11 +123,8 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements public JavaScriptScriptEngineService(Settings settings) { super(settings); - this.optimizationLevel = settings.getAsInt("script.javascript.optimization_level", 1); - Context ctx = Context.enter(); try { - ctx.setWrapFactory(wrapFactory); globalScope = ctx.initStandardObjects(null, true); } finally { Context.exit(); @@ -130,8 +160,6 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements public Object compile(String script) { Context ctx = Context.enter(); try { - ctx.setWrapFactory(wrapFactory); - ctx.setOptimizationLevel(optimizationLevel); return ctx.compileString(script, generateScriptName(), 1, DOMAIN); } finally { Context.exit(); @@ -142,8 +170,6 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements public ExecutableScript executable(CompiledScript compiledScript, Map vars) { Context ctx = Context.enter(); try { - ctx.setWrapFactory(wrapFactory); - Scriptable scope = ctx.newObject(globalScope); scope.setPrototype(globalScope); scope.setParentScope(null); @@ -161,8 +187,6 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements public SearchScript search(final CompiledScript compiledScript, final SearchLookup lookup, @Nullable final Map vars) { Context ctx = Context.enter(); try { - ctx.setWrapFactory(wrapFactory); - final Scriptable scope = ctx.newObject(globalScope); scope.setPrototype(globalScope); scope.setParentScope(null); @@ -215,7 +239,6 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements public Object run() { Context ctx = Context.enter(); try { - ctx.setWrapFactory(wrapFactory); return ScriptValueConverter.unwrapValue(script.exec(ctx, scope)); } finally { Context.exit(); @@ -276,7 +299,6 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements public Object run() { Context ctx = Context.enter(); try { - ctx.setWrapFactory(wrapFactory); return ScriptValueConverter.unwrapValue(script.exec(ctx, scope)); } finally { Context.exit(); diff --git a/plugins/lang-javascript/src/main/plugin-metadata/plugin-security.policy b/plugins/lang-javascript/src/main/plugin-metadata/plugin-security.policy index e45c1b86ceb..739a2531d2f 100644 --- 
a/plugins/lang-javascript/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/lang-javascript/src/main/plugin-metadata/plugin-security.policy @@ -20,4 +20,15 @@ grant { // needed to generate runtime classes permission java.lang.RuntimePermission "createClassLoader"; + + // Standard set of classes + permission org.elasticsearch.script.ClassPermission "<<STANDARD>>"; + // rhino runtime (TODO: clean these up if possible) + permission org.elasticsearch.script.ClassPermission "org.mozilla.javascript.ContextFactory"; + permission org.elasticsearch.script.ClassPermission "org.mozilla.javascript.Callable"; + permission org.elasticsearch.script.ClassPermission "org.mozilla.javascript.NativeFunction"; + permission org.elasticsearch.script.ClassPermission "org.mozilla.javascript.Script"; + permission org.elasticsearch.script.ClassPermission "org.mozilla.javascript.ScriptRuntime"; + permission org.elasticsearch.script.ClassPermission "org.mozilla.javascript.Undefined"; + permission org.elasticsearch.script.ClassPermission "org.mozilla.javascript.optimizer.OptRuntime"; }; diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java index 410099de0a7..c6f9805f818 100644 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; +import org.mozilla.javascript.EcmaError; import org.mozilla.javascript.WrappedException; import java.util.HashMap; @@ -61,14 +62,20 @@ public class JavaScriptSecurityTests extends ESTestCase { } /** assert that a security exception is hit */ - private void assertFailure(String script) { + private void assertFailure(String script, Class<? extends Throwable> exceptionClass) { try { doTest(script); fail("did not get expected exception"); } catch (WrappedException expected) { Throwable cause = expected.getCause(); assertNotNull(cause); - assertTrue("unexpected exception: " + cause, cause instanceof SecurityException); + if (exceptionClass.isAssignableFrom(cause.getClass()) == false) { + throw new AssertionError("unexpected exception: " + expected, expected); + } + } catch (EcmaError expected) { + if (exceptionClass.isAssignableFrom(expected.getClass()) == false) { + throw new AssertionError("unexpected exception: " + expected, expected); + } } } @@ -79,22 +86,22 @@ public class JavaScriptSecurityTests extends ESTestCase { } /** Test some javascripts that should hit security exception */ - public void testNotOK() { + public void testNotOK() throws Exception { // sanity check :) - assertFailure("java.lang.Runtime.getRuntime().halt(0)"); + assertFailure("java.lang.Runtime.getRuntime().halt(0)", EcmaError.class); // check a few things more restrictive than the ordinary policy // no network - assertFailure("new java.net.Socket(\"localhost\", 1024)"); + assertFailure("new java.net.Socket(\"localhost\", 1024)", EcmaError.class); // no files - assertFailure("java.io.File.createTempFile(\"test\", \"tmp\")"); + assertFailure("java.io.File.createTempFile(\"test\", \"tmp\")", EcmaError.class); } public void testDefinitelyNotOK() { // no mucking with security controller assertFailure("var ctx = 
org.mozilla.javascript.Context.getCurrentContext(); " + - "ctx.setSecurityController(new org.mozilla.javascript.PolicySecurityController());"); + "ctx.setSecurityController(new org.mozilla.javascript.PolicySecurityController());", EcmaError.class); // no compiling scripts from scripts assertFailure("var ctx = org.mozilla.javascript.Context.getCurrentContext(); " + - "ctx.compileString(\"1 + 1\", \"foobar\", 1, null); "); + "ctx.compileString(\"1 + 1\", \"foobar\", 1, null); ", EcmaError.class); } } diff --git a/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java b/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java index 3dfa4bcd0f9..1930f530671 100644 --- a/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java +++ b/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java @@ -25,7 +25,11 @@ import java.security.AccessController; import java.security.Permissions; import java.security.PrivilegedAction; import java.security.ProtectionDomain; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; import java.util.Map; +import java.util.Set; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Scorer; @@ -34,6 +38,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.script.ClassPermission; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.LeafSearchScript; @@ -55,20 +60,36 @@ import org.python.util.PythonInterpreter; public class PythonScriptEngineService extends AbstractComponent implements ScriptEngineService { private final PythonInterpreter interp; - + @Inject public PythonScriptEngineService(Settings settings) { super(settings); // classloader created here - SecurityManager sm = System.getSecurityManager(); + final SecurityManager sm = System.getSecurityManager(); if (sm != null) { sm.checkPermission(new SpecialPermission()); } this.interp = AccessController.doPrivileged(new PrivilegedAction () { @Override public PythonInterpreter run() { - return PythonInterpreter.threadLocalStateInterpreter(null); + // snapshot our context here for checks, as the script has no permissions + final AccessControlContext engineContext = AccessController.getContext(); + PythonInterpreter interp = PythonInterpreter.threadLocalStateInterpreter(null); + if (sm != null) { + interp.getSystemState().setClassLoader(new ClassLoader(getClass().getClassLoader()) { + @Override + protected Class loadClass(String name, boolean resolve) throws ClassNotFoundException { + try { + engineContext.checkPermission(new ClassPermission(name)); + } catch (SecurityException e) { + throw new ClassNotFoundException(name, e); + } + return super.loadClass(name, resolve); + } + }); + } + return interp; } }); } diff --git a/plugins/lang-python/src/main/plugin-metadata/plugin-security.policy b/plugins/lang-python/src/main/plugin-metadata/plugin-security.policy index e45c1b86ceb..86f4df64db4 100644 --- a/plugins/lang-python/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/lang-python/src/main/plugin-metadata/plugin-security.policy @@ -20,4 +20,8 @@ grant { // needed to generate runtime classes permission java.lang.RuntimePermission 
"createClassLoader"; + // needed by PySystemState init (TODO: see if we can avoid this) + permission java.lang.RuntimePermission "getClassLoader"; + // Standard set of classes + permission org.elasticsearch.script.ClassPermission "<>"; }; diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java index dd25db815ac..e90ac503f13 100644 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java @@ -25,7 +25,9 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.python.core.PyException; +import java.text.DecimalFormatSymbols; import java.util.HashMap; +import java.util.Locale; import java.util.Map; /** @@ -66,12 +68,12 @@ public class PythonSecurityTests extends ESTestCase { doTest(script); fail("did not get expected exception"); } catch (PyException expected) { - Throwable cause = expected.getCause(); // TODO: fix jython localization bugs: https://github.com/elastic/elasticsearch/issues/13967 - // this is the correct assert: - // assertNotNull("null cause for exception: " + expected, cause); - assertNotNull("null cause for exception", cause); - assertTrue("unexpected exception: " + cause, cause instanceof SecurityException); + // we do a gross hack for now + DecimalFormatSymbols symbols = DecimalFormatSymbols.getInstance(Locale.getDefault()); + if (symbols.getZeroDigit() == '0') { + assertTrue(expected.toString().contains("cannot import")); + } } } @@ -91,4 +93,16 @@ public class PythonSecurityTests extends ESTestCase { // no files assertFailure("from java.io import File\nFile.createTempFile(\"test\", \"tmp\")"); } + + /** Test again from a new thread, python has complex threadlocal configuration */ + public void testNotOKFromSeparateThread() throws Exception { + Thread t = new Thread() { + @Override + public void run() { + assertFailure("from java.lang import Runtime\nRuntime.availableProcessors()"); + } + }; + t.start(); + t.join(); + } } diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java index a52f0768082..eb0e143c946 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java @@ -602,7 +602,7 @@ public class AttachmentMapper extends FieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { // ignore this for now } @@ -624,7 +624,7 @@ public class AttachmentMapper extends FieldMapper { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(name()); + builder.startObject(simpleName()); builder.field("type", CONTENT_TYPE); if (indexCreatedBefore2x) { builder.field("path", pathType.name().toLowerCase(Locale.ROOT)); diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java index 
dfa719cebde..e086d9ba5c4 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java @@ -60,8 +60,8 @@ public class EncryptedDocMapperTests extends AttachmentUnitTestCase { assertThat(doc.get(docMapper.mappers().getMapper("file1.title").fieldType().names().indexName()), equalTo("Hello")); assertThat(doc.get(docMapper.mappers().getMapper("file1.author").fieldType().names().indexName()), equalTo("kimchy")); assertThat(doc.get(docMapper.mappers().getMapper("file1.keywords").fieldType().names().indexName()), equalTo("elasticsearch,cool,bonsai")); - assertThat(doc.get(docMapper.mappers().getMapper("file1.content_type").fieldType().names().indexName()), equalTo("text/html; charset=ISO-8859-1")); - assertThat(doc.getField(docMapper.mappers().getMapper("file1.content_length").fieldType().names().indexName()).numericValue().longValue(), is(344L)); + assertThat(doc.get(docMapper.mappers().getMapper("file1.content_type").fieldType().names().indexName()), startsWith("text/html;")); + assertThat(doc.getField(docMapper.mappers().getMapper("file1.content_length").fieldType().names().indexName()).numericValue().longValue(), greaterThan(0L)); assertThat(doc.get(docMapper.mappers().getMapper("file2").fieldType().names().indexName()), nullValue()); assertThat(doc.get(docMapper.mappers().getMapper("file2.title").fieldType().names().indexName()), nullValue()); @@ -96,8 +96,8 @@ public class EncryptedDocMapperTests extends AttachmentUnitTestCase { assertThat(doc.get(docMapper.mappers().getMapper("file2.title").fieldType().names().indexName()), equalTo("Hello")); assertThat(doc.get(docMapper.mappers().getMapper("file2.author").fieldType().names().indexName()), equalTo("kimchy")); assertThat(doc.get(docMapper.mappers().getMapper("file2.keywords").fieldType().names().indexName()), equalTo("elasticsearch,cool,bonsai")); - assertThat(doc.get(docMapper.mappers().getMapper("file2.content_type").fieldType().names().indexName()), equalTo("text/html; charset=ISO-8859-1")); - assertThat(doc.getField(docMapper.mappers().getMapper("file2.content_length").fieldType().names().indexName()).numericValue().longValue(), is(344L)); + assertThat(doc.get(docMapper.mappers().getMapper("file2.content_type").fieldType().names().indexName()), startsWith("text/html;")); + assertThat(doc.getField(docMapper.mappers().getMapper("file2.content_length").fieldType().names().indexName()).numericValue().longValue(), greaterThan(0L)); } public void testMultipleDocsEncryptedNotIgnoringErrors() throws IOException { diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java index a3275f4c938..cf2a130829f 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java @@ -69,8 +69,12 @@ public class MetadataMapperTests extends AttachmentUnitTestCase { assertThat(doc.get(docMapper.mappers().getMapper("file.title").fieldType().names().indexName()), equalTo("Hello")); assertThat(doc.get(docMapper.mappers().getMapper("file.author").fieldType().names().indexName()), equalTo("kimchy")); 
assertThat(doc.get(docMapper.mappers().getMapper("file.keywords").fieldType().names().indexName()), equalTo("elasticsearch,cool,bonsai")); - assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().names().indexName()), equalTo("text/html; charset=ISO-8859-1")); - assertThat(doc.getField(docMapper.mappers().getMapper("file.content_length").fieldType().names().indexName()).numericValue().longValue(), is(expectedLength)); + assertThat(doc.get(docMapper.mappers().getMapper("file.content_type").fieldType().names().indexName()), startsWith("text/html;")); + if (expectedLength == null) { + assertNull(doc.getField(docMapper.mappers().getMapper("file.content_length").fieldType().names().indexName()).numericValue().longValue()); + } else { + assertThat(doc.getField(docMapper.mappers().getMapper("file.content_length").fieldType().names().indexName()).numericValue().longValue(), greaterThan(0L)); + } } public void testIgnoreWithoutDate() throws Exception { diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java index ce1b38f63b6..4f070bd0dd1 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java @@ -86,7 +86,6 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase { public void testExternalValues() throws Exception { String originalText = "This is an elasticsearch mapper attachment test."; - String contentType = "text/plain; charset=ISO-8859-1"; String forcedName = "dummyname.txt"; String bytes = Base64.encodeBytes(originalText.getBytes(StandardCharsets.ISO_8859_1)); @@ -108,9 +107,9 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase { assertThat(doc.rootDoc().getField("file.content").stringValue(), is(originalText + "\n")); assertThat(doc.rootDoc().getField("file.content_type"), notNullValue()); - assertThat(doc.rootDoc().getField("file.content_type").stringValue(), is(contentType)); + assertThat(doc.rootDoc().getField("file.content_type").stringValue(), startsWith("text/plain;")); assertThat(doc.rootDoc().getField("file.content_type.suggest"), notNullValue()); - assertThat(doc.rootDoc().getField("file.content_type.suggest").stringValue(), is(contentType)); + assertThat(doc.rootDoc().getField("file.content_type.suggest").stringValue(), startsWith("text/plain;")); assertThat(doc.rootDoc().getField("file.content_length"), notNullValue()); assertThat(doc.rootDoc().getField("file.content_length").numericValue().intValue(), is(originalText.length())); @@ -131,9 +130,9 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase { assertThat(doc.rootDoc().getField("file.content").stringValue(), is(originalText + "\n")); assertThat(doc.rootDoc().getField("file.content_type"), notNullValue()); - assertThat(doc.rootDoc().getField("file.content_type").stringValue(), is(contentType)); + assertThat(doc.rootDoc().getField("file.content_type").stringValue(), startsWith("text/plain;")); assertThat(doc.rootDoc().getField("file.content_type.suggest"), notNullValue()); - assertThat(doc.rootDoc().getField("file.content_type.suggest").stringValue(), is(contentType)); + assertThat(doc.rootDoc().getField("file.content_type.suggest").stringValue(), 
startsWith("text/plain;")); assertThat(doc.rootDoc().getField("file.content_length"), notNullValue()); assertThat(doc.rootDoc().getField("file.content_length").numericValue().intValue(), is(originalText.length())); diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java index 934bf1b7157..0023fc44e24 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java @@ -22,10 +22,14 @@ package org.elasticsearch.mapper.attachments; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; +import org.junit.Test; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; @@ -107,4 +111,32 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { assertThat(doc.get(docMapper.mappers().getMapper("file.content").fieldType().names().indexName()), containsString("This document tests the ability of Apache Tika to extract content")); } + /** + * See issue https://github.com/elastic/elasticsearch-mapper-attachments/issues/169 + * Mapping should not contain field names with dot. 
+ */ + public void testMapperErrorWithDotTwoLevels169() throws Exception { + XContentBuilder mappingBuilder = jsonBuilder(); + mappingBuilder.startObject() + .startObject("mail") + .startObject("properties") + .startObject("attachments") + .startObject("properties") + .startObject("innerfield") + .field("type", "attachment") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + + byte[] mapping = mappingBuilder.bytes().toBytes(); + MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY); + DocumentMapper docMapper = mapperService.parse("mail", new CompressedXContent(mapping), true); + // this should not throw an exception + mapperService.parse("mail", new CompressedXContent(docMapper.mapping().toString()), true); + // the mapping may not contain a field name with a dot + assertFalse(docMapper.mapping().toString().contains(".")); + } + } diff --git a/plugins/mapper-murmur3/licenses/no_deps.txt b/plugins/mapper-murmur3/licenses/no_deps.txt deleted file mode 100644 index 8cce254d037..00000000000 --- a/plugins/mapper-murmur3/licenses/no_deps.txt +++ /dev/null @@ -1 +0,0 @@ -This plugin has no third party dependencies diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java new file mode 100644 index 00000000000..b3ad01bae49 --- /dev/null +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java @@ -0,0 +1,87 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper.murmur3; + +import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.TestUtil; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.plugin.mapper.MapperMurmur3Plugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collection; +import java.util.Collections; +import java.util.concurrent.ExecutionException; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) +@LuceneTestCase.SuppressFileSystems("ExtrasFS") +public class Murmur3FieldMapperUpgradeTests extends ESIntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return Collections.<Class<? extends Plugin>>singleton(MapperMurmur3Plugin.class); + } + + public void testUpgradeOldMapping() throws IOException, ExecutionException, InterruptedException { + final String indexName = "index-mapper-murmur3-2.0.0"; + InternalTestCluster.Async<String> master = internalCluster().startNodeAsync(); + Path unzipDir = createTempDir(); + Path unzipDataDir = unzipDir.resolve("data"); + Path backwardsIndex = getBwcIndicesPath().resolve(indexName + ".zip"); + try (InputStream stream = Files.newInputStream(backwardsIndex)) { + TestUtil.unzip(stream, unzipDir); + } + assertTrue(Files.exists(unzipDataDir)); + + Path dataPath = createTempDir(); + Settings settings = Settings.builder() + .put("path.data", dataPath) + .build(); + final String node = internalCluster().startDataOnlyNode(settings); // workaround for dangling index loading issue when node is master + Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, node).nodeDataPaths(); + assertEquals(1, nodePaths.length); + dataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER); + assertFalse(Files.exists(dataPath)); + Path src = unzipDataDir.resolve(indexName + "/nodes/0/indices"); + Files.move(src, dataPath); + + master.get(); + // force reloading dangling indices with a cluster state republish + client().admin().cluster().prepareReroute().get(); + ensureGreen(indexName); + final SearchResponse countResponse = client().prepareSearch(indexName).setSize(0).get(); + ElasticsearchAssertions.assertHitCount(countResponse, 3L); + + final SearchResponse cardinalityResponse = client().prepareSearch(indexName).addAggregation( + AggregationBuilders.cardinality("card").field("foo.hash")).get(); + Cardinality cardinality = cardinalityResponse.getAggregations().get("card"); + assertEquals(3L, cardinality.getValue()); + } +} diff --git a/plugins/mapper-murmur3/src/test/resources/indices/bwc/index-mapper-murmur3-2.0.0.zip b/plugins/mapper-murmur3/src/test/resources/indices/bwc/index-mapper-murmur3-2.0.0.zip new file mode 100644 index 00000000000..0b69aac180b Binary files /dev/null and b/plugins/mapper-murmur3/src/test/resources/indices/bwc/index-mapper-murmur3-2.0.0.zip differ diff --git a/plugins/mapper-size/licenses/no_deps.txt b/plugins/mapper-size/licenses/no_deps.txt deleted file mode 100644 index 8cce254d037..00000000000 --- 
a/plugins/mapper-size/licenses/no_deps.txt +++ /dev/null @@ -1 +0,0 @@ -This plugin has no third party dependencies diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java index 4de98f57d2f..aaf46553a75 100644 --- a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java +++ b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java @@ -28,7 +28,6 @@ import org.elasticsearch.index.analysis.NumericIntegerAnalyzer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; @@ -178,7 +177,7 @@ public class SizeFieldMapper extends MetadataFieldMapper { } @Override - public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + public void merge(Mapper mergeWith, MergeResult mergeResult) { SizeFieldMapper sizeFieldMapperMergeWith = (SizeFieldMapper) mergeWith; if (!mergeResult.simulate()) { if (sizeFieldMapperMergeWith.enabledState != enabledState && !sizeFieldMapperMergeWith.enabledState.unset()) { diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java new file mode 100644 index 00000000000..4529111c16e --- /dev/null +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java @@ -0,0 +1,98 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper.size; + +import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.TestUtil; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.plugin.mapper.MapperSizePlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHitField; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collection; +import java.util.Collections; +import java.util.Map; +import java.util.concurrent.ExecutionException; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) +@LuceneTestCase.SuppressFileSystems("ExtrasFS") +public class SizeFieldMapperUpgradeTests extends ESIntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return Collections.<Class<? extends Plugin>>singleton(MapperSizePlugin.class); + } + + public void testUpgradeOldMapping() throws IOException, ExecutionException, InterruptedException { + final String indexName = "index-mapper-size-2.0.0"; + InternalTestCluster.Async<String> master = internalCluster().startNodeAsync(); + Path unzipDir = createTempDir(); + Path unzipDataDir = unzipDir.resolve("data"); + Path backwardsIndex = getBwcIndicesPath().resolve(indexName + ".zip"); + try (InputStream stream = Files.newInputStream(backwardsIndex)) { + TestUtil.unzip(stream, unzipDir); + } + assertTrue(Files.exists(unzipDataDir)); + + Path dataPath = createTempDir(); + Settings settings = Settings.builder() + .put("path.data", dataPath) + .build(); + final String node = internalCluster().startDataOnlyNode(settings); // workaround for dangling index loading issue when node is master + Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, node).nodeDataPaths(); + assertEquals(1, nodePaths.length); + dataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER); + assertFalse(Files.exists(dataPath)); + Path src = unzipDataDir.resolve(indexName + "/nodes/0/indices"); + Files.move(src, dataPath); + master.get(); + // force reloading dangling indices with a cluster state republish + client().admin().cluster().prepareReroute().get(); + ensureGreen(indexName); + final SearchResponse countResponse = client().prepareSearch(indexName).setSize(0).get(); + ElasticsearchAssertions.assertHitCount(countResponse, 3L); + + final SearchResponse sizeResponse = client().prepareSearch(indexName) + .addField("_source") + .addField("_size") + .get(); + ElasticsearchAssertions.assertHitCount(sizeResponse, 3L); + for (SearchHit hit : sizeResponse.getHits().getHits()) { + String source = hit.getSourceAsString(); + assertNotNull(source); + Map<String, SearchHitField> fields = hit.getFields(); + assertTrue(fields.containsKey("_size")); + Number size = fields.get("_size").getValue(); + assertNotNull(size); + assertEquals(source.length(), size.longValue()); + } + } + +} diff --git a/plugins/mapper-size/src/test/resources/indices/bwc/index-mapper-size-2.0.0.zip b/plugins/mapper-size/src/test/resources/indices/bwc/index-mapper-size-2.0.0.zip new file mode 100644 index 00000000000..0a74f835c3e Binary files /dev/null and b/plugins/mapper-size/src/test/resources/indices/bwc/index-mapper-size-2.0.0.zip differ diff --git 
a/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java b/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java index 2911e278c38..a38a8ed3c51 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java @@ -19,6 +19,7 @@ package org.elasticsearch.plugin.repository.s3; +import org.elasticsearch.SpecialPermission; import org.elasticsearch.cloud.aws.S3Module; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Module; @@ -27,6 +28,9 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoriesModule; import org.elasticsearch.repositories.s3.S3Repository; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.text.ParseException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -36,6 +40,26 @@ import java.util.Collections; */ public class S3RepositoryPlugin extends Plugin { + // ClientConfiguration clinit has some classloader problems + // TODO: fix that + static { + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + AccessController.doPrivileged(new PrivilegedAction<Void>() { + @Override + public Void run() { + try { + Class.forName("com.amazonaws.ClientConfiguration"); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + return null; + } + }); + } + @Override public String name() { return "repository-s3"; diff --git a/plugins/lang-expression/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy similarity index 84% rename from plugins/lang-expression/src/main/plugin-metadata/plugin-security.policy rename to plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy index e45c1b86ceb..62b29a2b78f 100644 --- a/plugins/lang-expression/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy @@ -18,6 +18,7 @@ */ grant { - // needed to generate runtime classes - permission java.lang.RuntimePermission "createClassLoader"; + // needed because of problems in ClientConfiguration + // TODO: get this fixed in aws sdk + permission java.lang.RuntimePermission "getClassLoader"; }; diff --git a/plugins/site-example/licenses/no_deps.txt b/plugins/site-example/licenses/no_deps.txt deleted file mode 100644 index 8cce254d037..00000000000 --- a/plugins/site-example/licenses/no_deps.txt +++ /dev/null @@ -1 +0,0 @@ -This plugin has no third party dependencies diff --git a/plugins/store-smb/licenses/no_deps.txt b/plugins/store-smb/licenses/no_deps.txt deleted file mode 100644 index 8cce254d037..00000000000 --- a/plugins/store-smb/licenses/no_deps.txt +++ /dev/null @@ -1 +0,0 @@ -This plugin has no third party dependencies diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java index 20ef464e83b..695d2a42321 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java @@ -111,6 +111,8 @@ public class EvilSecurityTests extends ESTestCase { 
assertExactPermissions(new FilePermission(environment.binFile().toString(), "read,readlink"), permissions); // lib file: ro assertExactPermissions(new FilePermission(environment.libFile().toString(), "read,readlink"), permissions); + // modules file: ro + assertExactPermissions(new FilePermission(environment.modulesFile().toString(), "read,readlink"), permissions); // config file: ro assertExactPermissions(new FilePermission(environment.configFile().toString(), "read,readlink"), permissions); // scripts file: ro diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/SeccompTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/SeccompTests.java index da0530fca4a..a319aaabb70 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/SeccompTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/SeccompTests.java @@ -19,11 +19,15 @@ package org.elasticsearch.bootstrap; +import org.apache.lucene.util.Constants; import org.elasticsearch.test.ESTestCase; /** Simple tests seccomp filter is working. */ public class SeccompTests extends ESTestCase { + /** command to try to run in tests */ + static final String EXECUTABLE = Constants.WINDOWS ? "calc" : "ls"; + @Override public void setUp() throws Exception { super.setUp(); @@ -44,7 +48,7 @@ public class SeccompTests extends ESTestCase { public void testNoExecution() throws Exception { try { - Runtime.getRuntime().exec("ls"); + Runtime.getRuntime().exec(EXECUTABLE); fail("should not have been able to execute!"); } catch (Exception expected) { // we can't guarantee how its converted, currently its an IOException, like this: @@ -70,7 +74,7 @@ public class SeccompTests extends ESTestCase { @Override public void run() { try { - Runtime.getRuntime().exec("ls"); + Runtime.getRuntime().exec(EXECUTABLE); fail("should not have been able to execute!"); } catch (Exception expected) { // ok diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java index 0e7597e61e8..34a57d7fbcb 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java @@ -630,8 +630,6 @@ public class PluginManagerTests extends ESIntegTestCase { PluginManager.checkForOfficialPlugins("analysis-smartcn"); PluginManager.checkForOfficialPlugins("analysis-stempel"); PluginManager.checkForOfficialPlugins("delete-by-query"); - PluginManager.checkForOfficialPlugins("lang-expression"); - PluginManager.checkForOfficialPlugins("lang-groovy"); PluginManager.checkForOfficialPlugins("lang-javascript"); PluginManager.checkForOfficialPlugins("lang-python"); PluginManager.checkForOfficialPlugins("mapper-attachments"); diff --git a/qa/smoke-test-plugins/build.gradle b/qa/smoke-test-plugins/build.gradle index d93594bd6dc..70611aed371 100644 --- a/qa/smoke-test-plugins/build.gradle +++ b/qa/smoke-test-plugins/build.gradle @@ -21,22 +21,19 @@ import org.elasticsearch.gradle.MavenFilteringHack apply plugin: 'elasticsearch.rest-test' -ext.pluginCount = 0 -for (Project subproj : project.rootProject.subprojects) { - if (subproj.path.startsWith(':plugins:')) { - integTest { - def bundlePlugin = subproj.tasks.findByName('bundlePlugin') - dependsOn bundlePlugin - cluster { - plugin subproj.name, bundlePlugin.outputs.files - } +ext.pluginsCount = 0 +project.rootProject.subprojects.findAll { it.path.startsWith(':plugins:') }.each { subproj -> 
+ integTest { + cluster { + // need to get a non-decorated project object, so must re-lookup the project by path + plugin subproj.name, project(subproj.path) } - pluginCount += 1 } -} + pluginsCount += 1 +} ext.expansions = [ - 'expected.plugin.count': pluginCount + 'expected.plugins.count': pluginsCount ] processTestResources { diff --git a/qa/smoke-test-plugins/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_basic.yaml b/qa/smoke-test-plugins/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_basic.yaml index dbb09225fce..6a92845a062 100644 --- a/qa/smoke-test-plugins/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_basic.yaml +++ b/qa/smoke-test-plugins/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_basic.yaml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - length: { nodes.$master.plugins: ${expected.plugin.count} } + - length: { nodes.$master.plugins: ${expected.plugins.count} } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/20_tar_package.bats b/qa/vagrant/src/test/resources/packaging/scripts/20_tar_package.bats index 383375f9531..f6a9b22ec3b 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/20_tar_package.bats +++ b/qa/vagrant/src/test/resources/packaging/scripts/20_tar_package.bats @@ -92,7 +92,6 @@ setup() { # starting Elasticsearch so we don't have to wait for elasticsearch to scan for # them. install_elasticsearch_test_scripts - ESPLUGIN_COMMAND_USER=elasticsearch install_and_check_plugin lang groovy start_elasticsearch_service run_elasticsearch_tests stop_elasticsearch_service diff --git a/qa/vagrant/src/test/resources/packaging/scripts/30_deb_package.bats b/qa/vagrant/src/test/resources/packaging/scripts/30_deb_package.bats index 553f867f60d..048b208105e 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/30_deb_package.bats +++ b/qa/vagrant/src/test/resources/packaging/scripts/30_deb_package.bats @@ -86,7 +86,6 @@ setup() { # starting Elasticsearch so we don't have to wait for elasticsearch to scan for # them. install_elasticsearch_test_scripts - ESPLUGIN_COMMAND_USER=root install_and_check_plugin lang groovy start_elasticsearch_service run_elasticsearch_tests } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats b/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats index 7f447e51cb3..d27622ffdda 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats +++ b/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats @@ -81,7 +81,6 @@ setup() { # starting Elasticsearch so we don't have to wait for elasticsearch to scan for # them. install_elasticsearch_test_scripts - ESPLUGIN_COMMAND_USER=root install_and_check_plugin lang groovy start_elasticsearch_service run_elasticsearch_tests } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/60_systemd.bats b/qa/vagrant/src/test/resources/packaging/scripts/60_systemd.bats index 6558a3831b3..da7b6a180f1 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/60_systemd.bats +++ b/qa/vagrant/src/test/resources/packaging/scripts/60_systemd.bats @@ -68,7 +68,6 @@ setup() { # starting Elasticsearch so we don't have to wait for elasticsearch to scan for # them. 
install_elasticsearch_test_scripts - ESPLUGIN_COMMAND_USER=root install_and_check_plugin lang groovy systemctl start elasticsearch.service wait_for_elasticsearch_status assert_file_exist "/var/run/elasticsearch/elasticsearch.pid" diff --git a/qa/vagrant/src/test/resources/packaging/scripts/70_sysv_initd.bats b/qa/vagrant/src/test/resources/packaging/scripts/70_sysv_initd.bats index 1c5cce59174..fad764eb711 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/70_sysv_initd.bats +++ b/qa/vagrant/src/test/resources/packaging/scripts/70_sysv_initd.bats @@ -70,7 +70,6 @@ setup() { # Install scripts used to test script filters and search templates before # starting Elasticsearch so we don't have to wait for elasticsearch to scan for # them. - ESPLUGIN_COMMAND_USER=root install_and_check_plugin lang groovy install_elasticsearch_test_scripts service elasticsearch start wait_for_elasticsearch_status diff --git a/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash index 79e4426095a..48c34fa03af 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash @@ -223,14 +223,6 @@ fi install_and_check_plugin discovery multicast } -@test "[$GROUP] install lang-expression plugin" { - install_and_check_plugin lang expression -} - -@test "[$GROUP] install lang-groovy plugin" { - install_and_check_plugin lang groovy -} - @test "[$GROUP] install javascript plugin" { install_and_check_plugin lang javascript rhino-*.jar } @@ -331,14 +323,6 @@ fi remove_plugin discovery-multicast } -@test "[$GROUP] remove lang-expression plugin" { - remove_plugin lang-expression -} - -@test "[$GROUP] remove lang-groovy plugin" { - remove_plugin lang-groovy -} - @test "[$GROUP] remove javascript plugin" { remove_plugin lang-javascript } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.forcemerge.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.forcemerge.json index 0e6c6ab23f9..c4170c1962a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.forcemerge.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.forcemerge.json @@ -1,7 +1,7 @@ { "indices.forcemerge": { "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/indices-forcemerge.html", - "methods": ["POST", "GET"], + "methods": ["POST"], "url": { "path": "/_forcemerge", "paths": ["/_forcemerge", "/{index}/_forcemerge"], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.segments.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.segments.json index 123ce1373bb..cc51bdab2c3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.segments.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.segments.json @@ -33,6 +33,11 @@ }, "operation_threading": { "description" : "TODO: ?" 
+ }, + "verbose": { + "type": "boolean", + "description": "Includes detailed memory usage by Lucene.", + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.stats.json index 7ec665ca060..7099f3e2fd2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.stats.json @@ -53,8 +53,8 @@ "type" : "list", "description" : "A comma-separated list of document types for the `indexing` index metric" } - }, - "body": null - } + } + }, + "body": null } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yaml new file mode 100644 index 00000000000..71229686eed --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yaml @@ -0,0 +1,30 @@ +--- +"REST test with headers": + - skip: + features: headers + + - do: + index: + index: test_1 + type: test + id: 1 + body: { "body": "foo" } + + - do: + headers: + Content-Type: application/yaml + get: + index: test_1 + type: _all + id: 1 + + - match: + $body: | + /^---\n + _index:\s+\"test_1"\n + _type:\s+"test"\n + _id:\s+"1"\n + _version:\s+1\n + found:\s+true\n + _source:\n + \s+body:\s+"foo"\n$/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yaml index b49d659cec3..e0ac2aea2df 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yaml @@ -79,7 +79,6 @@ body: fields: [ include.field2 ] query: { match_all: {} } - - match: { hits.hits.0.fields: { include.field2 : [v2] }} - is_false: hits.hits.0._source - do: @@ -87,7 +86,7 @@ body: fields: [ include.field2, _source ] query: { match_all: {} } - - match: { hits.hits.0.fields: { include.field2 : [v2] }} + - match: { hits.hits.0._source.include.field2: v2 } - is_true: hits.hits.0._source @@ -95,4 +94,3 @@ search: fielddata_fields: [ "count" ] - match: { hits.hits.0.fields.count: [1] } - diff --git a/settings.gradle b/settings.gradle index a8a37efd731..0791c3d1752 100644 --- a/settings.gradle +++ b/settings.gradle @@ -3,11 +3,14 @@ rootProject.name = 'elasticsearch' List projects = [ 'rest-api-spec', 'core', + 'distribution:integ-test-zip', 'distribution:zip', 'distribution:tar', 'distribution:deb', 'distribution:rpm', 'test-framework', + 'modules:lang-expression', + 'modules:lang-groovy', 'plugins:analysis-icu', 'plugins:analysis-kuromoji', 'plugins:analysis-phonetic', @@ -18,8 +21,6 @@ List projects = [ 'plugins:discovery-ec2', 'plugins:discovery-gce', 'plugins:discovery-multicast', - 'plugins:lang-expression', - 'plugins:lang-groovy', 'plugins:lang-javascript', 'plugins:lang-python', 'plugins:mapper-attachments', diff --git a/test-framework/build.gradle b/test-framework/build.gradle index 8252df9e3dc..a423f56c922 100644 --- a/test-framework/build.gradle +++ b/test-framework/build.gradle @@ -43,3 +43,7 @@ forbiddenApisMain { signaturesURLs = [PrecommitTasks.getResource('/forbidden/all-signatures.txt'), PrecommitTasks.getResource('/forbidden/test-signatures.txt')] } + +// TODO: should we have licenses for our test deps? 
+dependencyLicenses.enabled = false + diff --git a/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 7506c52c709..cc5348bc6bc 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test-framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1035,7 +1035,7 @@ public abstract class ESIntegTestCase extends ESTestCase { /** * Sets the cluster's minimum master node and make sure the response is acknowledge. - * Note: this doesn't guaranty the new settings is in effect, just that it has been received bu all nodes. + * Note: this doesn't guarantee that the new setting has taken effect, just that it has been received by all nodes. */ public void setMinimumMasterNodes(int n) { assertTrue(client().admin().cluster().prepareUpdateSettings().setTransientSettings( diff --git a/test-framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java b/test-framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java index c040aae1292..39e1857f412 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java +++ b/test-framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java @@ -54,6 +54,6 @@ public class IndexSettingsModule extends AbstractModule { .put(settings) .build(); IndexMetaData metaData = IndexMetaData.builder(index.getName()).settings(build).build(); - return new IndexSettings(metaData, Settings.EMPTY, Collections.EMPTY_LIST); + return new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); } } diff --git a/test-framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test-framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index d4e2af5f30c..468b1877250 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test-framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -650,7 +650,7 @@ public class TestSearchContext extends SearchContext { @Override public Set getHeaders() { - return Collections.EMPTY_SET; + return Collections.emptySet(); } @Override diff --git a/test-framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java b/test-framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java index 866a19e0a72..31380e25eb3 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java +++ b/test-framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java @@ -111,7 +111,7 @@ public final class XContentTestUtils { if (second instanceof Map) { return differenceBetweenMapsIgnoringArrayOrder(path, (Map) first, (Map) second); } else { - return path + ": the second element is not a map"; + return path + ": the second element is not a map (got " + second +")"; } } else { if (first.equals(second)) { diff --git a/test-framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java b/test-framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java index 834e7d540c4..cb3d643f555 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java +++ b/test-framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java @@ -25,7 +25,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.service.PendingClusterTask; 
-import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.transport.DummyTransportAddress; @@ -115,12 +114,12 @@ public class NoopClusterService implements ClusterService { } @Override - public void submitStateUpdateTask(String source, Priority priority, ClusterStateUpdateTask updateTask) { + public void submitStateUpdateTask(String source, ClusterStateUpdateTask updateTask) { } @Override - public void submitStateUpdateTask(String source, ClusterStateUpdateTask updateTask) { + public void submitStateUpdateTask(String source, T task, ClusterStateTaskConfig config, ClusterStateTaskExecutor executor, ClusterStateTaskListener listener) { } diff --git a/test-framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java b/test-framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java index b13963961a0..5dc8cce99c6 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java +++ b/test-framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java @@ -28,7 +28,6 @@ import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider; import org.elasticsearch.cluster.service.PendingClusterTask; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.logging.ESLogger; @@ -40,10 +39,7 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.threadpool.ThreadPool; -import java.util.Collection; -import java.util.Iterator; -import java.util.List; -import java.util.Queue; +import java.util.*; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ScheduledFuture; @@ -183,31 +179,35 @@ public class TestClusterService implements ClusterService { } @Override - synchronized public void submitStateUpdateTask(String source, Priority priority, ClusterStateUpdateTask updateTask) { - logger.debug("processing [{}]", source); - if (state().nodes().localNodeMaster() == false && updateTask.runOnlyOnMaster()) { - updateTask.onNoLongerMaster(source); - logger.debug("failed [{}], no longer master", source); - return; - } - ClusterState newState; - ClusterState previousClusterState = state; - try { - newState = updateTask.execute(previousClusterState); - } catch (Exception e) { - updateTask.onFailure(source, new ElasticsearchException("failed to process cluster state update task [" + source + "]", e)); - return; - } - setStateAndNotifyListeners(newState); - if (updateTask instanceof ClusterStateUpdateTask) { - ((ClusterStateUpdateTask) updateTask).clusterStateProcessed(source, previousClusterState, newState); - } - logger.debug("finished [{}]", source); + public void submitStateUpdateTask(String source, ClusterStateUpdateTask updateTask) { + submitStateUpdateTask(source, null, updateTask, updateTask, updateTask); } @Override - public void submitStateUpdateTask(String source, ClusterStateUpdateTask updateTask) { - submitStateUpdateTask(source, Priority.NORMAL, updateTask); + synchronized public void submitStateUpdateTask(String source, T task, ClusterStateTaskConfig config, ClusterStateTaskExecutor executor, ClusterStateTaskListener 
listener) { + logger.debug("processing [{}]", source); + if (state().nodes().localNodeMaster() == false && executor.runOnlyOnMaster()) { + listener.onNoLongerMaster(source); + logger.debug("failed [{}], no longer master", source); + return; + } + ClusterStateTaskExecutor.BatchResult batchResult; + ClusterState previousClusterState = state; + try { + batchResult = executor.execute(previousClusterState, Arrays.asList(task)); + } catch (Exception e) { + batchResult = ClusterStateTaskExecutor.BatchResult.builder().failure(task, e).build(previousClusterState); + } + + batchResult.executionResults.get(task).handle( + () -> {}, + ex -> listener.onFailure(source, new ElasticsearchException("failed to process cluster state update task [" + source + "]", ex)) + ); + + setStateAndNotifyListeners(batchResult.resultingState); + listener.clusterStateProcessed(source, previousClusterState, batchResult.resultingState); + logger.debug("finished [{}]", source); + } @Override diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java b/test-framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java index 8154abfbd33..e318843e84f 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java +++ b/test-framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java @@ -58,7 +58,7 @@ public class BlockClusterStateProcessing extends SingleNodeDisruption { boolean success = disruptionLatch.compareAndSet(null, new CountDownLatch(1)); assert success : "startDisrupting called without waiting on stopDistrupting to complete"; final CountDownLatch started = new CountDownLatch(1); - clusterService.submitStateUpdateTask("service_disruption_block", Priority.IMMEDIATE, new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("service_disruption_block", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public boolean runOnlyOnMaster() { diff --git a/test-framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java b/test-framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java index 3c56f8305c0..b9c663686b1 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java +++ b/test-framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java @@ -102,7 +102,7 @@ public class SlowClusterStateProcessing extends SingleNodeDisruption { return false; } final AtomicBoolean stopped = new AtomicBoolean(false); - clusterService.submitStateUpdateTask("service_disruption_delay", Priority.IMMEDIATE, new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("service_disruption_delay", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public boolean runOnlyOnMaster() { diff --git a/test-framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test-framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 457a283c839..7176916cdfe 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test-framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -30,7 +30,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import 
org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo; +import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder; import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; @@ -741,7 +741,7 @@ public class ElasticsearchAssertions { Assert.assertThat(response.getNodesMap().get(nodeId), notNullValue()); - PluginsInfo plugins = response.getNodesMap().get(nodeId).getPlugins(); + PluginsAndModules plugins = response.getNodesMap().get(nodeId).getPlugins(); Assert.assertThat(plugins, notNullValue()); List pluginNames = filterAndMap(plugins, jvmPluginPredicate, nameFunction); @@ -761,7 +761,7 @@ public class ElasticsearchAssertions { boolean anyHaveUrls = plugins - .getInfos() + .getPluginInfos() .stream() .filter(jvmPluginPredicate.and(sitePluginPredicate.negate())) .map(urlFunction) @@ -791,8 +791,8 @@ public class ElasticsearchAssertions { } } - private static List filterAndMap(PluginsInfo pluginsInfo, Predicate predicate, Function function) { - return pluginsInfo.getInfos().stream().filter(predicate).map(function).collect(Collectors.toList()); + private static List filterAndMap(PluginsAndModules pluginsInfo, Predicate predicate, Function function) { + return pluginsInfo.getPluginInfos().stream().filter(predicate).map(function).collect(Collectors.toList()); } private static Predicate jvmPluginPredicate = p -> p.isJvm(); diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/Rest1IT.java b/test-framework/src/main/java/org/elasticsearch/test/rest/Rest1IT.java deleted file mode 100644 index bc80123debc..00000000000 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/Rest1IT.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.test.rest; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.elasticsearch.test.rest.parser.RestTestParseException; - -import java.io.IOException; - -/** Rest API tests subset 1 */ -public class Rest1IT extends ESRestTestCase { - public Rest1IT(@Name("yaml") RestTestCandidate testCandidate) { - super(testCandidate); - } - @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { - return createParameters(1, 8); - } -} diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/Rest2IT.java b/test-framework/src/main/java/org/elasticsearch/test/rest/Rest2IT.java deleted file mode 100644 index a2fb5ad9226..00000000000 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/Rest2IT.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.test.rest; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.elasticsearch.test.rest.parser.RestTestParseException; - -import java.io.IOException; - -/** Rest API tests subset 2 */ -public class Rest2IT extends ESRestTestCase { - public Rest2IT(@Name("yaml") RestTestCandidate testCandidate) { - super(testCandidate); - } - @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { - return createParameters(2, 8); - } -} diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/Rest4IT.java b/test-framework/src/main/java/org/elasticsearch/test/rest/Rest4IT.java deleted file mode 100644 index 3910167702a..00000000000 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/Rest4IT.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.test.rest; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.elasticsearch.test.rest.parser.RestTestParseException; - -import java.io.IOException; - -/** Rest API tests subset 4 */ -public class Rest4IT extends ESRestTestCase { - public Rest4IT(@Name("yaml") RestTestCandidate testCandidate) { - super(testCandidate); - } - @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { - return createParameters(4, 8); - } -} diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/Rest5IT.java b/test-framework/src/main/java/org/elasticsearch/test/rest/Rest5IT.java deleted file mode 100644 index 748b06c2c2a..00000000000 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/Rest5IT.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.test.rest; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.elasticsearch.test.rest.parser.RestTestParseException; - -import java.io.IOException; - -/** Rest API tests subset 5 */ -public class Rest5IT extends ESRestTestCase { - public Rest5IT(@Name("yaml") RestTestCandidate testCandidate) { - super(testCandidate); - } - @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { - return createParameters(5, 8); - } -} diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/Rest6IT.java b/test-framework/src/main/java/org/elasticsearch/test/rest/Rest6IT.java deleted file mode 100644 index e8fbcd4826c..00000000000 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/Rest6IT.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.test.rest; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.elasticsearch.test.rest.parser.RestTestParseException; - -import java.io.IOException; - -/** Rest API tests subset 6 */ -public class Rest6IT extends ESRestTestCase { - public Rest6IT(@Name("yaml") RestTestCandidate testCandidate) { - super(testCandidate); - } - @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { - return createParameters(6, 8); - } -} diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/Rest7IT.java b/test-framework/src/main/java/org/elasticsearch/test/rest/Rest7IT.java deleted file mode 100644 index cf68bdb5606..00000000000 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/Rest7IT.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.test.rest; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.elasticsearch.test.rest.parser.RestTestParseException; - -import java.io.IOException; - -/** Rest API tests subset 7 */ -public class Rest7IT extends ESRestTestCase { - public Rest7IT(@Name("yaml") RestTestCandidate testCandidate) { - super(testCandidate); - } - @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { - return createParameters(7, 8); - } -} diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java b/test-framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java index b7dad93d593..4054b8efce1 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java +++ b/test-framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java @@ -62,7 +62,8 @@ public class RestTestExecutionContext implements Closeable { * Saves the obtained response in the execution context. 
* @throws RestException if the returned status code is non ok */ - public RestResponse callApi(String apiName, Map params, List> bodies) throws IOException, RestException { + public RestResponse callApi(String apiName, Map params, List> bodies, + Map headers) throws IOException, RestException { //makes a copy of the parameters before modifying them for this specific request HashMap requestParams = new HashMap<>(params); for (Map.Entry entry : requestParams.entrySet()) { @@ -74,7 +75,7 @@ public class RestTestExecutionContext implements Closeable { String body = actualBody(bodies); try { - response = callApiInternal(apiName, requestParams, body); + response = callApiInternal(apiName, requestParams, body, headers); //we always stash the last response body stash.stashValue("body", response.getBody()); return response; @@ -104,8 +105,8 @@ public class RestTestExecutionContext implements Closeable { return XContentFactory.jsonBuilder().map(body).string(); } - private RestResponse callApiInternal(String apiName, Map params, String body) throws IOException, RestException { - return restClient.callApi(apiName, params, body); + private RestResponse callApiInternal(String apiName, Map params, String body, Map headers) throws IOException, RestException { + return restClient.callApi(apiName, params, body, headers); } /** diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java b/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java index 4b46a0e6498..63a8b397c45 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java +++ b/test-framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java @@ -132,7 +132,7 @@ public class RestClient implements Closeable { * @throws RestException if the obtained status code is non ok, unless the specific error code needs to be ignored * according to the ignore parameter received as input (which won't get sent to elasticsearch) */ - public RestResponse callApi(String apiName, Map params, String body) throws IOException, RestException { + public RestResponse callApi(String apiName, Map params, String body, Map headers) throws IOException, RestException { List ignores = new ArrayList<>(); Map requestParams = null; @@ -151,6 +151,9 @@ public class RestClient implements Closeable { } HttpRequestBuilder httpRequestBuilder = callApiBuilder(apiName, requestParams, body); + for (Map.Entry header : headers.entrySet()) { + httpRequestBuilder.addHeader(header.getKey(), header.getValue()); + } logger.debug("calling api [{}]", apiName); HttpResponse httpResponse = httpRequestBuilder.execute(); diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java b/test-framework/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java index ec5aef54459..2a20e0f3146 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java +++ b/test-framework/src/main/java/org/elasticsearch/test/rest/parser/DoSectionParser.java @@ -25,6 +25,8 @@ import org.elasticsearch.test.rest.section.ApiCallSection; import org.elasticsearch.test.rest.section.DoSection; import java.io.IOException; +import java.util.HashMap; +import java.util.Map; /** * Parser for do sections @@ -40,6 +42,8 @@ public class DoSectionParser implements RestTestFragmentParser { XContentParser.Token token; DoSection doSection = new DoSection(); + ApiCallSection apiCallSection = null; + Map headers = new HashMap<>(); while ((token = 
parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -49,8 +53,17 @@ public class DoSectionParser implements RestTestFragmentParser { doSection.setCatch(parser.text()); } } else if (token == XContentParser.Token.START_OBJECT) { - if (currentFieldName != null) { - ApiCallSection apiCallSection = new ApiCallSection(currentFieldName); + if ("headers".equals(currentFieldName)) { + String headerName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + headerName = parser.currentName(); + } else if (token.isValue()) { + headers.put(headerName, parser.text()); + } + } + } else if (currentFieldName != null) { // must be part of API call then + apiCallSection = new ApiCallSection(currentFieldName); String paramName = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -73,17 +86,20 @@ public class DoSectionParser implements RestTestFragmentParser { } } } - doSection.setApiCallSection(apiCallSection); } } } - - parser.nextToken(); - - if (doSection.getApiCallSection() == null) { - throw new RestTestParseException("client call section is mandatory within a do section"); + try { + if (apiCallSection == null) { + throw new RestTestParseException("client call section is mandatory within a do section"); + } + if (headers.isEmpty() == false) { + apiCallSection.addHeaders(headers); + } + doSection.setApiCallSection(apiCallSection); + } finally { + parser.nextToken(); } - return doSection; } } diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java b/test-framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java index da6c0b3be2c..030469148ed 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java +++ b/test-framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java @@ -33,6 +33,7 @@ public class ApiCallSection { private final String api; private final Map params = new HashMap<>(); + private final Map headers = new HashMap<>(); private final List> bodies = new ArrayList<>(); public ApiCallSection(String api) { @@ -56,6 +57,18 @@ public class ApiCallSection { this.params.put(key, value); } + public void addHeaders(Map otherHeaders) { + this.headers.putAll(otherHeaders); + } + + public void addHeader(String key, String value) { + this.headers.put(key, value); + } + + public Map getHeaders() { + return unmodifiableMap(headers); + } + public List> getBodies() { return Collections.unmodifiableList(bodies); } diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java b/test-framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java index 9a1bf1c9267..38504c4af5f 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java +++ b/test-framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java @@ -45,6 +45,9 @@ import static org.junit.Assert.fail; * * - do: * catch: missing + * headers: + * Authorization: Basic user:pass + * Content-Type: application/json * update: * index: test_1 * type: test @@ -86,7 +89,8 @@ public class DoSection implements ExecutableSection { } try { - RestResponse restResponse = executionContext.callApi(apiCallSection.getApi(), apiCallSection.getParams(), apiCallSection.getBodies()); + RestResponse restResponse = 
executionContext.callApi(apiCallSection.getApi(), apiCallSection.getParams(), + apiCallSection.getBodies(), apiCallSection.getHeaders()); if (Strings.hasLength(catchParam)) { String catchStatusCode; if (catches.containsKey(catchParam)) { diff --git a/test-framework/src/main/java/org/elasticsearch/test/rest/support/Features.java b/test-framework/src/main/java/org/elasticsearch/test/rest/support/Features.java index 018d2413737..0f51f72e8e5 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/rest/support/Features.java +++ b/test-framework/src/main/java/org/elasticsearch/test/rest/support/Features.java @@ -34,7 +34,7 @@ import java.util.List; */ public final class Features { - private static final List SUPPORTED = Arrays.asList("stash_in_path", "groovy_scripting"); + private static final List SUPPORTED = Arrays.asList("stash_in_path", "groovy_scripting", "headers"); private Features() { diff --git a/test-framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test-framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java index 9c2b8612d51..27a2e6fb22e 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java +++ b/test-framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java @@ -173,7 +173,7 @@ public class MockFSDirectoryService extends FsDirectoryService { private FsDirectoryService randomDirectorService(IndexStore indexStore, ShardPath path) { final IndexSettings indexSettings = indexStore.getIndexSettings(); final IndexMetaData build = IndexMetaData.builder(indexSettings.getIndexMetaData()).settings(Settings.builder().put(indexSettings.getSettings()).put(IndexModule.STORE_TYPE, RandomPicks.randomFrom(random, IndexModule.Type.values()).getSettingsKey())).build(); - final IndexSettings newIndexSettings = new IndexSettings(build, indexSettings.getNodeSettings(), Collections.EMPTY_LIST); + final IndexSettings newIndexSettings = new IndexSettings(build, indexSettings.getNodeSettings(), Collections.emptyList()); return new FsDirectoryService(newIndexSettings, indexStore, path); } diff --git a/test-framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java b/test-framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java index 476b89aa1a9..2363d98a113 100644 --- a/test-framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java +++ b/test-framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java @@ -178,6 +178,6 @@ public class CapturingTransport implements Transport { @Override public List getLocalAddresses() { - return Collections.EMPTY_LIST; + return Collections.emptyList(); } } diff --git a/test-framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java b/test-framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java index 5f0f2bd8b35..3c65fda94ca 100644 --- a/test-framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java +++ b/test-framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java @@ -341,6 +341,29 @@ public class DoSectionParserTests extends AbstractParserTestCase { assertThat(doSection.getApiCallSection().hasBody(), equalTo(false)); } + public void testParseDoSectionWithHeaders() throws Exception { + parser = YamlXContent.yamlXContent.createParser( + "headers:\n" + + " Authorization: \"thing one\"\n" + + " Content-Type: \"application/json\"\n" + + "indices.get_warmer:\n" + 
+ " index: test_index\n" + + " name: test_warmer" + ); + + DoSectionParser doSectionParser = new DoSectionParser(); + DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + + assertThat(doSection.getApiCallSection(), notNullValue()); + assertThat(doSection.getApiCallSection().getApi(), equalTo("indices.get_warmer")); + assertThat(doSection.getApiCallSection().getParams().size(), equalTo(2)); + assertThat(doSection.getApiCallSection().hasBody(), equalTo(false)); + assertThat(doSection.getApiCallSection().getHeaders(), notNullValue()); + assertThat(doSection.getApiCallSection().getHeaders().size(), equalTo(2)); + assertThat(doSection.getApiCallSection().getHeaders().get("Authorization"), equalTo("thing one")); + assertThat(doSection.getApiCallSection().getHeaders().get("Content-Type"), equalTo("application/json")); + } + public void testParseDoSectionWithoutClientCallSection() throws Exception { parser = YamlXContent.yamlXContent.createParser( "catch: missing\n"